Просмотр исходного кода

Merge pull request #523 from nihalranjan-hpc/test_control_plane

Issue #512: Test Automation Script for Control_Plane
Lucas A. Wilson 3 лет назад
Родитель
Сommit
6c16f352cc

+ 709 - 0
control_plane/test/test_control_plane.yml

@@ -0,0 +1,709 @@
+#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---
+
+# Testcase OMNIA_1.1_MS_TC_001
+# Test Case to validate the execution of control_plane.yml with valid inputs -- Default Test Case
+- name: OMNIA_1.1_MS_TC_001
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: VERIFY_OMNIA_01
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+             
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+          tags: Execute_control_plane
+       
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+          tags: Execute_Validation_Script
+
+# Testcase OMNIA_1.1_MS_TC_002
+# Test Case to validate the execution of control_plane.yml with no input
+- name: OMNIA_1.1_MS_TC_002
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_002
+
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc02 }}"
+      tags: Replace_input
+     
+    - block:    
+        - name: Execute control_plane_common role
+          include_role:
+            name: ../roles/control_plane_common
+          vars:
+            base_vars_filename: ../input_params/base_vars.yml
+      rescue:
+        - name: Validate error
+          assert:
+            that: input_base_failure_msg in ansible_failed_result.msg
+            success_msg: "{{ input_config_check_success_msg }}"
+            fail_msg: "{{ input_config_check_fail_msg }}"
+      tags: Execute_common_role
+    
+# Testcase OMNIA_1.1_MS_TC_003 and OMNIA_1.1_MS_TC_004
+# Test Case to validate the execution of control_plane.yml with NFS share already present
+- name: OMNIA_1.1_MS_TC_003
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_003,TC_004
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+    
+    - name: Creating new control_plane.yml
+      copy:
+        dest: "../test_control_plane.yml"
+        content: |
+         - name: Executing omnia roles
+           hosts: localhost
+           connection: local
+           roles:
+              - control_plane_common
+              - control_plane_repo
+        mode: '0644'
+      tags: Replace_control_plane
+      
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook test_control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+          tags: Execute_control_plane
+    
+    - block:
+        - name: Execute validation script
+          include_tasks: "{{ control_plane_validation_script_path }}" 
+          tags: Execute_Validation_Script
+      
+    - name: Check if newly created control_plane.yml exists
+      stat:
+        path: ../test_control_plane.yml
+      register: foo_stat
+      tags: Delete_test_control_plane
+
+    - name: Delete newly created control_plane.yml
+      file:
+        state: absent
+        path: ../test_control_plane.yml
+      when: foo_stat.stat.exists
+      tags: Delete_test_control_plane
+
+# Testcase OMNIA_1.1_MS_TC_005
+# Test Case to validate the execution of control_plane.yml after a successful run and validate k8s pods along with services after reboot.
+- name: OMNIA_1.1_MS_TC_005
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_005
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+      
+    - name: Check uptime
+      command: uptime -p
+      register: system_uptime
+      changed_when: false
+      tags: Check_Uptime
+      
+    - name: Extracting data from system_uptime
+      set_fact:
+        uptime_number: "{{ system_uptime.stdout.split()[1] }}"
+        uptime_min: "{{ system_uptime.stdout.split()[2] }}"
+    
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      when: uptime_number|int > 15
+      tags: Replace_input
+      
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+          when: uptime_number|int > 15
+          tags: Execute_control_plane
+          
+        - name: Reboot system
+          command: reboot
+          when: uptime_number|int > 15
+          tags: Reboot_System
+    
+    - block:
+        - name: Wait for 200sec for kubectl to get things ready
+          pause:
+            seconds: 200
+          when: (uptime_number| int <= 15) and (uptime_min == "minutes")
+
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+          when: (uptime_number| int <= 15) and (uptime_min == "minutes")
+          tags: Execute_Validation_Script
+
+# Testcase OMNIA_1.1_MS_TC_006 and OMNIA_1.1_MS_TC_007
+# Test Case to validate the execution of control_plane.yml and after a successful run the user deletes/stops all pods
+- name: OMNIA_1.1_MS_TC_006
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_006,TC_007
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: "0644"
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+      
+    - name: Delete all containers
+      command: kubectl delete pods --all --all-namespaces
+      changed_when: false
+      tags: Delete_Pods
+      
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+      
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+      tags: Execute_Validation_Script
+
+# Testcase OMNIA_1.1_MS_TC_008
+# Test Case to validate the execution of control_plane.yml with infiniband=false, powervault=true and ethernet=true
+- name: OMNIA_1.1_MS_TC_008
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_008
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - name: Editing base_vars.yml for powervault to true
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ powervault_false }}"
+        replace: "{{ powervault_true }}"
+      tags: Edit_base_vars
+    
+    - name: Editing base_vars.yml for ethernet to true
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ ethernet_false }}"
+        replace: "{{ ethernet_true }}"
+      tags: Edit_base_vars
+    
+    - name: Editing base_vars.yml for infiniband to false
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ infiniband_true }}"
+        replace: "{{ infiniband_false }}"
+      tags: Edit_base_vars
+        
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+       
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+      tags: Execute_Validation_Script
+
+# Testcase OMNIA_1.1_MS_TC_009
+# Test Case to validate the execution of control_plane.yml with infiniband=true, powervault=false and ethernet=true
+- name: OMNIA_1.1_MS_TC_009
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_009
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - name: Editing base_vars.yml for powervault to false
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ powervault_true }}"
+        replace: "{{ powervault_false }}"
+      tags: Edit_base_vars
+    
+    - name: Editing base_vars.yml for ethernet to true
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ ethernet_false }}"
+        replace: "{{ ethernet_true }}"
+      tags: Edit_base_vars
+    
+    - name: Editing base_vars.yml for infiniband to true
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ infiniband_false }}"
+        replace: "{{ infiniband_true }}"
+      tags: Edit_base_vars
+        
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+       
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+      tags: Execute_Validation_Script
+
+# Testcase OMNIA_1.1_MS_TC_010
+# Test Case to validate the execution of control_plane.yml with infiniband=true, powervault=true and ethernet=false
+- name: OMNIA_1.1_MS_TC_010
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_010
+
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - name: Editing base_vars.yml for powervault to true
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ powervault_false }}"
+        replace: "{{ powervault_true }}"
+      tags: Edit_base_vars
+    
+    - name: Editing base_vars.yml for ethernet to false
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ ethernet_true }}"
+        replace: "{{ ethernet_false }}"
+      tags: Edit_base_vars
+    
+    - name: Editing base_vars.yml for infiniband to true
+      replace:
+        path: ../input_params/base_vars.yml
+        regexp: "{{ infiniband_false }}"
+        replace: "{{ infiniband_true }}"
+      tags: Edit_base_vars
+        
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+       
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+      tags: Execute_Validation_Script
+
+# Testcase OMNIA_1.1_MS_TC_011
+# Test Case to validate the execution of control_plane.yml with firmware update set to False
+- name: OMNIA_1.1_MS_TC_011
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_011
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+      
+    - name: Set firmware update to false
+      replace:
+        path: ../input_params/idrac_vars.yml
+        regexp: "{{ fw_update_true }}"
+        replace: "{{ fw_update_false }}"
+      tags: Set_FW_Update
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+       
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+      tags: Execute_Validation_Script
+      
+    - name: Check if firmware updates folder exists
+      stat:
+        path: /var/nfs_repo/dellupdates
+      register: fw_update_dir
+      tags: Set_FW_Update
+      
+    - name: Verify firmware updates were not downloaded
+      assert:
+        that:
+          - not fw_update_dir.stat.exists
+        success_msg: "{{ fw_success_validation }}"
+        fail_msg: "{{ fw_fail_validation }}"
+      tags: Set_FW_Update
+        
+# Testcase OMNIA_1.1_MS_TC_012
+# Test Case to validate the execution of control_plane.yml with firmware update set to true
+- name: OMNIA_1.1_MS_TC_012
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml   
+   
+  gather_subset:
+    - 'min'
+  tags: TC_012
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+      
+    - name: Set firmware update to true
+      replace:
+        path: ../input_params/idrac_vars.yml
+        regexp: "{{ fw_update_false }}"
+        replace: "{{ fw_update_true }}"
+      tags: Set_FW_Update
+        
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+     
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+      tags: Execute_control_plane
+       
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+      tags: Execute_Validation_Script
+      
+    - name: Check if firmware updates folder exists
+      stat:
+        path: /var/nfs_repo/dellupdates
+      register: fw_update_dir
+      tags: Set_FW_Update
+      
+    - name: Verify firmware updates were downloaded
+      assert:
+        that:
+          - fw_update_dir.stat.exists
+        success_msg: "{{ fw_success_validation }}"
+        fail_msg: "{{ fw_fail_validation }}"
+      tags: Set_FW_Update
+
+# Testcase OMNIA_1.1_MS_TC_013
+# Test Case to validate the execution of control_plane.yml with docker login credential
+- name: OMNIA_1.1_MS_TC_013
+  hosts: localhost
+  connection: local
+  vars_files:
+    - ../roles/control_plane_common/vars/main.yml  
+    - test_vars/test_control_plane_vars.yml  
+   
+  gather_subset:
+    - 'min'
+  tags: TC_013
+  
+  tasks:
+    - name: Check OS Version
+      assert:
+        that:
+          - 'ansible_distribution == "{{ os_name }}"'
+          - 'ansible_distribution_version == "{{ os_version }}"'
+        success_msg: "{{ check_os_success_msg }}"
+        fail_msg: "{{ check_os_fail_msg }}"
+      tags: Check_OS
+    
+    - name: Replace input parameters folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ input_params_folder }}"
+        force: yes
+        mode: '0644'
+      with_items:
+        - "{{ input_files_tc01 }}"
+      tags: Replace_input
+      
+    - name: Change docker params in omnia_config.yml
+      replace:
+        path: ../../omnia_config.yml
+        regexp: "docker_username: .*$"
+        replace: 'docker_username: "{{ docker_user }}"'
+      tags: Set_Docker_Creds
+    
+    - name: Assert if the credentials are valid in test_control_plane_vars.yml
+      assert:
+        that:
+          - 'docker_user != "User"'
+          - 'docker_password != "Password"'
+        success_msg: "{{ valid_docker_creds }}"
+        fail_msg: "{{ invalid_docker_creds }}"
+      tags: Set_Docker_Creds
+    
+    - name: Change docker params in omnia_config.yml
+      replace:
+        path: ../../omnia_config.yml
+        regexp: "docker_password: .*$"
+        replace: 'docker_password: "{{ docker_password }}"'
+      tags: Set_Docker_Creds
+    
+    - block:    
+        - name: Execute control_plane.yml playbook
+          command: ansible-playbook control_plane.yml
+          args:
+            chdir: "{{ control_plane_dir }}"
+          tags: Execute_control_plane
+    
+    - block:
+        - name: Execute default validation script
+          include_tasks: "{{ control_plane_validation_script_path }}"
+          tags: Execute_Validation_Script
+      
+    - name: Fetch docker info
+      shell: docker login & sleep 3
+      register: new
+      changed_when: false
+      tags: Set_Docker_Creds
+
+    - name: Assert that docker was used to pull images 
+      assert:
+        that:
+          - "'Login did not succeed' in new.stderr"
+        success_msg: "{{ docker_success_validation }}"
+        fail_msg: "{{ docker_fail_validation }}"
+      tags: Set_Docker_Creds

+ 271 - 0
control_plane/test/test_control_plane_validation.yml

@@ -0,0 +1,271 @@
+#  Copyright 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---  
+
+- block:
+
+    - name: Fetch Package info
+      package_facts:
+        manager: auto
+      
+    - name: Verify all packages are installed
+      assert:
+        that: "'{{ item }}' in ansible_facts.packages"
+        success_msg: "{{ install_package_success_msg }}"
+        fail_msg: "{{ install_package_fail_msg }}"
+      when: "'python-docker' not in item"
+      with_items: "{{ common_packages }}"
+      ignore_errors: true
+      
+    - name: Check login_vars is encrypted
+      command: cat {{ login_vars_filename }}
+      changed_when: false
+      register: config_content
+       
+    - name: Validate login file is encypted or not
+      assert:
+        that: "'$ANSIBLE_VAULT;' in config_content.stdout"
+        fail_msg: "{{ login_vars_fail_msg }}"
+        success_msg: "{{ login_vars_success_msg }}"
+            
+#  Installing a required package : JQ      
+    - name: Installing jq (JSON Query)
+      package:
+        name: "{{ test_package }}"
+        state: present
+           
+#  Checking if all the required pods are working
+    - name: Get pods info
+      shell: kubectl get pods --all-namespaces
+      register: all_pods_info
+          
+    - name: Check the count of pods
+      set_fact:
+         count: "{{ all_pods_info.stdout_lines|length - 1 }}"
+          
+    - name: Check if all the pods are running
+      assert:
+        that:
+          - "'Running' in all_pods_info.stdout_lines[{{ item }}]"
+        fail_msg: "{{ check_pods_fail_msg }}"
+        success_msg: "{{ check_pods_success_msg }}"
+      with_sequence: start=1 end={{ count }}
+      
+#  Checking if NFS Server is running and Custom ISO is created
+    - name: Get NFS Stat
+      shell: systemctl status nfs-idmapd
+      register: nfstat_info
+       
+    - name: Verify NFS Stat is running
+      assert:
+        that:
+          - "'Active: active (running)' in nfstat_info.stdout"
+        success_msg: "{{ nfs_share_success_msg }}"
+        fail_msg: "{{ nfs_share_fail_msg }}"
+        
+    - name: Check nfs mount point
+      stat:
+        path: "{{ nfs_mount_Path }}"
+      register: nfs_mount_info
+          
+    - name: Verify nfs share is mounted
+      assert:
+        that:
+          - "{{ nfs_mount_info.stat.exists }}"
+        success_msg: "{{ nfs_mount_success_msg }}"
+        fail_msg: "{{ nfs_mount_fail_msg }}"
+           
+    - name: Check Custom ISO
+      stat:
+        path: "{{ check_iso_path }}"
+      register: check_iso_info
+          
+    - name: Verify Custom ISO is created in the NFS repo
+      assert:
+        that:
+          - "{{ check_iso_info.stat.exists }}"
+        success_msg: "{{ check_iso_success_msg }}"
+        fail_msg: "{{ check_iso_fail_msg }}"
+      
+#  Checking if network-config container is running
+    
+    - name: Get Pod info for network-config
+      shell: |
+         crictl ps -o json | jq '.containers[] | select(.labels."io.kubernetes.pod.namespace" == "network-config" and .labels."io.kubernetes.container.name" == "mngmnt-network-container") | "\(.id) \(.metadata.name) \(.state)"'
+      register: network_config_pod_info
+          
+    - name: Get Pod Status for network-config
+      assert:
+        that:
+          - network_config_pod_info.stdout_lines | regex_search( "{{ container_info }}")
+        success_msg: "{{ network_config_pod_success_msg }}"
+        fail_msg: "{{ network_config_pod_fail_msg }}"
+         
+    - name: Get Pod facts
+      shell: |
+            crictl ps -o json | jq '.containers[] | select(.labels."io.kubernetes.pod.namespace" == "network-config" and .labels."io.kubernetes.container.name" == "mngmnt-network-container") | "\(.id)"'
+      register: network_config_pod_fact
+         
+    - name: Parse container id for the pods
+      set_fact: 
+        container_id: "{{ network_config_pod_fact.stdout[1:-1] }}"   
+          
+    - name: Check dhcpd,xinetd service is running
+      command: crictl exec {{ container_id }} systemctl is-active {{ item }}
+      changed_when: false
+      ignore_errors: yes
+      register: pod_service_check
+      with_items:
+        - dhcpd
+        - xinetd
+            
+    - name: Verify dhcpd, xinetd service is running
+      assert:
+        that:
+          - "'active' in pod_service_check.results[{{ item }}].stdout"
+          - "'inactive' not in pod_service_check.results[{{ item }}].stdout"
+          - "'unknown' not in pod_service_check.results[{{ item }}].stdout"
+        fail_msg: "{{ pod_service_check_fail_msg }}"
+        success_msg: "{{ pod_service_check_success_msg }}"
+      with_sequence: start=0 end={{ pod_service_check.results|length - 1 }}
+         
+# Checking if cobbler-container is running
+    - name: Get Pod info for cobbler
+      shell: |
+         crictl ps -o json | jq '.containers[] | select(.labels."io.kubernetes.pod.namespace" == "cobbler") | "\(.id) \(.metadata.name) \(.state)"'
+      register: network_config_pod_info
+      
+    - name: Get Pod Status for cobbler
+      assert:
+        that:
+          - network_config_pod_info.stdout_lines | regex_search( "{{ container_info }}")
+        success_msg: "{{ cobbler_pod_success_msg }}"
+        fail_msg: "{{ cobbler_pod_fail_msg }}"
+      
+    - name: Get Pod facts for cobbler
+      shell: |
+            crictl ps -o json | jq '.containers[] | select(.labels."io.kubernetes.pod.namespace" == "cobbler") | "\(.id)"'
+      register: network_config_pod_fact
+      
+    - name: Extract cobbler pod id
+      set_fact: 
+        cobbler_id: "{{ network_config_pod_fact.stdout[1:-1] }}"   
+      
+    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
+      command: crictl exec {{ cobbler_id }} systemctl is-active {{ item }}
+      changed_when: false
+      ignore_errors: yes
+      register: pod_service_check
+      with_items:
+        - dhcpd
+        - tftp
+        - xinetd
+        - cobblerd
+        
+    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
+      assert:
+        that:
+          - "'active' in pod_service_check.results[{{  item  }}].stdout"
+          - "'inactive' not in pod_service_check.results[{{  item  }}].stdout"
+          - "'unknown' not in pod_service_check.results[{{  item  }}].stdout"
+        fail_msg: "{{pod_service_check_fail_msg}}"
+        success_msg: "{{pod_service_check_success_msg}}"
+      with_sequence: start=0 end=3
+
+# Checking Cron-Jobs
+    - name: Check crontab list
+      command: crictl exec {{ cobbler_id }} crontab -l
+      changed_when: false
+      register: crontab_list
+
+    - name: Verify crontab list
+      assert:
+        that:
+          - "'* * * * * /usr/bin/ansible-playbook /root/tftp.yml' in crontab_list.stdout"
+          - "'*/5 * * * * /usr/bin/ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
+        fail_msg: "{{cron_jobs_fail_msg}}"
+        success_msg: "{{cron_jobs_success_msg}}"
+
+#  Checking subnet-manger pod is running and open sm is running 
+#  Comment if infiniband is not connected
+    - name: Fetch subnet-manager stats
+      shell: kubectl get pods -n subnet-manager 
+      register: sm_manager_info
+
+    - name: Verify subnet_manager container is running
+      assert:
+        that:
+          - "'Running' in sm_manager_info.stdout_lines[1]"
+        fail_msg: "{{subnet_manager_fail_msg}}"
+        success_msg: "{{subnet_manager_success_msg}}"
+
+# Checking awx pod is running
+
+    - name: Get Pod info for awx
+      shell: |
+         crictl ps -o json | jq '.containers[] | select(.labels."io.kubernetes.pod.namespace" == "awx") | "\(.id) \(.metadata.name) \(.state)"'
+      register: awx_config_pod_info
+           
+    - name: Get Pod Status for awx
+      assert:
+        that:
+          - awx_config_pod_info.stdout_lines[{{ item }}] | regex_search( "{{ container_info }}")
+        success_msg: "{{ awx_pod_success_msg }}"
+        fail_msg: "{{ awx_pod_fail_msg }}"
+      ignore_errors: yes
+      with_sequence: start=0 end={{ awx_config_pod_info.stdout_lines |length - 1 }}
+          
+    - name: Get pvc stats
+      shell: |
+          kubectl get pvc -n awx -o json |jq '.items[] | "\(.status.phase)"'
+      register: pvc_stats_info
+            
+    - name: Verify if pvc stats is running
+      assert:
+        that:
+          - "'Bound' in pvc_stats_info.stdout"
+        fail_msg: "{{ pvc_stat_fail_msg }}"
+        success_msg: "{{ pvc_stat_success_msg }}"
+      with_sequence: start=0 end={{ pvc_stats_info.stdout_lines |length|int - 1 }}
+            
+    - name: Get svc stats
+      shell: kubectl get svc -n awx awx-service -o json
+      register: svc_stats_info
+           
+    - name: Verify if svc is up and running
+      assert:
+        that:
+          - "'Error from server (NotFound):' not in svc_stats_info.stdout"
+        success_msg: "{{ svc_stat_success_msg }}"
+        fail_msg: "{{ svc_stat_fail_msg }}"
+             
+    - name: Fetch Cluster IP from svc
+      shell: |
+          kubectl get svc -n awx -o json | jq '.items[] | select(.metadata.name == "awx-service") | "\(.spec.clusterIP)"'
+      register: cluster_ip_info
+           
+    - name: Check if connection to svc Cluster IP is enabled
+      uri:
+        url: http://{{ cluster_ip_info.stdout[1:-1] }}
+        follow_redirects: none
+        method: GET
+      ignore_errors: yes
+      register: cluster_ip_conn
+           
+    - name: Verify connection to svc cluster is working
+      assert:
+        that:
+          - cluster_ip_conn.status == 200
+        success_msg: "{{ svc_conn_success_msg }} : {{ cluster_ip_info.stdout[1:-1] }}"
+        fail_msg: "{{ svc_conn_fail_msg }} : {{ cluster_ip_info.stdout[1:-1] }}"

+ 150 - 0
control_plane/test/test_vars/base_vars.yml

@@ -0,0 +1,150 @@
+# Copyright 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---
+
+# Path to directory hosting ansible config file (ansible.cfg file)
+# Default value is /etc/ansible
+# This directory is on the host running ansible, if ansible is installed using dnf
+# If ansible is installed using pip, this path should be set
+ansible_conf_file_path: /etc/ansible
+
+# This variable is used to enable ethernet switch configuration
+# It accepts boolean values "true" or "false". 
+# By default its value is "false".
+# If ethernet switch support is needed set this to "true"
+ethernet_switch_support: true
+
+# This variable is used to enable infiniband switch configuration
+# It accepts boolean values "true" or "false". 
+# By default its value is "false".
+# If infiniband configuration is needed set this to "true"
+ib_switch_support: true
+
+# This variable is used to enable powervault configuration
+# It accepts boolean values "true" or "false". 
+# By default its value is "false".
+# If powervault configuration is needed set this to "true"
+powervault_support: false
+
+# The nic/ethernet card that will be connected to the public internet.
+# Default value of nic is eno2
+public_nic: "eno2"
+
+# Kubernetes pod network CIDR for appliance k8s network
+# Make sure this value does not overlap with any of the host networks.
+# Default value is "192.168.0.0/16"
+appliance_k8s_pod_net_cidr: "192.168.0.0/16"
+
+### Usage: provision_idrac, network_ib, network_ethernet, powervault_me4 ###
+
+# The trap destination IP address is the IP address of the SNMP Server where the trap will be sent
+# If this variable is left blank, it means SNMP will be disabled
+# Provide a valid SNMP server IP
+snmp_trap_destination: ""
+
+# Provide the snmp community name needed
+# By default this is set to "public"
+snmp_community_name: "public"
+
+### Usage: webui_awx ###
+
+# Organization name that is created in AWX.
+# The default value is “DellEMC”
+awx_organization: "DellEMC"
+
+### Usage: provision_cobbler, provision_idrac ###
+
+# This variable is used to set node provisioning method
+# It accepts values: idrac, pxe
+# Default value is "idrac"
+# If provisioning needs to be done through cobbler, set it to "pxe"
+# If idrac license is not present, provisioning mode will be set to "pxe"
+provision_method: "idrac"
+
+# This is the timezone that will be set during provisioning of OS
+# Available timezones are provided in control_plane/common/files/timezone.txt
+# Default timezone will be "GMT"
+# Some of the other available timezones are EST,CET,MST,CST6CDT,PST8PDT
+timezone: "GMT"
+
+# This is the language that will be set during provisioning of the OS
+# Default language supported is "en-US"
+language: "en-US"
+
+# This is the path where the user has to place the iso image that needs to be provisioned in target nodes.
+# The iso file should be CentOS7-2009-minimal edition.
+# Other iso files are not supported.
+# Mandatory value required
+iso_file_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
+
+# Default lease time that will be used by dhcp
+# Its unit is seconds
+# Min: 21600 seconds
+# Default: 86400 seconds
+# Max: 31536000 seconds
+# Mandatory value required
+default_lease_time: "86400"
+
+### Usage: control_plane_device ###
+
+# The nic/ethernet card that needs to be connected to provision 
+# the fabric, idrac and powervault.
+# This nic will be configured by Omnia for the DHCP server.
+# Default value of nic is eno1
+mngmnt_network_nic: "eno1"
+
+# The dhcp range for assigning the IPv4 address
+# Example: 172.17.0.1
+# Mandatory value required
+mngmnt_network_dhcp_start_range: "172.19.0.101"
+mngmnt_network_dhcp_end_range: "172.19.0.200"
+
+# The mapping file consists of the MAC address and its respective IP address and Hostname.
+# The format of mapping file should be MAC,Hostname,IP and must be a CSV file.
+# Eg: xx:yy:zz:aa:bb:cc,hostname,172.17.0.5
+# A template for mapping file exists in omnia/examples and is named as mapping_device_file.csv.
+# This depicts the path where user has kept the mapping file for DHCP configurations.
+mngmnt_mapping_file_path: ""
+
+### Usage: provision_cobbler ###
+
+# The nic/ethernet card that needs to be connected to provision the OS of bare metal servers
+# This nic will be configured by Omnia for the DHCP server.
+# Default value of nic is eno3
+host_network_nic: "eno3"
+
+# The dhcp range for assigning the IPv4 address
+# Example: 172.17.0.1
+# Mandatory value required
+host_network_dhcp_start_range: "172.17.0.101"
+host_network_dhcp_end_range: "172.17.0.200"
+
+# The mapping file consists of the MAC address and its respective IP address and Hostname.
+# The format of mapping file should be MAC,Hostname,IP and must be a CSV file.
+# Eg: xx:yy:zz:aa:bb:cc,server,172.17.0.5,Group(if any)
+# A template for mapping file exists in omnia/examples and is named as mapping_host_file.csv.
+# This depicts the path where user has kept the mapping file for DHCP configurations.
+host_mapping_file_path: ""
+
+### Usage: control_plane_ib ###
+
+# The nic/ethernet card that needs to be connected to configure infiniband switch
+# This nic will be configured by Omnia for the DHCP server.
+# Default value of nic is ib0
+ib_network_nic: "ib0"
+
+# The dhcp range for assigning the IPv4 address
+# Example: 172.17.0.1
+ib_network_dhcp_start_range: "172.25.0.101"
+ib_network_dhcp_end_range: "172.25.0.200"

+ 81 - 0
control_plane/test/test_vars/login_vars.yml

@@ -0,0 +1,81 @@
+# Copyright 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---
+
+### Usage: provision_cobbler, provision_idrac ###
+
+# Password used while deploying OS on bare metal servers.
+# The Length of the password should be at least 8.
+# The password must not contain -,\, ',"
+# Mandatory value required
+provision_password: "test@123"
+
+### Usage: provision_cobbler ###
+
+# Password used for cobbler
+# The Length of the password should be at least 8.
+# The password must not contain -,\, ',"
+# Mandatory value required
+cobbler_password: "test@123"
+
+### Usage: provision_idrac ###
+
+# The username for idrac
+# The username must not contain -,\, ',"
+# Mandatory value required
+idrac_username: "root"
+
+# Password used for idrac
+# The password must not contain -,\, ',"
+# Mandatory value required
+idrac_password: "calvin"
+
+### Usage: webui_awx ###
+
+# Password used for awx UI
+# The Length of the password should be at least 8.
+# The password must not contain -,\, ',"
+#awx_password: ""
+
+### Usage: network_ethernet ###
+
+# The username for ethernet switch
+# The username must not contain -,\, ',"
+ethernet_switch_username: "admin"
+
+# Password used for ethernet switch
+# The password must not contain -,\, ',"
+ethernet_switch_password: "admin"
+
+### Usage: network_ib ###
+
+# The username for infiniband switch
+# The username must not contain -,\, ',"
+ib_username: "admin"
+
+# Password used for infiniband switch
+# The password must not contain -,\, ',"
+ib_password: "admin"
+
+### Usage: powervault_me4 ###
+
+# The username for powervault_me4
+# The username must not contain -,\, ',"
+powervault_me4_username: "manage"
+
+# Password used for powervault_me4
+# The password should have at least one uppercase character, one lowercase character,
+# one numeric character and one non-alphanumeric character.
+# The password must not contain -,\, ',", . , < , comma(,)
+powervault_me4_password: "Test@123"

+ 94 - 0
control_plane/test/test_vars/test_control_plane_vars.yml

@@ -0,0 +1,94 @@
+#  Copyright 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---
+
+#usage: test_control_plane_validation.yml
+
+port_no: 22
+os_name: CentOS
+os_version: '8.4'
+internet_status: "Failed. No Internet connection. Make sure network is up."
+check_os_success_msg: "OS and Version are supported"
+check_os_fail_msg: "Unsupported OS or OS version. OS should be {{ os_name }} and Version should be {{ os_version }} or more"
+
+input_params_folder: "../input_params/"
+control_plane_dir: "../"
+control_plane_validation_script_path: test_control_plane_validation.yml
+
+input_files_tc01:
+  - "test_vars/base_vars.yml"
+  - "test_vars/login_vars.yml"
+
+input_files_tc02:
+  - "test_vars/login_vars.yml"
+
+input_config_check_success_msg: "control_plane.yml validation passed"
+input_config_check_fail_msg: "control_plane.yml validation failed"
+
+install_package_success_msg: "{{item}} is installed"
+install_package_fail_msg: "{{item}} is not installed"
+login_vars_filename: "../input_params/login_vars.yml"
+login_vars_fail_msg: "Login vars is not encrypted"
+login_vars_success_msg: "Login vars is encrypted"
+
+fw_update_false: "firmware_update_required: false"
+fw_update_true: "firmware_update_required: true"
+fw_success_validation: "Validation Success for firmware update"
+fw_fail_validation: "Validation Failed for firmware update"
+docker_success_validation: "Docker Validated successfully"
+docker_fail_validation: "Docker not validated"
+
+test_package: 'jq'
+check_pods_success_msg: "Pod is running"
+check_pods_fail_msg: "Pod is not running"
+nfs_share_success_msg: "NFS Server is running"
+nfs_share_fail_msg: "NFS Server is not running"
+
+nfs_mount_Path: "/var/nfs_repo"
+nfs_mount_success_msg: "NFS repo is mounted"
+nfs_mount_fail_msg: "NFS repo is not mounted"
+check_iso_path: '/var/nfs_repo/unattended_centos7.iso'
+check_iso_success_msg: "ISO is present in the NFS repo"
+check_iso_fail_msg: "ISO is not present in the NFS repo"
+
+pod_service_check_fail_msg: "Service is not running"
+pod_service_check_success_msg: "Service is up and running"
+network_config_pod_success_msg: "Network-Config Pod is running"
+network_config_pod_fail_msg: "Network-Config Pod is not running"
+awx_pod_success_msg: "awx pod is up and running."
+awx_pod_fail_msg: "awx pod is not running"
+pvc_stat_success_msg: "pvc stat is running"
+pvc_stat_fail_msg: "pvc stat is not running"
+svc_stat_success_msg: "svc stat is running"
+svc_stat_fail_msg: "svc stat is not running"
+svc_conn_success_msg: "Connection to svc is successful at"
+svc_conn_fail_msg: "Connection to svc failed at"
+cobbler_pod_success_msg: "Cobbler service is running"
+cobbler_pod_fail_msg: "Cobbler service is not running"
+subnet_manager_success_msg: "Subnet Manager is running"
+subnet_manager_fail_msg: "Subnet Manager is not running"
+cron_jobs_success_msg: "Cron jobs are running"
+cron_jobs_fail_msg: "Cron jobs are not running"
+container_info: "CONTAINER_RUNNING"
+ethernet_true: "ethernet_switch_support: true"
+ethernet_false: "ethernet_switch_support: false"
+powervault_true: "powervault_support: true"
+powervault_false: "powervault_support: false"
+infiniband_true: "ib_switch_support: true"
+infiniband_false: "ib_switch_support: false"
+# Update
+docker_user: "User"
+docker_password: "Password"
+valid_docker_creds: "Credentials are valid"
+invalid_docker_creds: "Please input valid docker username and password in test_control_plane_vars.yml"