
Issue #153: TA - Bare metal provisioning using cobbler

Signed-off-by: abhishek-s-a <a_sa@dellteam.com>
John Lockman 4 years ago
parent
commit
b53d261f53

+ 0 - 3
appliance/test/cobbler_inventory

@@ -1,3 +0,0 @@
-[cobbler_servers]
-172.17.0.10
-100.98.24.231

+ 39 - 0
appliance/test/input_config_empty.yml

@@ -0,0 +1,39 @@
+#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---
+
+# Password used while deploying OS on bare metal servers and for Cobbler UI.
+# The length of the password must be more than 7 characters.
+# The password must not contain -, \, ', "
+provision_password: ""
+
+# Password used for the AWX UI.
+# The length of the password must be more than 7 characters.
+# The password must not contain -, \, ', "
+awx_password: ""
+
+# Password used for Slurm database.
+# The length of the password must be more than 7 characters.
+# The password must not contain -, \, ', "
+mariadb_password: ""
+
+# The nic/ethernet card that needs to be connected to the HPC switch.
+# This nic will be configured by Omnia for the DHCP server.
+# Default value of nic is em1.
+hpc_nic: "em1"
+
+# The nic card that needs to be connected to the public internet.
+# The public_nic should be em2, em1 or em3
+# Default value of nic is em2.
+public_nic: "em2"

+ 39 - 0
appliance/test/input_config_test.yml

@@ -0,0 +1,39 @@
+#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+---
+
+# Password used while deploying OS on bare metal servers and for Cobbler UI.
+# The length of the password must be more than 7 characters.
+# The password must not contain -, \, ', "
+provision_password: "omnia@123"
+
+# Password used for the AWX UI.
+# The length of the password must be more than 7 characters.
+# The password must not contain -, \, ', "
+awx_password: "omnia@123"
+
+# Password used for Slurm database.
+# The length of the password must be more than 7 characters.
+# The password must not contain -, \, ', "
+mariadb_password: "omnia@123"
+
+# The nic/ethernet card that needs to be connected to the HPC switch.
+# This nic will be configured by Omnia for the DHCP server.
+# Default value of nic is em1.
+hpc_nic: "em1"
+
+# The nic card that needs to be connected to the public internet.
+# The public_nic should be em2, em1 or em3
+# Default value of nic is em2.
+public_nic: "em2"

+ 3 - 0
appliance/test/provisioned_hosts.yml

@@ -0,0 +1,3 @@
+[all]
+172.17.0.10
+172.17.0.15

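This static inventory replaces the deleted appliance/test/cobbler_inventory; the tests now reach it through cobbler_groupname: "all" and inventory_file: "provisioned_hosts.yml" in test_provision_vars.yml further down. As a sketch, the reachability check wrapped by OMNIA_DIO_US_NDOD_TC_011 reduces to:

```yaml
# Sketch: ad-hoc ping of every provisioned host, as used in TC_011.
- name: Validate authentication of username and password
  command: "ansible all -m ping -i provisioned_hosts.yml"
  register: validate_login
  changed_when: false
  ignore_errors: yes
```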
The diff for this file has been suppressed because it is too large
+ 1333 - 15
appliance/test/test_common.yml


+ 365 - 67
appliance/test/test_provision_cc.yml

@@ -13,37 +13,204 @@
 #  limitations under the License.
 ---
 
-# Testcase OMNIA_DIO_US_CC_TC_010
+# Testcase OMNIA_DIO_US_CC_TC_004
 # Execute provision role in management station and verify cobbler configuration
-- name: OMNIA_DIO_US_CC_TC_010
+- name: OMNIA_DIO_US_CC_TC_004
   hosts: localhost
   connection: local
   vars_files:
     - test_vars/test_provision_vars.yml
     - ../roles/provision/vars/main.yml
   tasks:
+    - name: Check the iso file is present
+      stat:
+        path: "{{ iso_file_path }}/{{ iso_name }}"
+      register: iso_status
+      tags: TC_004
+
+    - name: Fail if iso file is missing
+      fail:
+        msg: "{{ iso_fail }}"
+      when: iso_status.stat.exists == false
+      tags: TC_004
+
     - name: Delete the cobbler container if exists
       docker_container:
         name: "{{ docker_container_name }}"
         state: absent
-      tags: TC_010
+      tags: TC_004
 
     - name: Delete docker image if exists
       docker_image:
         name: "{{ docker_image_name }}"
         tag: "{{ docker_image_tag }}"
         state: absent
-      tags: TC_010
+      tags: TC_004
 
     - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
+          vars:
+            input_config_filename: "{{ test_input_config_filename }}"
+
         - name: Call provision role
           include_role:
             name: ../roles/provision
+      tags: TC_004
+
+    - name: Check that the connection to cobbler UI returns status 200
+      uri:
+        url: https://localhost/cobbler_web
+        status_code: 200
+        return_content: yes
+        validate_certs: no
+      tags: TC_004,VERIFY_004
+
+    - name: Fetch cobbler version in cobbler container
+      command: docker exec {{ docker_container_name }} cobbler version
+      changed_when: false
+      register: cobbler_version
+      tags: TC_004,VERIFY_004
+
+    - name: Verify cobbler version
+      assert:
+        that:
+          - "'Cobbler' in cobbler_version.stdout"
+          - "'Error' not in cobbler_version.stdout"
+        fail_msg: "{{ cobbler_version_fail_msg }}"
+        success_msg: "{{ cobbler_version_success_msg }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Run cobbler check command in cobbler container
+      command: docker exec {{ docker_container_name }} cobbler check
+      changed_when: false
+      register: cobbler_check
+      tags: TC_004,VERIFY_004
+
+    - name: Verify cobbler check command output
+      assert:
+        that:
+          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
+          - "'Error' not in cobbler_check.stdout"
+        fail_msg: "{{ cobbler_check_fail_msg }}"
+        success_msg: "{{ cobbler_check_success_msg }}"
+      ignore_errors: yes
+      tags: TC_004,VERIFY_004
+
+    - name: Run cobbler sync command in cobbler container
+      command: docker exec {{ docker_container_name }} cobbler sync
+      changed_when: false
+      register: cobbler_sync
+      tags: TC_004,VERIFY_004
+
+    - name: Verify cobbler sync command output
+      assert:
+        that:
+          - "'TASK COMPLETE' in cobbler_sync.stdout"
+          - "'Fail' not in cobbler_sync.stdout"
+          - "'Error' not in cobbler_sync.stdout"
+        fail_msg: "{{ cobbler_sync_fail_msg }}"
+        success_msg: "{{ cobbler_sync_success_msg }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Fetch cobbler distro list
+      command: docker exec {{ docker_container_name }} cobbler distro list
+      changed_when: false
+      register: cobbler_distro_list
+      tags: TC_004,VERIFY_004
+
+    - name: Verify cobbler distro list
+      assert:
+        that:
+          - "'CentOS' in cobbler_distro_list.stdout"
+        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
+        success_msg: "{{ cobbler_distro_list_success_msg }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Fetch cobbler profile list
+      command: docker exec cobbler cobbler profile list
+      changed_when: false
+      register: cobbler_profile_list
+      tags: TC_004,VERIFY_004
+
+    - name: Verify cobbler profile list
+      assert:
+        that:
+          - "'CentOS' in cobbler_profile_list.stdout"
+        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
+        success_msg: "{{ cobbler_profile_list_success_msg }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Check kickstart file
+      shell: |
+        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exists" || echo "File does not exist"
+      changed_when: false
+      register: kickstart_file_status
+      tags: TC_004,VERIFY_004
+
+    - name: Verify kickstart file present
+      assert:
+        that:
+          - "'File exist' in kickstart_file_status.stdout"
+        fail_msg: "{{ kickstart_file_fail_msg }}"
+        success_msg: "{{ kickstart_file_success_msg }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Check crontab list
+      command: docker exec cobbler crontab -l
+      changed_when: false
+      register: crontab_list
+      tags: TC_004,VERIFY_004
+
+    - name: Verify crontab list
+      assert:
+        that:
+          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
+          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
+        fail_msg: "{{ crontab_list_fail_msg }}"
+        success_msg: "{{ crontab_list_success_msg }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Check tftp, dhcpd, xinetd, cobblerd services are running
+      command: docker exec cobbler systemctl is-active {{ item }}
+      changed_when: false
+      ignore_errors: yes
+      register: cobbler_service_check
+      with_items: "{{ cobbler_services }}"
+      tags: TC_004,VERIFY_004
+
+    - name: Verify tftp, dhcpd, xinetd, cobblerd services are running
+      assert:
+        that:
+          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
+          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
+          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
+        fail_msg: "{{ cobbler_service_check_fail_msg }}"
+        success_msg: "{{ cobbler_service_check_success_msg }}"
+      with_sequence: start=0 end=3
+      tags: TC_004,VERIFY_004
+
+# Testcase OMNIA_DIO_US_CC_TC_005
+# Execute provision role in management station where cobbler container is already configured
+- name: OMNIA_DIO_US_CC_TC_005
+  hosts: localhost
+  connection: local
+  vars_files:
+    - test_vars/test_provision_vars.yml
+    - ../roles/provision/vars/main.yml
+  tasks:
+    - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
           vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-      tags: TC_010
+            input_config_filename: "{{ test_input_config_filename }}"
+
+        - name: Call provision role
+          include_role:
+            name: ../roles/provision
+      tags: TC_005
 
     - name: Check that the connection to cobbler UI returns status 200
       uri:
@@ -51,13 +218,13 @@
         status_code: 200
         return_content: yes
         validate_certs: no
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Fetch cobbler version in cobbler container
       command: docker exec {{ docker_container_name }} cobbler version
       changed_when: false
       register: cobbler_version
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Verify cobbler version
       assert:
@@ -66,13 +233,13 @@
           - "'Error' not in cobbler_version.stdout"
         fail_msg: "{{ cobbler_version_fail_msg }}"
         success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Run cobbler check command in cobbler container
       command: docker exec {{ docker_container_name }} cobbler check
       changed_when: false
       register: cobbler_check
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Verify cobbler check command output
       assert:
@@ -82,13 +249,13 @@
         fail_msg: "{{ cobbler_check_fail_msg }}"
         success_msg: "{{ cobbler_check_success_msg }}"
       ignore_errors: yes
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Run cobbler sync command in cobbler container
       command: docker exec {{ docker_container_name }} cobbler sync
       changed_when: false
       register: cobbler_sync
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Verify cobbler sync command output
       assert:
@@ -98,13 +265,13 @@
           - "'Error' not in cobbler_sync.stdout"
         fail_msg: "{{ cobbler_sync_fail_msg }}"
         success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Fetch cobbler distro list
       command: docker exec {{ docker_container_name }} cobbler distro list
       changed_when: false
       register: cobbler_distro_list
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Verify cobbler distro list
       assert:
@@ -112,13 +279,13 @@
           - "'CentOS' in cobbler_distro_list.stdout"
         fail_msg: "{{ cobbler_distro_list_fail_msg }}"
         success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Fetch cobbler profile list
       command: docker exec cobbler cobbler profile list
       changed_when: false
       register: cobbler_profile_list
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Verify cobbler profile list
       assert:
@@ -126,14 +293,14 @@
           - "'CentOS' in cobbler_profile_list.stdout"
         fail_msg: "{{ cobbler_profile_list_fail_msg }}"
         success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Check kickstart file
       shell: |
         docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exists" || echo "File does not exist"
       changed_when: false
       register: kickstart_file_status
-      tags: TC_010
+      tags: TC_005,VERIFY_005
 
     - name: Verify kickstart file present
       assert:
@@ -141,11 +308,45 @@
           - "'File exist' in kickstart_file_status.stdout"
         fail_msg: "{{ kickstart_file_fail_msg }}"
         success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_010
+      tags: TC_005,VERIFY_005
+
+    - name: Check crontab list
+      command: docker exec cobbler crontab -l
+      changed_when: false
+      register: crontab_list
+      tags: TC_005,VERIFY_005
+
+    - name: Verify crontab list
+      assert:
+        that:
+          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
+          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
+        fail_msg: "{{ crontab_list_fail_msg }}"
+        success_msg: "{{ crontab_list_success_msg }}"
+      tags: TC_005,VERIFY_005
+
+    - name: Check tftp, dhcpd, xinetd, cobblerd services are running
+      command: docker exec cobbler systemctl is-active {{ item }}
+      changed_when: false
+      ignore_errors: yes
+      register: cobbler_service_check
+      with_items: "{{ cobbler_services }}"
+      tags: TC_005,VERIFY_005
 
-# Testcase OMNIA_DIO_US_CC_TC_011
+    - name: Verify tftp, dhcpd, xinetd, cobblerd services are running
+      assert:
+        that:
+          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
+          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
+          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
+        fail_msg: "{{ cobbler_service_check_fail_msg }}"
+        success_msg: "{{ cobbler_service_check_success_msg }}"
+      with_sequence: start=0 end=3
+      tags: TC_005,VERIFY_005
+
+# Testcase OMNIA_DIO_US_CC_TC_006
 # Execute provision role in management station where one container is already present
-- name: OMNIA_DIO_US_CC_TC_011
+- name: OMNIA_DIO_US_CC_TC_006
   hosts: localhost
   connection: local
   vars_files:
@@ -156,21 +357,21 @@
       docker_container:
         name: "{{ docker_container_name }}"
         state: absent
-      tags: TC_011
+      tags: TC_006
 
     - name: Delete docker image if exists
       docker_image:
         name: "{{ docker_image_name }}"
         tag: "{{ docker_image_tag }}"
         state: absent
-      tags: TC_011
+      tags: TC_006
 
     - name: Create docker image
       docker_image:
         name: ubuntu
         tag: latest
         source: pull
-      tags: TC_011
+      tags: TC_006
 
     - name: Create docker container
       command: docker run -dit ubuntu
@@ -178,17 +379,19 @@
       changed_when: true
       args:
         warn: false
-      tags: TC_011
+      tags: TC_006
 
     - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
+          vars:
+            input_config_filename: "{{ test_input_config_filename }}"
+
         - name: Call provision role
           include_role:
             name: ../roles/provision
-          vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-      tags: TC_011
+      tags: TC_006
 
     - name: Check that the connection to cobbler UI returns status 200
       uri:
@@ -196,13 +399,13 @@
         status_code: 200
         return_content: yes
         validate_certs: no
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Fetch cobbler version in cobbler container
       command: docker exec {{ docker_container_name }} cobbler version
       changed_when: false
       register: cobbler_version
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Verify cobbler version
       assert:
@@ -211,13 +414,13 @@
           - "'Error' not in cobbler_version.stdout"
         fail_msg: "{{ cobbler_version_fail_msg }}"
         success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Run cobbler check command in cobbler container
       command: docker exec {{ docker_container_name }} cobbler check
       changed_when: false
       register: cobbler_check
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Verify cobbler check command output
       assert:
@@ -227,13 +430,13 @@
         fail_msg: "{{ cobbler_check_fail_msg }}"
         success_msg: "{{ cobbler_check_success_msg }}"
       ignore_errors: yes
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Run cobbler sync command in cobbler container
       command: docker exec {{ docker_container_name }} cobbler sync
       changed_when: false
       register: cobbler_sync
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Verify cobbler sync command output
       assert:
@@ -243,13 +446,13 @@
           - "'Error' not in cobbler_sync.stdout"
         fail_msg: "{{ cobbler_sync_fail_msg }}"
         success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Fetch cobbler distro list
       command: docker exec {{ docker_container_name }} cobbler distro list
       changed_when: false
       register: cobbler_distro_list
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Verify cobbler distro list
       assert:
@@ -257,13 +460,13 @@
           - "'CentOS' in cobbler_distro_list.stdout"
         fail_msg: "{{ cobbler_distro_list_fail_msg }}"
         success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Fetch cobbler profile list
       command: docker exec cobbler cobbler profile list
       changed_when: false
       register: cobbler_profile_list
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Verify cobbler profile list
       assert:
@@ -271,14 +474,14 @@
           - "'CentOS' in cobbler_profile_list.stdout"
         fail_msg: "{{ cobbler_profile_list_fail_msg }}"
         success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Check kickstart file
       shell: |
         docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exists" || echo "File does not exist"
       changed_when: false
       register: kickstart_file_status
-      tags: TC_011
+      tags: TC_006,VERIFY_006
 
     - name: Verify kickstart file present
       assert:
@@ -286,23 +489,57 @@
           - "'File exist' in kickstart_file_status.stdout"
         fail_msg: "{{ kickstart_file_fail_msg }}"
         success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_011
+      tags: TC_006,VERIFY_006
+
+    - name: Check crontab list
+      command: docker exec cobbler crontab -l
+      changed_when: false
+      register: crontab_list
+      tags: TC_006,VERIFY_006
+
+    - name: Verify crontab list
+      assert:
+        that:
+          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
+          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
+        fail_msg: "{{ crontab_list_fail_msg }}"
+        success_msg: "{{ crontab_list_success_msg }}"
+      tags: TC_006,VERIFY_006
+
+    - name: Check tftp, dhcpd, xinetd, cobblerd services are running
+      command: docker exec cobbler systemctl is-active {{ item }}
+      changed_when: false
+      ignore_errors: yes
+      register: cobbler_service_check
+      with_items: "{{ cobbler_services }}"
+      tags: TC_006,VERIFY_006
+
+    - name: Verify tftp, dhcpd, xinetd, cobblerd services are running
+      assert:
+        that:
+          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
+          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
+          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
+        fail_msg: "{{ cobbler_service_check_fail_msg }}"
+        success_msg: "{{ cobbler_service_check_success_msg }}"
+      with_sequence: start=0 end=3
+      tags: TC_006,VERIFY_006
 
     - name: Delete the ubuntu container
       docker_container:
         name: "{{ create_docker_container.stdout }}"
         state: absent
-      tags: TC_011
+      tags: TC_006
 
     - name: Delete the ubuntu image
       docker_image:
         name: ubuntu
         state: absent
-      tags: TC_011
+      tags: TC_006
 
-# Testcase OMNIA_DIO_US_CC_TC_012
+# Testcase OMNIA_DIO_US_CC_TC_007
 # Execute provision role in management station and reboot management station
-- name: OMNIA_DIO_US_CC_TC_012
+- name: OMNIA_DIO_US_CC_TC_007
   hosts: localhost
   connection: local
   vars_files:
@@ -310,54 +547,115 @@
     - ../roles/provision/vars/main.yml
   tasks:
     - name: Check last uptime of the server
-      shell: |
-        current_time=$(date +"%Y-%m-%d %H")
-        uptime -s | grep "$current_time"
+      command: uptime -s
       register: uptime_status
       changed_when: false
       ignore_errors: yes
-      tags: TC_012
+      tags: TC_007
+
+    - name: Check current date
+      command: date +"%Y-%m-%d %H"
+      register: current_time
+      changed_when: false
+      ignore_errors: yes
+      tags: TC_007
 
     - name: Delete the cobbler container if exists
       docker_container:
         name: "{{ docker_container_name }}"
         state: absent
-      when: uptime_status.stdout|length < 1
-      tags: TC_012
+      when: current_time.stdout not in uptime_status.stdout
+      tags: TC_007
 
     - name: Delete docker image if exists
       docker_image:
         name: "{{ docker_image_name }}"
         tag: "{{ docker_image_tag }}"
         state: absent
-      when: uptime_status.stdout|length < 1
-      tags: TC_012
+      when: current_time.stdout not in uptime_status.stdout
+      tags: TC_007
 
     - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
+          vars:
+            input_config_filename: "{{ test_input_config_filename }}"
+
         - name: Call provision role
           include_role:
             name: ../roles/provision
-          vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-      when: uptime_status.stdout|length < 1
-      tags: TC_012
+      when: current_time.stdout not in uptime_status.stdout
+      tags: TC_007
 
     - name: Reboot localhost
       command: reboot
-      when: uptime_status.stdout|length < 1
-      tags: TC_012
+      when: current_time.stdout not in uptime_status.stdout
+      tags: TC_007
 
     - name: Inspect cobbler container
       docker_container_info:
         name: "{{ docker_container_name }}"
       register: cobbler_cnt_status
-      tags: TC_012
+      tags: TC_007,VERIFY_007
 
     - name: Verify cobbler container is running after reboot
       assert:
         that: "'running' in cobbler_cnt_status.container.State.Status"
         fail_msg: "{{ cobbler_reboot_fail_msg }}"
         success_msg: "{{ cobbler_reboot_success_msg }}"
-      tags: TC_012
+      tags: TC_007,VERIFY_007
+
+# Testcase OMNIA_DIO_US_CC_TC_008
+# Execute provision role in management station with the CentOS ISO file not present in the files folder of the provision role
+- name: OMNIA_DIO_US_CC_TC_008
+  hosts: localhost
+  connection: local
+  vars_files:
+    - test_vars/test_provision_vars.yml
+    - ../roles/provision/vars/main.yml
+  tasks:
+    - name: Check the iso file is present
+      stat:
+        path: "{{ iso_file_path }}/{{ iso_name }}"
+      register: iso_status
+      tags: TC_008
+
+    - name: Copy iso file to different name
+      copy:
+        src: "{{ iso_file_path }}/{{ iso_name }}"
+        dest: "{{ iso_file_path }}/{{ temp_iso_name }}"
+      when: iso_status.stat.exists == true
+      tags: TC_008
+
+    - name: Delete iso file
+      file:
+        path: "{{ iso_file_path }}/{{ iso_name }}"
+        state: "absent"
+      when: iso_status.stat.exists == true
+      tags: TC_008
+
+    - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
+          vars:
+            input_config_filename: "{{ test_input_config_filename }}"
+
+        - name: Call provision role
+          include_role:
+            name: ../roles/provision
+      rescue:
+        - name: Validate iso missing error
+          assert:
+            that: iso_fail in iso_file_check.msg
+            success_msg: "{{ iso_check_success_msg }}"
+            fail_msg: "{{ iso_check_fail_msg }}"
+      tags: TC_008
+
+    - name: Copy iso file to old name
+      copy:
+        src: "{{ iso_file_path }}/{{ temp_iso_name }}"
+        dest: "{{ iso_file_path }}/{{ iso_name }}"
+      when: iso_status.stat.exists == true
+      tags: TC_008

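A pattern worth noting in the hunks above: the service check registers one result per entry of cobbler_services (tftp, dhcpd, cobblerd, xinetd), and the follow-up assert walks those results with with_sequence: start=0 end=3, indexing cobbler_service_check.results positionally. An equivalent sketch that loops over the registered results directly, without the hard-coded index range, would be:

```yaml
# Sketch: iterate the registered results instead of indexing by sequence.
- name: Verify tftp, dhcpd, xinetd, cobblerd services are running
  assert:
    that:
      - "'active' in item.stdout"
      - "'inactive' not in item.stdout"
      - "'unknown' not in item.stdout"
    fail_msg: "{{ cobbler_service_check_fail_msg }}"
    success_msg: "{{ cobbler_service_check_success_msg }}"
  with_items: "{{ cobbler_service_check.results }}"
```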
+ 83 - 322
appliance/test/test_provision_cdip.yml

@@ -14,7 +14,7 @@
 ---
 
 # Testcase OMNIA_DIO_US_CDIP_TC_001
-# Execute provison role in management station with cobbler as empty
+# Execute provision role in management station with CentOS 7 as the installed OS
 - name: OMNIA_DIO_US_CDIP_TC_001
   hosts: localhost
   connection: local
@@ -36,320 +36,25 @@
       tags: TC_001
 
     - block:
-        - name: Test cobbler password with empty string
+        - name: Call common role
           include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ empty_password }}"
-            admin_password_confirm: "{{ empty_password }}"
-      rescue:
-        - name: Validate failure message
-          assert:
-            that: fail_msg_pwd_format in msg_pwd_format.msg
-            success_msg: "{{ validate_password_success_msg }}"
-            fail_msg: "{{ validate_password_fail_msg }}"
-      tags: TC_001
-
-# Testcase OMNIA_DIO_US_CDIP_TC_002
-# Execute provison role in management station with cobbler password of length 8 characters
-- name: OMNIA_DIO_US_CDIP_TC_002
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_002
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_002
-
-    - block:
-        - name: Test cobbler password with 8 characters
-          include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-      always:
-        - name: Validate success message
-          assert:
-            that:  success_msg_pwd_format in msg_pwd_format.msg
-            success_msg: "{{ validate_password_success_msg }}"
-            fail_msg: "{{ validate_password_fail_msg }}"
-      tags: TC_002
-
-# Testcase OMNIA_DIO_US_CDIP_TC_003
-# Execute provison role in management station with cobbler password of length greather than 15 characters
-- name: OMNIA_DIO_US_CDIP_TC_003
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_003
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_003
-
-    - block:
-        - name: Test cobbler password with lengthy string
-          include_role:
-             name: ../roles/provision
-             tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ lengthy_password }}"
-            admin_password_confirm: "{{ lengthy_password }}"
-      always:
-        - name: Validate success message
-          assert:
-            that:  success_msg_pwd_format in msg_pwd_format.msg
-            success_msg: "{{ validate_password_success_msg }}"
-            fail_msg: "{{ validate_password_fail_msg }}"
-      tags: TC_003
-
-# Testcase OMNIA_DIO_US_CDIP_TC_004
-# Execute provison role in management station with cobbler password contains white spaces
-- name: OMNIA_DIO_US_CDIP_TC_004
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_004
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_004
-
-    - block:
-        - name: Test cobbler password with string contains white space
-          include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ whitespace_password }}"
-            admin_password_confirm: "{{ whitespace_password }}"
-      always:
-        - name: Validate success message
-          assert:
-            that:  success_msg_pwd_format in msg_pwd_format.msg
-            success_msg: "{{ validate_password_success_msg }}"
-            fail_msg: "{{ validate_password_fail_msg }}"
-      tags: TC_004
-
-# Testcase OMNIA_DIO_US_CDIP_TC_005
-# Execute provison role in management station with cobbler password as string with special characters
-- name: OMNIA_DIO_US_CDIP_TC_005
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_005
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_005
-
-    - block:
-        - name: Test cobbler password with string contains special characters
-          include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ special_character_password }}"
-            admin_password_confirm: "{{ special_character_password }}"
-      always:
-        - name: Validate success message
-          assert:
-            that:  success_msg_pwd_format in msg_pwd_format.msg
-            success_msg: "{{ validate_password_success_msg }}"
-            fail_msg: "{{ validate_password_success_msg }}"
-      tags: TC_005
-
-# Testcase OMNIA_DIO_US_CDIP_TC_006
-# Execute provison role in management station with cobbler password and cobbler password confirm having unequal values
-- name: OMNIA_DIO_US_CDIP_TC_006
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_006
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_006
-
-    - block:
-        - name: Test cobbler password with unequal values
-          include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ lengthy_password }}"
-      rescue:
-        - name: Validate failure message
-          assert:
-            that:  fail_msg_pwd_confirm in msg_pwd_confirm.msg
-            success_msg: "{{ validate_password_success_msg }}"
-            fail_msg: "{{ validate_password_success_msg }}"
-      tags: TC_006
-
-# Testcase OMNIA_DIO_US_CDIP_TC_007
-# Execute provison role in management station where docker service not running
-- name: OMNIA_DIO_US_CDIP_TC_007
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_007
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_007
-
-    - name: Stop docker service
-      service:
-        name: docker
-        state: stopped
-      tags: TC_007
-
-    - block:
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
+            name: ../roles/common
           vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-
-        - name: Docker service stopped usecase fail message
-          fail:
-            msg: "{{ docker_check_fail_msg }}"
-      rescue:
-        - name: Docker service stopped usecase success message
-          debug:
-            msg: "{{ docker_check_success_msg }}"
-      always:
-        - name: Start docker service
-          service:
-            name: docker
-            state: started
-      tags: TC_007
-
-# Testcase OMNIA_DIO_US_CDIP_TC_008
-# Execute provison role in management station with os installed centos 8.2
-- name: OMNIA_DIO_US_CDIP_TC_008
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_008
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_008
+            input_config_filename: "{{ test_input_config_filename }}"
 
-    - block:
         - name: Call provision role
           include_role:
             name: ../roles/provision
             tasks_from: "{{ item }}"
           with_items:
            - "{{ cobbler_image_files }}"
-          vars:
-            no_prompt: true
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-      tags: TC_008
+      tags: TC_001
 
     - name: Inspect cobbler docker image
       docker_image_info:
         name: "{{ docker_image_name }}"
       register: cobbler_image_status
-      tags: TC_008
+      tags: TC_001,VERIFY_001
 
     - name: Validate cobbler docker image
       assert:
@@ -357,13 +62,13 @@
           - cobbler_image_status.images
         fail_msg: "{{ cobbler_img_fail_msg }}"
         success_msg: "{{ cobbler_img_success_msg }}"
-      tags: TC_008
+      tags: TC_001,VERIFY_001
 
     - name: Inspect cobbler container
       docker_container_info:
         name: "{{ docker_container_name }}"
       register: cobbler_cnt_status
-      tags: TC_008
+      tags: TC_001,VERIFY_001
 
     - name: Validate cobbler docker container
       assert:
@@ -371,7 +76,7 @@
           - cobbler_cnt_status.exists
         fail_msg: "{{ cobbler_cnt_fail_msg }}"
         success_msg: "{{ cobbler_cnt_success_msg }}"
-      tags: TC_008
+      tags: TC_001,VERIFY_001
 
     - name: Validate first NIC is not assigned to public internet
       shell: |
@@ -382,18 +87,18 @@
         executable: /bin/bash
       failed_when: first_nic in nic_output.stdout
       changed_when: false
-      tags: TC_008
+      tags: TC_001,VERIFY_001
 
     - name: "Validate NIC-1 is assigned to IP {{ nic1_ip_address }}"
       assert:
-        that: "'{{ nic1_ip_address }}' in ansible_eno1.ipv4.address"
+        that: "'{{ nic1_ip_address }}' in ansible_em1.ipv4.address"
         fail_msg: "{{ nic_check_fail_msg }}"
         success_msg: "{{ nic_check_success_msg }}"
-      tags: TC_008
+      tags: TC_001,VERIFY_001
 
-# Testcase OMNIA_DIO_US_CDIP_TC_009
+# Testcase OMNIA_DIO_US_CDIP_TC_002
 # Execute provision role in management station where cobbler container and image are already created
-- name: OMNIA_DIO_US_CDIP_TC_009
+- name: OMNIA_DIO_US_CDIP_TC_002
   hosts: localhost
   connection: local
   vars_files:
@@ -401,21 +106,22 @@
     - ../roles/provision/vars/main.yml
   tasks:
     - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
+          vars:
+            input_config_filename: "{{ test_input_config_filename }}"
+
         - name: Call provision role
           include_role:
             name: ../roles/provision
-          vars:
-            no_prompt: true
-            username: "{{ cobbler_username }}"
-            admin_password: "{{ boundary_password }}"
-            admin_password_confirm: "{{ boundary_password }}"
-      tags: TC_009
+      tags: TC_002
 
     - name: Inspect cobbler docker image
       docker_image_info:
         name: "{{ docker_image_name }}"
       register: cobbler_image_status
-      tags: TC_009
+      tags: TC_002,VERIFY_002
 
     - name: Validate cobbler docker image
       assert:
@@ -423,13 +129,13 @@
           - cobbler_image_status.images
         fail_msg: "{{ cobbler_img_fail_msg }}"
         success_msg: "{{ cobbler_img_success_msg }}"
-      tags: TC_009
+      tags: TC_002,VERIFY_002
 
     - name: Inspect cobbler container
       docker_container_info:
         name: "{{ docker_container_name }}"
       register: cobbler_cnt_status
-      tags: TC_009
+      tags: TC_002,VERIFY_002
 
     - name: Validate cobbler docker container
       assert:
@@ -437,7 +143,7 @@
           - cobbler_cnt_status.exists
         fail_msg: "{{ cobbler_cnt_fail_msg }}"
         success_msg: "{{ cobbler_cnt_success_msg }}"
-      tags: TC_009
+      tags: TC_002,VERIFY_002
 
     - name: Validate first NIC is not assigned to public internet
       shell: |
@@ -448,11 +154,66 @@
         executable: /bin/bash
       failed_when: first_nic in nic_output.stdout
       changed_when: false
-      tags: TC_009
+      tags: TC_002,VERIFY_002
 
     - name: "Validate NIC-1 is assigned to IP {{ nic1_ip_address }}"
       assert:
-        that: "'{{ nic1_ip_address }}' in ansible_eno1.ipv4.address"
+        that: "'{{ nic1_ip_address }}' in ansible_em1.ipv4.address"
         fail_msg: "{{ nic_check_fail_msg }}"
         success_msg: "{{ nic_check_success_msg }}"
-      tags: TC_009
+      tags: TC_002,VERIFY_002
+
+# Testcase OMNIA_DIO_US_CDIP_TC_003
+# Execute provision role in management station where docker service is not running
+- name: OMNIA_DIO_US_CDIP_TC_003
+  hosts: localhost
+  connection: local
+  vars_files:
+    - test_vars/test_provision_vars.yml
+    - ../roles/provision/vars/main.yml
+  tasks:
+    - name: Delete the cobbler container if exists
+      docker_container:
+        name: "{{ docker_container_name }}"
+        state: absent
+      tags: TC_003
+
+    - name: Delete docker image if exists
+      docker_image:
+        name: "{{ docker_image_name }}"
+        tag: "{{ docker_image_tag }}"
+        state: absent
+      tags: TC_003
+
+    - name: Stop docker service
+      service:
+        name: docker
+        state: stopped
+      tags: TC_003
+
+    - block:
+        - name: Call common role
+          include_role:
+            name: ../roles/common
+          vars:
+            input_config_filename: "{{ test_input_config_filename }}"
+
+        - name: Call provision role
+          include_role:
+            name: ../roles/provision
+
+        - name: Docker service stopped usecase success message
+          debug:
+            msg: "{{ docker_check_success_msg }}"
+
+      rescue:
+        - name: Docker service stopped usecase fail message
+          fail:
+            msg: "{{ docker_check_fail_msg }}"
+
+      always:
+        - name: Start docker service
+          service:
+            name: docker
+            state: started
+      tags: TC_003

+ 90 - 41
appliance/test/test_provision_ndod.yml

@@ -13,44 +13,61 @@
 #  limitations under the License.
 ---
 
-# OMNIA_DIO_US_NDOD_TC_013
-# Execute provison role in management station and  PXE boot one compute node 
-- name: OMNIA_DIO_US_NDOD_TC_013
+# OMNIA_DIO_US_NDOD_TC_009
+# Execute provision role in management station and PXE boot one compute node
+- name: OMNIA_DIO_US_NDOD_TC_009
   hosts: localhost
   connection: local
   gather_subset:
     - 'min'
   vars_files:
     - test_vars/test_provision_vars.yml
+    - ../roles/common/vars/main.yml
   tasks:
     - name: Set ip address of the compute node
       set_fact:
         single_node_ip_address: "{{ groups[cobbler_groupname][0] }}"
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - name: Delete inventory if exists
       file:
         path: inventory
         state: absent
-      tags: TC_013
+      tags: TC_009,VERIFY_009
+
+    - name: Check input config file is encrypted
+      command: cat {{ test_input_config_filename }}
+      changed_when: false
+      register: config_content
+      tags: TC_009,VERIFY_009
+
+    - name: Decrypt input_config.yml
+      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
+      changed_when: false
+      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
+      tags: TC_009,VERIFY_009
+
+    - name: Include variable file input_config.yml
+      include_vars: "{{ test_input_config_filename }}"
+      tags: TC_009,VERIFY_009
 
     - name: Create inventory file
       lineinfile:
         path: inventory
-        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ boundary_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
+        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
         create: yes
         mode: '{{ file_permission }}'
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - meta: refresh_inventory
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - name: Validate authentication of username and password
       command: ansible {{ single_node_ip_address }} -m ping -i inventory
       register: validate_login
       changed_when: false
       ignore_errors: yes
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - name: Validate the authentication output
       assert:
@@ -60,31 +77,31 @@
           - "'UNREACHABLE' not in validate_login.stdout"
         fail_msg: "{{ authentication_fail_msg }}"
         success_msg: "{{ authentication_success_msg }}"
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - name: Check hostname
       command: ansible {{ single_node_ip_address }} -m shell -a hostname -i inventory
       register: validate_hostname
       changed_when: false
       ignore_errors: yes
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - name: Validate the hostname
       assert:
         that: "'localhost' not in validate_hostname.stdout"
         fail_msg: "{{ hostname_fail_msg }}"
         success_msg: "{{ hostname_success_msg }}"
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
     - name: Delete inventory if exists
       file:
         path: inventory
         state: absent
-      tags: TC_013
+      tags: TC_009,VERIFY_009
 
-# OMNIA_DIO_US_NDOD_TC_014
+# OMNIA_DIO_US_NDOD_TC_010
 # Execute provision role in management station and PXE boot two compute nodes
-- name: OMNIA_DIO_US_NDOD_TC_014
+- name: OMNIA_DIO_US_NDOD_TC_010
   hosts: localhost
   connection: local
   gather_subset:
@@ -97,7 +114,23 @@
       file:
         path: inventory
         state: absent
-      tags: TC_014
+      tags: TC_010,VERIFY_010
+
+    - name: Check input config file is encrypted
+      command: cat {{ test_input_config_filename }}
+      changed_when: false
+      register: config_content
+      tags: TC_010,VERIFY_010
+
+    - name: Decrypt input_config.yml
+      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
+      changed_when: false
+      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
+      tags: TC_010,VERIFY_010
+
+    - name: Include variable file input_config.yml
+      include_vars: "{{ test_input_config_filename }}"
+      tags: TC_010,VERIFY_010
 
     - name: Create inventory file
       lineinfile:
@@ -105,18 +138,18 @@
         line: "[nodes]"
         create: yes
         mode: '{{ file_permission }}'
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
     - name: Edit inventory file
       lineinfile:
         path: inventory
-        line: "{{ item }} ansible_user=root ansible_password={{ boundary_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
+        line: "{{ item }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
       with_items:
         - "{{ groups[cobbler_groupname] }}"
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
     - meta: refresh_inventory
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
     - name: Validate ip address is different for both servers
       assert:
@@ -125,14 +158,14 @@
         success_msg: "{{ ip_address_success_msg }}"
       delegate_to: localhost
       run_once: yes
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
     - name: Check hostname of both servers
       command: ansible nodes -m shell -a hostname -i inventory
       register: node_hostname
       changed_when: false
       ignore_errors: yes
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
     - name: Validate hostname is different for both servers
       assert:
@@ -144,7 +177,7 @@
         success_msg: "{{ hostname_success_msg }}"
       delegate_to: localhost
       run_once: yes
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
     - name: Delete inventory if exists
       file:
@@ -152,11 +185,11 @@
         state: absent
       delegate_to: localhost
       run_once: yes
-      tags: TC_014
+      tags: TC_010,VERIFY_010
 
-# OMNIA_DIO_US_NDOD_TC_015
+# OMNIA_DIO_US_NDOD_TC_011
 # Validate whether passwordless ssh connection is established with compute nodes
-- name: OMNIA_DIO_US_NDOD_TC_015
+- name: OMNIA_DIO_US_NDOD_TC_011
   hosts: localhost
   gather_subset:
     - 'min'
@@ -165,11 +198,11 @@
     - ../roles/provision/vars/main.yml
   tasks:
     - name: Validate authentication of username and password
-      command: "ansible {{ cobbler_groupname }} -m ping -i cobbler_inventory"
+      command: "ansible {{ cobbler_groupname }} -m ping -i {{ inventory_file }}"
       register: validate_login
       changed_when: false
       ignore_errors: yes
-      tags: TC_015
+      tags: TC_011,VERIFY_011
 
     - name: Validate the passwordless SSH connection
       assert:
@@ -179,11 +212,11 @@
           - "'UNREACHABLE' not in validate_login.stdout"
         success_msg: "{{ authentication_success_msg }}"
         fail_msg: "{{ authentication_fail_msg }}"
-      tags: TC_015
+      tags: TC_011,VERIFY_011
 
-# OMNIA_DIO_US_NDOD_TC_016
+# OMNIA_DIO_US_NDOD_TC_012
 # Execute provision role in management station and reboot compute node after OS is provisioned
-- name: OMNIA_DIO_US_NDOD_TC_016
+- name: OMNIA_DIO_US_NDOD_TC_012
   hosts: localhost
   connection: local
   gather_subset:
@@ -194,13 +227,29 @@
     - name: Set ip address of the compute node
       set_fact:
         single_node_ip_address: "{{ groups[cobbler_groupname][0] }}"
-      tags: TC_016
+      tags: TC_012,VERIFY_012
 
     - name: Delete inventory if exists
       file:
         path: inventory
         state: absent
-      tags: TC_016
+      tags: TC_012,VERIFY_012
+
+    - name: Check input config file is encrypted
+      command: cat {{ test_input_config_filename }}
+      changed_when: false
+      register: config_content
+      tags: TC_012,VERIFY_012
+
+    - name: Decrypt input_config.yml
+      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
+      changed_when: false
+      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
+      tags: TC_012,VERIFY_012
+
+    - name: Include variable file input_config.yml
+      include_vars: "{{ test_input_config_filename }}"
+      tags: TC_012,VERIFY_012
 
     - name: Create inventory file
       lineinfile:
@@ -208,38 +257,38 @@
         line: "[nodes]"
         create: yes
         mode: '{{ file_permission }}'
-      tags: TC_016
+      tags: TC_012,VERIFY_012
 
     - name: Edit inventory file
       lineinfile:
         path: inventory
-        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ boundary_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-      tags: TC_016
+        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
+      tags: TC_012,VERIFY_012
 
     - meta: refresh_inventory
-      tags: TC_016
+      tags: TC_012,VERIFY_012
 
     - name: Reboot servers
       command: ansible nodes -m command -a reboot -i inventory
       ignore_errors: yes
       changed_when: true
-      tags: TC_016
+      tags: TC_012,VERIFY_012
 
     - name: Wait for 10 minutes
       pause:
         minutes: 10
-      tags: TC_016
+      tags: TC_012,VERIFY_012
 
     - name: Check ip address of servers
       command: ansible nodes -m command -a 'ip a' -i inventory
       ignore_errors: yes
       changed_when: false
       register: ip_address_after_reboot
-      tags: TC_016
+      tags: TC_012,VERIFY_012
 
     - name: Validate ip address is same after reboot
       assert:
         that: "'{{ single_node_ip_address }}' in ip_address_after_reboot.stdout"
         fail_msg: "{{ ip_address_fail_msg }}"
         success_msg: "{{ ip_address_success_msg }}"
-      tags: TC_016
+      tags: TC_012,VERIFY_012

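Each playbook above decrypts the input config with ansible-vault whenever the $ANSIBLE_VAULT; header is present, but none of the visible hunks re-encrypts it afterwards. A sketch of such a cleanup step, assuming the same test_input_config_filename and vault_path variables, might be:

```yaml
# Sketch (assumed cleanup, not part of this commit's visible hunks):
# restore vault encryption on the input config after the test run.
- name: Re-encrypt input_config.yml
  command: ansible-vault encrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
  changed_when: true
```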
+ 18 - 11
appliance/test/test_vars/test_common_vars.yml

@@ -14,24 +14,31 @@
 ---
 
 # vars file for test_common.yml file
-docker_volume_fail_msg: "Docker volume omnia-storage does not exist"
-
-docker_volume_success_msg: "Docker volume omnia-storage exists"
-
 centos_version: '7.8'
+test_input_config_filename: "input_config_test.yml"
+empty_input_config_filename: "input_config_empty.yml"
+new_input_config_filename: "input_config_new.yml"
+password_config_file: "password_config"
+min_length_password: "testpass"
+max_length_password: "helloworld123helloworld12hello"
+long_password: "helloworld123hellowordl12hello3"
+white_space_password: "hello world 123"
+special_character_password1: "hello-world/"
+special_character_password2: "hello@$%!world"
 
+docker_volume_success_msg: "Docker volume omnia-storage exists"
+docker_volume_fail_msg: "Docker volume omnia-storage does not exist"
+input_config_success_msg: "Input config file is encrypted using ansible-vault successfully"
+input_config_fail_msg: "Input config file is failed to encrypt using ansible-vault"
 os_check_success_msg: "OS check passed"
-
 os_check_fail_msg: "OS check failed"
-
 internet_check_success_msg: "Internet connectivity check passed"
-
 internet_check_fail_msg: "Internet connectivity check failed"
-
 different_user_check_success_msg: "Different user execution check passed"
-
 different_user_check_fail_msg: "Different user execution check failed"
-
 selinux_check_success_msg: "selinux check passed"
-
 selinux_check_fail_msg: "selinux check failed"
+input_config_check_success_msg: "input_config.yml validation passed"
+input_config_check_fail_msg: "input_config.yml validation failed"
+install_package_success_msg: "Installation of package is successful"
+install_package_fail_msg: "Installation of package is failed"

+ 21 - 8
appliance/test/test_vars/test_provision_vars.yml

@@ -14,11 +14,7 @@
 ---
 
 # Usage: test_provision_cdip.yml
-empty_password: ""
-lengthy_password: "a1b2c3d4e5f6g7h8i9j10k11"
-whitespace_password: "hello world 123"
-special_character_password: "hello@123#%"
-first_nic: "eno1"
+first_nic: "em1"
 nic1_ip_address: 172.17.0.1
 validate_password_success_msg: "Password validation successful"
 validate_password_fail_msg: "Password validation failed"
@@ -35,6 +31,8 @@ cobbler_image_files:
  - firewall_settings
  - provision_password
  - cobbler_image
+password_config_file: "password_config"
+test_input_config_filename: "input_config_test.yml"
 
 # Usage: test_provision_cc.yml
 docker_check_success_msg: "Docker service stopped usecase validation successful"
@@ -55,7 +53,20 @@ kickstart_file_fail_msg: "Kickstart file validation failed"
 kickstart_file_success_msg: "Kickstart file validation successful"
 cobbler_reboot_fail_msg: "Cobbler container failed to start after reboot"
 cobbler_reboot_success_msg: "Cobbler container started successfully after reboot"
-kickstart_filename: "centos8.ks"
+crontab_list_fail_msg: "Crontab list validation failed"
+crontab_list_success_msg: "Crontab list validation successful"
+iso_check_fail_msg: "centos iso file check validation failed"
+iso_check_success_msg: "centos iso file check validation successful"
+cobbler_service_check_fail_msg: "Cobbler services validation failed"
+cobbler_service_check_success_msg: "Cobbler services validation successful"
+kickstart_filename: "centos7.ks"
+iso_file_path: "../roles/provision/files"
+temp_iso_name: "temp_centos.iso"
+cobbler_services:
+ - tftp
+ - dhcpd
+ - cobblerd
+ - xinetd
 
 # Usage: test_provision_cdip.yml, test_provision_cc.yml, test_provision_ndod.yml
 docker_container_name: "cobbler"
@@ -68,5 +79,7 @@ authentication_fail_msg: "Server authentication validation failed"
 authentication_success_msg: "Server authentication validation successful"
 ip_address_fail_msg: "IP address validation failed"
 ip_address_success_msg: "IP address validation successful"
-cobbler_groupname: "cobbler_servers"
-file_permission: 0644
+cobbler_groupname: "all"
+inventory_file: "provisioned_hosts.yml"
+file_permission: 0644
+vault_path: ../roles/common/files/.vault_key

+ 5 - 5
omnia.yml

@@ -24,11 +24,11 @@
   roles:
     - common
  
-- name: Apply GPU node config
-  hosts: gpus
-  gather_facts: false
-  roles:
-    - compute_gpu
+#- name: Apply GPU node config
+#  hosts: gpus
+#  gather_facts: false
+#  roles:
+#    - compute_gpu
 
 - name: Apply K8s manager config
   hosts: manager

roles/compute_gpu/files/daemon.json → roles/common/files/daemon.json


+ 18 - 0
roles/common/files/inventory.fact

@@ -0,0 +1,18 @@
+#!/bin/bash
+INVENTORY=$(mktemp lspci.XXXXXXXX)
+
+lspci > $INVENTORY
+
+NVIDIA_GPU=$(cat $INVENTORY | grep -i nvidia | wc -l)
+XILINX_FPGA=$(cat $INVENTORY | grep "Processing accelerators: Xilinx Corporation Device" | wc -l)
+INTEL_A10_FPGA=$(cat $INVENTORY | grep "Processing accelerators: Intel Corporation Device" | wc -l)
+
+cat << EOF
+{
+	"xilinx_fpga" : $XILINX_FPGA,
+	"nvidia_gpu" : $NVIDIA_GPU,
+	"intel_a10_fpga" : $INTEL_A10_FPGA
+}
+EOF
+
+rm -f $INVENTORY

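On a node with, for example, one NVIDIA GPU and no FPGAs, this script prints { "xilinx_fpga" : 0, "nvidia_gpu" : 1, "intel_a10_fpga" : 0 }. Installed under /etc/ansible/facts.d (see roles/common/tasks/main.yml below), those values surface as ansible_local.inventory.* once facts are re-gathered; a minimal sketch of reading them:

```yaml
# Sketch: the custom fact is exposed as ansible_local.inventory after
# the explicit setup call in roles/common/tasks/main.yml.
- name: Show detected accelerators
  debug:
    msg: "nvidia_gpu={{ ansible_local.inventory.nvidia_gpu }} xilinx_fpga={{ ansible_local.inventory.xilinx_fpga }}"
```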
+ 18 - 0
roles/common/tasks/main.yml

@@ -13,6 +13,17 @@
 #  limitations under the License.
 ---
 
+- name: Create a custom fact directory on each host
+  file:
+    path: /etc/ansible/facts.d
+    state: directory
+
+- name: Install accelerator discovery script
+  copy:
+    src: inventory.fact
+    dest: /etc/ansible/facts.d/inventory.fact
+    mode: 0755
+
 - name: Add kubernetes repo
   copy:
     src: kubernetes.repo
@@ -70,6 +81,9 @@
     state: present
   tags: install
 
+- name: Collect host facts (including accelerator information)
+  setup: ~
+
 - name: Install k8s packages
   package:
     name: "{{ k8s_packages }}"
@@ -107,3 +121,7 @@
 - name: Deploy time ntp/chrony
   include_tasks: ntp.yml
   tags: install
+
+- name: Install Nvidia drivers and software components
+  include_tasks: nvidia.yml
+  when: ansible_local.inventory.nvidia_gpu > 0

roles/compute_gpu/tasks/main.yml → roles/common/tasks/nvidia.yml


+ 13 - 0
roles/common/vars/main.yml

@@ -23,6 +23,7 @@ common_packages:
   - bash-completion
   - nvidia-detect
   - chrony
+  - pciutils
 
 k8s_packages:
   - kubelet-1.16.7
@@ -61,3 +62,15 @@ ntp_servers:
   - 2.centos.pool.ntp.org
 chrony_servers:
   - 2.centos.pool.ntp.org
+
+nvidia_docker_repo_url: https://nvidia.github.io/nvidia-docker/centos7/nvidia-docker.repo
+nvidia_docker_repo_dest: /etc/yum.repos.d/nvidia-docker.repo
+nvidia_container_repo_url: https://nvidia.github.io/libnvidia-container/centos7/libnvidia-container.repo
+nvidia_container_repo_dest: /etc/yum.repos.d/libnvidia-container.repo
+
+nvidia_packages:
+  - kmod-nvidia
+  - nvidia-docker2
+
+daemon_file_dest: /etc/docker/
+daemon_file_mode: 0644

+ 0 - 3
roles/compute_gpu/files/k8s.conf

@@ -1,3 +0,0 @@
-net.bridge.bridge-nf-call-ip6tables = 1
-net.bridge.bridge-nf-call-iptables = 1
-

+ 0 - 8
roles/compute_gpu/files/kubernetes.repo

@@ -1,8 +0,0 @@
-[kubernetes]
-name=Kubernetes
-baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64
-enabled=1
-gpgcheck=1
-repo_gpgcheck=1
-gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg
-

+ 0 - 30
roles/compute_gpu/vars/main.yml

@@ -1,30 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-nvidia_docker_repo_url: https://nvidia.github.io/nvidia-docker/centos7/nvidia-docker.repo
-
-nvidia_docker_repo_dest: /etc/yum.repos.d/nvidia-docker.repo
-
-nvidia_container_repo_url: https://nvidia.github.io/libnvidia-container/centos7/libnvidia-container.repo 
-
-nvidia_container_repo_dest: /etc/yum.repos.d/libnvidia-container.repo
-
-nvidia_packages:
-  - kmod-nvidia
-  - nvidia-docker2
-
-daemon_file_dest: /etc/docker/
-
-daemon_file_mode: 0644