View Source

Merge pull request #567 from abhishek-s-a/delete_test_omnia

Issue #566: Delete omnia 1.0 test files
Lucas A. Wilson 3 years ago
parent
commit
df7d99b8c7

+ 0 - 49
control_plane/test/appliance_config_empty.yml

@@ -1,49 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Password used while deploying OS on bare metal servers and for Cobbler UI.
-# The length of the password should be at least 8.
-# The password must not contain -,\, ',"
-provision_password: ""
-
-# Password used for the AWX UI.
-# The length of the password should be at least 8.
-# The password must not contain -,\, ',"
-awx_password: ""
-
-# The nic/ethernet card that needs to be connected to the HPC switch.
-# This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is em1.
-hpc_nic: "em1"
-
-# The nic/ethernet card that will be connected to the public internet.
-# Default value of nic is em2
-public_nic: "em2"
-
-# This is the path where the user has kept the ISO image that needs to be provisioned on the target nodes.
-# The ISO file should be the CentOS7-2009-minimal edition.
-# Other ISO files are not supported.
-iso_file_path: ""
-
-# The mapping file consists of the MAC address and its respective IP address and hostname.
-# The mapping file must be a CSV file in the format MAC,hostname,IP.
-# A template for the mapping file exists in omnia/examples and is named mapping_file.csv.
-# This is the path where the user has kept the mapping file for DHCP configuration.
-mapping_file_path: ""
-
-# The DHCP range for assigning IPv4 addresses to the bare metal nodes.
-# Example: 10.1.23.1
-dhcp_start_ip_range: ""
-dhcp_end_ip_range: ""
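The config above documents each field's constraints: passwords of at least 8 characters with no -, \, ', or " characters, and a DHCP start/end pair in dotted IPv4 form. A minimal sketch of how such constraints can be checked with a plain Ansible assert (an illustration only, not Omnia's actual validation logic in roles/common):

    # Illustrative only: validate the documented password constraints.
    - name: Validate provision_password constraints
      assert:
        that:
          - provision_password | length >= 8
          - provision_password is not search("[-\\\\'\"]")
        fail_msg: "provision_password must be >= 8 chars and must not contain -, \\, ', or quote characters"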

+ 0 - 49
control_plane/test/appliance_config_test.yml

@@ -1,49 +0,0 @@
-# Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Password used while deploying OS on bare metal servers and for Cobbler UI.
-# The length of the password should be at least 8.
-# The password must not contain -,\, ',"
-provision_password: "omnia@123"
-
-# Password used for the AWX UI.
-# The length of the password should be at least 8.
-# The password must not contain -,\, ',"
-awx_password: "omnia@123"
-
-# The nic/ethernet card that needs to be connected to the HPC switch.
-# This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is em1.
-hpc_nic: "em1"
-
-# The nic/ethernet card that will be connected to the public internet.
-# Default value of nic is em2
-public_nic: "em2"
-
-# This is the path where the user has kept the ISO image that needs to be provisioned on the target nodes.
-# The ISO file should be the CentOS7-2009-minimal edition.
-# Other ISO files are not supported.
-iso_file_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
-
-# The mapping file consists of the MAC address and its respective IP address and hostname.
-# The mapping file must be a CSV file in the format MAC,hostname,IP.
-# A template for the mapping file exists in omnia/examples and is named mapping_file.csv.
-# This is the path where the user has kept the mapping file for DHCP configuration.
-mapping_file_path: ""
-
-# The DHCP range for assigning IPv4 addresses to the bare metal nodes.
-# Example: 10.1.23.1
-dhcp_start_ip_range: "172.17.0.10"
-dhcp_end_ip_range: "172.17.0.100"

+ 0 - 3
control_plane/test/provisioned_hosts.yml

@@ -1,3 +0,0 @@
-[all]
-172.17.0.10
-172.17.0.15
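provisioned_hosts.yml was a plain INI inventory with a single [all] group; TC_011 further down pings that group ad hoc. The equivalent playbook form, as a self-contained sketch (run with -i provisioned_hosts.yml; illustrative, not part of Omnia):

    # Illustrative only: ping every host in the deleted static inventory.
    - name: Ping every provisioned host
      hosts: all
      gather_facts: false
      tasks:
        - name: Verify SSH connectivity
          ping: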

File diff suppressed because it is too large
+ 0 - 1882
control_plane/test/test_common.yml


+ 0 - 2
control_plane/test/test_mapping_file

@@ -1,2 +0,0 @@
-Mac,Hostname,IP
-xx:yy:zz:aa:bb,validation-host21,172.20.0.21
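The header row Mac,Hostname,IP matches the format documented in the appliance configs above. For reference, such a file can be read back into structured rows with the community.general.read_csv module; a sketch of that round trip (illustrative, not how Omnia itself consumes the file):

    # Illustrative only: parse the mapping CSV into per-host dicts.
    - name: Read the MAC/hostname/IP mapping file
      community.general.read_csv:
        path: test_mapping_file
      register: mapping

    - name: Show each mapped host
      debug:
        msg: "{{ item.Mac }} -> {{ item.Hostname }} ({{ item.IP }})"
      loop: "{{ mapping.list }}"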

+ 0 - 608
control_plane/test/test_provision_cc.yml

@@ -1,608 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Testcase OMNIA_DIO_US_CC_TC_004
-# Execute provision role in management station and verify cobbler configuration
-- name: OMNIA_DIO_US_CC_TC_004
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-    - "{{ test_input_config_filename }}"
-  tasks:
-    - name: Check the iso file is present
-      stat:
-        path: "{{ iso_file_path }}"
-      register: iso_status
-      tags: TC_004
-
-    - name: Fail if iso file is missing
-      fail:
-        msg: "{{ iso_fail }}"
-      when: iso_status.stat.exists == false
-      tags: TC_004
-
-    - name: Delete the cobbler container if it exists
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_004
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_004
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_004
-
-    - name: Check the connection to cobbler UI and it returns a status 200
-      uri:
-        url: https://localhost/cobbler_web
-        status_code: 200
-        return_content: yes
-        validate_certs: no
-      tags: TC_004,VERIFY_004
-
-    - name: Fetch cobbler version in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler version
-      changed_when: false
-      register: cobbler_version
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler version
-      assert:
-        that:
-          - "'Cobbler' in cobbler_version.stdout"
-          - "'Error' not in cobbler_version.stdout"
-        fail_msg: "{{ cobbler_version_fail_msg }}"
-        success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Run cobbler check command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler check
-      changed_when: false
-      register: cobbler_check
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler check command output
-      assert:
-        that:
-          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
-          - "'Error' not in cobbler_check.stdout"
-        fail_msg: "{{ cobbler_check_fail_msg }}"
-        success_msg: "{{ cobbler_check_success_msg }}"
-      ignore_errors: yes
-      tags: TC_004,VERIFY_004
-
-    - name: Run cobbler sync command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler sync
-      changed_when: false
-      register: cobbler_sync
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler sync command output
-      assert:
-        that:
-          - "'TASK COMPLETE' in cobbler_sync.stdout"
-          - "'Fail' not in cobbler_sync.stdout"
-          - "'Error' not in cobbler_sync.stdout"
-        fail_msg: "{{ cobbler_sync_fail_msg }}"
-        success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Fetch cobbler distro list
-      command: docker exec {{ docker_container_name }} cobbler distro list
-      changed_when: false
-      register: cobbler_distro_list
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler distro list
-      assert:
-        that:
-          - "'CentOS' in cobbler_distro_list.stdout"
-        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
-        success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Fetch cobbler profile list
-      command: docker exec cobbler cobbler profile list
-      changed_when: false
-      register: cobbler_profile_list
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler profile list
-      assert:
-        that:
-          - "'CentOS' in cobbler_profile_list.stdout"
-        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
-        success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Check kickstart file
-      shell: |
-        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exist" || echo "File does not exist"
-      changed_when: false
-      register: kickstart_file_status
-      tags: TC_004,VERIFY_004
-
-    - name: Verify kickstart file present
-      assert:
-        that:
-          - "'File exist' in kickstart_file_status.stdout"
-        fail_msg: "{{ kickstart_file_fail_msg }}"
-        success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Check crontab list
-      command: docker exec cobbler crontab -l
-      changed_when: false
-      register: crontab_list
-      tags: TC_004,VERIFY_004
-
-    - name: Verify crontab list
-      assert:
-        that:
-          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
-          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
-        fail_msg: "{{ crontab_list_fail_msg }}"
-        success_msg: "{{ crontab_list_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
-      command: docker exec cobbler systemctl is-active {{ item }}
-      changed_when: false
-      ignore_errors: yes
-      register: cobbler_service_check
-      with_items: "{{ cobbler_services }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
-      assert:
-        that:
-          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
-          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
-          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
-        fail_msg: "{{ cobbler_service_check_fail_msg }}"
-        success_msg: "{{ cobbler_service_check_success_msg }}"
-      with_sequence: start=0 end=3
-      tags: TC_004,VERIFY_004
-
-# Testcase OMNIA_DIO_US_CDIP_TC_005
-# Execute provision role in management station where cobbler container is configured
-- name: OMNIA_DIO_US_CDIP_TC_005
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_005
-
-    - name: Check the connection to cobbler UI and it returns a status 200
-      uri:
-        url: https://localhost/cobbler_web
-        status_code: 200
-        return_content: yes
-        validate_certs: no
-      tags: TC_005,VERIFY_005
-
-    - name: Fetch cobbler version in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler version
-      changed_when: false
-      register: cobbler_version
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler version
-      assert:
-        that:
-          - "'Cobbler' in cobbler_version.stdout"
-          - "'Error' not in cobbler_version.stdout"
-        fail_msg: "{{ cobbler_version_fail_msg }}"
-        success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Run cobbler check command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler check
-      changed_when: false
-      register: cobbler_check
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler check command output
-      assert:
-        that:
-          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
-          - "'Error' not in cobbler_check.stdout"
-        fail_msg: "{{ cobbler_check_fail_msg }}"
-        success_msg: "{{ cobbler_check_success_msg }}"
-      ignore_errors: yes
-      tags: TC_005,VERIFY_005
-
-    - name: Run cobbler sync command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler sync
-      changed_when: false
-      register: cobbler_sync
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler sync command output
-      assert:
-        that:
-          - "'TASK COMPLETE' in cobbler_sync.stdout"
-          - "'Fail' not in cobbler_sync.stdout"
-          - "'Error' not in cobbler_sync.stdout"
-        fail_msg: "{{ cobbler_sync_fail_msg }}"
-        success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Fetch cobbler distro list
-      command: docker exec {{ docker_container_name }} cobbler distro list
-      changed_when: false
-      register: cobbler_distro_list
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler distro list
-      assert:
-        that:
-          - "'CentOS' in cobbler_distro_list.stdout"
-        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
-        success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Fetch cobbler profile list
-      command: docker exec cobbler cobbler profile list
-      changed_when: false
-      register: cobbler_profile_list
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler profile list
-      assert:
-        that:
-          - "'CentOS' in cobbler_profile_list.stdout"
-        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
-        success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Check kickstart file
-      shell: |
-        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exist" || echo "File does not exist"
-      changed_when: false
-      register: kickstart_file_status
-      tags: TC_005,VERIFY_005
-
-    - name: Verify kickstart file present
-      assert:
-        that:
-          - "'File exist' in kickstart_file_status.stdout"
-        fail_msg: "{{ kickstart_file_fail_msg }}"
-        success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Check crontab list
-      command: docker exec cobbler crontab -l
-      changed_when: false
-      register: crontab_list
-      tags: TC_005,VERIFY_005
-
-    - name: Verify crontab list
-      assert:
-        that:
-          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
-          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
-        fail_msg: "{{ crontab_list_fail_msg }}"
-        success_msg: "{{ crontab_list_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
-      command: docker exec cobbler systemctl is-active {{ item }}
-      changed_when: false
-      ignore_errors: yes
-      register: cobbler_service_check
-      with_items: "{{ cobbler_services }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
-      assert:
-        that:
-          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
-          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
-          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
-        fail_msg: "{{ cobbler_service_check_fail_msg }}"
-        success_msg: "{{ cobbler_service_check_success_msg }}"
-      with_sequence: start=0 end=3
-      tags: TC_005,VERIFY_005
-
-# Testcase OMNIA_DIO_US_CC_TC_006
-# Execute provision role in management station where another container is already present
-- name: OMNIA_DIO_US_CC_TC_006
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if it exists
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_006
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_006
-
-    - name: Create docker image
-      docker_image:
-        name: ubuntu
-        tag: latest
-        source: pull
-      tags: TC_006
-
-    - name: Create docker container
-      command: docker run -dit ubuntu
-      register: create_docker_container
-      changed_when: true
-      args:
-        warn: false
-      tags: TC_006
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_006
-
-    - name: Check the connection to cobbler UI and it returns a status 200
-      uri:
-        url: https://localhost/cobbler_web
-        status_code: 200
-        return_content: yes
-        validate_certs: no
-      tags: TC_006,VERIFY_006
-
-    - name: Fetch cobbler version in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler version
-      changed_when: false
-      register: cobbler_version
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler version
-      assert:
-        that:
-          - "'Cobbler' in cobbler_version.stdout"
-          - "'Error' not in cobbler_version.stdout"
-        fail_msg: "{{ cobbler_version_fail_msg }}"
-        success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Run cobbler check command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler check
-      changed_when: false
-      register: cobbler_check
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler check command output
-      assert:
-        that:
-          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
-          - "'Error' not in cobbler_check.stdout"
-        fail_msg: "{{ cobbler_check_fail_msg }}"
-        success_msg: "{{ cobbler_check_success_msg }}"
-      ignore_errors: yes
-      tags: TC_006,VERIFY_006
-
-    - name: Run cobbler sync command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler sync
-      changed_when: false
-      register: cobbler_sync
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler sync command output
-      assert:
-        that:
-          - "'TASK COMPLETE' in cobbler_sync.stdout"
-          - "'Fail' not in cobbler_sync.stdout"
-          - "'Error' not in cobbler_sync.stdout"
-        fail_msg: "{{ cobbler_sync_fail_msg }}"
-        success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Fetch cobbler distro list
-      command: docker exec {{ docker_container_name }} cobbler distro list
-      changed_when: false
-      register: cobbler_distro_list
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler distro list
-      assert:
-        that:
-          - "'CentOS' in cobbler_distro_list.stdout"
-        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
-        success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Fetch cobbler profile list
-      command: docker exec cobbler cobbler profile list
-      changed_when: false
-      register: cobbler_profile_list
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler profile list
-      assert:
-        that:
-          - "'CentOS' in cobbler_profile_list.stdout"
-        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
-        success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Check kickstart file
-      shell: |
-        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exist" || echo "File does not exist"
-      changed_when: false
-      register: kickstart_file_status
-      tags: TC_006,VERIFY_006
-
-    - name: Verify kickstart file present
-      assert:
-        that:
-          - "'File exist' in kickstart_file_status.stdout"
-        fail_msg: "{{ kickstart_file_fail_msg }}"
-        success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Check crontab list
-      command: docker exec cobbler crontab -l
-      changed_when: false
-      register: crontab_list
-      tags: TC_006,VERIFY_006
-
-    - name: Verify crontab list
-      assert:
-        that:
-          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
-          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
-        fail_msg: "{{ crontab_list_fail_msg }}"
-        success_msg: "{{ crontab_list_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
-      command: docker exec cobbler systemctl is-active {{ item }}
-      changed_when: false
-      ignore_errors: yes
-      register: cobbler_service_check
-      with_items: "{{ cobbler_services }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
-      assert:
-        that:
-          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
-          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
-          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
-        fail_msg: "{{ cobbler_service_check_fail_msg }}"
-        success_msg: "{{ cobbler_service_check_success_msg }}"
-      with_sequence: start=0 end=3
-      tags: TC_006,VERIFY_006
-
-    - name: Delete the ubuntu container
-      docker_container:
-        name: "{{ create_docker_container.stdout }}"
-        state: absent
-      tags: TC_006
-
-    - name: Delete the ubuntu image
-      docker_image:
-        name: ubuntu
-        state: absent
-      tags: TC_006
-
-# Testcase OMNIA_DIO_US_CC_TC_007
-# Execute provision role in management station and reboot management station
-- name: OMNIA_DIO_US_CC_TC_007
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Check last uptime of the server
-      command: uptime -s
-      register: uptime_status
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_007
-
-    - name: Check current date
-      command: date +"%Y-%m-%d %H"
-      register: current_time
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_007
-
-    - name: Delete the cobbler container if it exists
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - name: Reboot localhost
-      command: reboot
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - name: Inspect cobbler container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: cobbler_cnt_status
-      tags: TC_007,VERIFY_007
-
-    - name: Verify cobbler container is running after reboot
-      assert:
-        that: "'running' in cobbler_cnt_status.container.State.Status"
-        fail_msg: "{{ cobbler_reboot_fail_msg }}"
-        success_msg: "{{ cobbler_reboot_success_msg }}"
-      tags: TC_007,VERIFY_007

+ 0 - 183
control_plane/test/test_provision_cdip.yml

@@ -1,183 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Testcase OMNIA_DIO_US_CDIP_TC_001
-# Execute provision role in management station with CentOS 7 installed
-- name: OMNIA_DIO_US_CDIP_TC_001
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if it exists
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_001
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_001
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-      tags: TC_001
-
-    - name: Inspect cobbler docker image
-      docker_image_info:
-        name: "{{ docker_image_name }}"
-      register: cobbler_image_status
-      tags: TC_001,VERIFY_001
-
-    - name: Validate cobbler docker image
-      assert:
-        that:
-          - cobbler_image_status.images
-        fail_msg: "{{ cobbler_img_fail_msg }}"
-        success_msg: "{{ cobbler_img_success_msg }}"
-      tags: TC_001,VERIFY_001
-
-    - name: Inspect cobbler container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: cobbler_cnt_status
-      tags: TC_001,VERIFY_001
-
-    - name: Validate cobbler docker container
-      assert:
-        that:
-          - cobbler_cnt_status.exists
-        fail_msg: "{{ cobbler_cnt_fail_msg }}"
-        success_msg: "{{ cobbler_cnt_success_msg }}"
-      tags: TC_001,VERIFY_001
-
-# Testcase OMNIA_DIO_US_CDIP_TC_002
-# Execute provision role in management station where cobbler container and image are already created
-- name: OMNIA_DIO_US_CDIP_TC_002
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_002
-
-    - name: Inspect cobbler docker image
-      docker_image_info:
-        name: "{{ docker_image_name }}"
-      register: cobbler_image_status
-      tags: TC_002,VERIFY_002
-
-    - name: Validate cobbler docker image
-      assert:
-        that:
-          - cobbler_image_status.images
-        fail_msg: "{{ cobbler_img_fail_msg }}"
-        success_msg: "{{ cobbler_img_success_msg }}"
-      tags: TC_002,VERIFY_002
-
-    - name: Inspect cobbler container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: cobbler_cnt_status
-      tags: TC_002,VERIFY_002
-
-    - name: Validate cobbler docker container
-      assert:
-        that:
-          - cobbler_cnt_status.exists
-        fail_msg: "{{ cobbler_cnt_fail_msg }}"
-        success_msg: "{{ cobbler_cnt_success_msg }}"
-      tags: TC_002,VERIFY_002
-
-# Testcase OMNIA_DIO_US_CDIP_TC_003
-# Execute provision role in management station where docker service is not running
-- name: OMNIA_DIO_US_CDIP_TC_003
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if it exists
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_003
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_003
-
-    - name: Stop docker service
-      service:
-        name: docker
-        state: stopped
-      tags: TC_003
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-
-        - name: Docker service stopped usecase success message
-          debug:
-            msg: "{{ docker_check_success_msg }}"
-
-      rescue:
-        - name: Docker service stopped usecase fail message
-          fail:
-            msg: "{{ docker_check_fail_msg }}"
-
-      always:
-        - name: Start docker service
-          service:
-            name: docker
-            state: started
-      tags: TC_003

+ 0 - 294
control_plane/test/test_provision_ndod.yml

@@ -1,294 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# OMNIA_DIO_US_NDOD_TC_009
-# Execute provision role in management station and PXE boot one compute node
-- name: OMNIA_DIO_US_NDOD_TC_009
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/common/vars/main.yml
-  tasks:
-    - name: Set ip address of the compute node
-      set_fact:
-        single_node_ip_address: "{{ groups[cobbler_groupname][0] }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_009,VERIFY_009
-
-    - name: Check input config file is encrypted
-      command: cat {{ test_input_config_filename }}
-      changed_when: false
-      register: config_content
-      tags: TC_009,VERIFY_009
-
-    - name: Decrypt input_config.yml
-      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
-      changed_when: false
-      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
-      tags: TC_009,VERIFY_009
-
-    - name: Include variable file input_config.yml
-      include_vars: "{{ test_input_config_filename }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Create inventory file
-      lineinfile:
-        path: inventory
-        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-        create: yes
-        mode: '{{ file_permission }}'
-      tags: TC_009,VERIFY_009
-
-    - meta: refresh_inventory
-      tags: TC_009,VERIFY_009
-
-    - name: Validate authentication of username and password
-      command: ansible {{ single_node_ip_address }} -m ping -i inventory
-      register: validate_login
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_009,VERIFY_009
-
-    - name: Validate the authentication output
-      assert:
-        that:
-          - "'pong' in validate_login.stdout"
-          - "'SUCCESS' in validate_login.stdout"
-          - "'UNREACHABLE' not in validate_login.stdout"
-        fail_msg: "{{ authentication_fail_msg }}"
-        success_msg: "{{ authentication_success_msg }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Check hostname
-      command: ansible {{ single_node_ip_address }} -m shell -a hostname -i inventory
-      register: validate_hostname
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_009,VERIFY_009
-
-    - name: Validate the hostname
-      assert:
-        that: "'localhost' not in validate_hostname.stdout"
-        fail_msg: "{{ hostname_fail_msg }}"
-        success_msg: "{{ hostname_success_msg }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_009,VERIFY_009
-
-# OMNIA_DIO_US_NDOD_TC_010
-# Execute provision role in management station and PXE boot two compute nodes
-- name: OMNIA_DIO_US_NDOD_TC_010
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_010,VERIFY_010
-
-    - name: Check input config file is encrypted
-      command: cat {{ test_input_config_filename }}
-      changed_when: false
-      register: config_content
-      tags: TC_010,VERIFY_010
-
-    - name: Decrypt input_config.yml
-      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
-      changed_when: false
-      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
-      tags: TC_010,VERIFY_010
-
-    - name: Include variable file input_config.yml
-      include_vars: "{{ test_input_config_filename }}"
-      tags: TC_010,VERIFY_010
-
-    - name: Create inventory file
-      lineinfile:
-        path: inventory
-        line: "[nodes]"
-        create: yes
-        mode: '{{ file_permission }}'
-      tags: TC_010,VERIFY_010
-
-    - name: Edit inventory file
-      lineinfile:
-        path: inventory
-        line: "{{ item }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-      with_items:
-        - "{{ groups[cobbler_groupname] }}"
-      tags: TC_010,VERIFY_010
-
-    - meta: refresh_inventory
-      tags: TC_010,VERIFY_010
-
-    - name: Validate ip address is different for both servers
-      assert:
-        that: groups[cobbler_groupname][0] != groups[cobbler_groupname][1]
-        fail_msg: "{{ ip_address_fail_msg }}"
-        success_msg: "{{ ip_address_success_msg }}"
-      delegate_to: localhost
-      run_once: yes
-      tags: TC_010,VERIFY_010
-
-    - name: Check hostname of both servers
-      command: ansible nodes -m shell -a hostname -i inventory
-      register: node_hostname
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_010,VERIFY_010
-
-    - name: Validate hostname is different for both servers
-      assert:
-        that:
-          - node_hostname.stdout_lines[1] != node_hostname.stdout_lines[3]
-          - "'localhost' not in node_hostname.stdout_lines[1]"
-          - "'localhost' not in node_hostname.stdout_lines[3]"
-        fail_msg: "{{ hostname_fail_msg }}"
-        success_msg: "{{ hostname_success_msg }}"
-      delegate_to: localhost
-      run_once: yes
-      tags: TC_010,VERIFY_010
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      delegate_to: localhost
-      run_once: yes
-      tags: TC_010,VERIFY_010
-
-# OMNIA_DIO_US_NDOD_TC_011
-# Validate whether passwordless SSH connection is established with the compute nodes
-- name: OMNIA_DIO_US_NDOD_TC_011
-  hosts: localhost
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Validate authentication of username and password
-      command: "ansible {{ cobbler_groupname }} -m ping -i {{ inventory_file }}"
-      register: validate_login
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_011,VERIFY_011
-
-    - name: Validate the passwordless SSH connection
-      assert:
-        that:
-          - "'pong' in validate_login.stdout"
-          - "'SUCCESS' in validate_login.stdout"
-          - "'UNREACHABLE' not in validate_login.stdout"
-        success_msg: "{{ authentication_success_msg }}"
-        fail_msg: "{{ authentication_fail_msg }}"
-      tags: TC_011,VERIFY_011
-
-# OMNIA_DIO_US_NDOD_TC_012
-# Execute provision role in management station and reboot the compute node after OS provisioning
-- name: OMNIA_DIO_US_NDOD_TC_012
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-  tasks:
-    - name: Set ip address of the compute node
-      set_fact:
-        single_node_ip_address: "{{ groups[cobbler_groupname][0] }}"
-      tags: TC_012,VERIFY_012
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_012,VERIFY_012
-
-    - name: Check input config file is encrypted
-      command: cat {{ test_input_config_filename }}
-      changed_when: false
-      register: config_content
-      tags: TC_012,VERIFY_012
-
-    - name: Decrypt input_config.yml
-      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
-      changed_when: false
-      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
-      tags: TC_012,VERIFY_012
-
-    - name: Include variable file input_config.yml
-      include_vars: "{{ test_input_config_filename }}"
-      tags: TC_012,VERIFY_012
-
-    - name: Create inventory file
-      lineinfile:
-        path: inventory
-        line: "[nodes]"
-        create: yes
-        mode: '{{ file_permission }}'
-      tags: TC_012,VERIFY_012
-
-    - name: Edit inventory file
-      lineinfile:
-        path: inventory
-        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-      tags: TC_012,VERIFY_012
-
-    - meta: refresh_inventory
-      tags: TC_012,VERIFY_012
-
-    - name: Reboot servers
-      command: ansible nodes -m command -a reboot -i inventory
-      ignore_errors: yes
-      changed_when: true
-      tags: TC_012,VERIFY_012
-
-    - name: Wait for 10 minutes
-      pause:
-        minutes: 10
-      tags: TC_012,VERIFY_012
-
-    - name: Check ip address of servers
-      command: ansible nodes -m command -a 'ip a' -i inventory
-      ignore_errors: yes
-      changed_when: false
-      register: ip_address_after_reboot
-      tags: TC_012,VERIFY_012
-
-    - name: Validate ip address is same after reboot
-      assert:
-        that: "'{{ single_node_ip_address }}' in ip_address_after_reboot.stdout"
-        fail_msg: "{{ ip_address_fail_msg }}"
-        success_msg: "{{ ip_address_success_msg }}"
-      tags: TC_012,VERIFY_012

+ 0 - 51
control_plane/test/test_vars/test_common_vars.yml

@@ -1,51 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# vars file for test_common.yml file
-centos_version: '7.8'
-test_input_config_filename: "appliance_config_test.yml"
-empty_input_config_filename: "appliance_config_empty.yml"
-new_input_config_filename: "appliance_config_new.yml"
-password_config_file: "password_config"
-min_length_password: "testpass"
-max_length_password: "helloworld123helloworld12hello"
-long_password: "helloworld123hellowordl12hello3"
-white_space_password: "hello world 123"
-special_character_password1: "hello-world/"
-special_character_password2: "hello@$%!world"
-valid_dhcp_start_range: "172.17.0.10"
-valid_dhcp_end_range: "172.17.0.200"
-invalid_dhcp_ip: "1720.1700.1000.1000"
-wrong_dhcp_ip: "d6:dh1:dsj:10"
-valid_iso_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
-wrong_iso_path: "/root/testfile"
-valid_mapping_file_path: "test_mapping_file"
-
-docker_volume_success_msg: "Docker volume omnia-storage exists"
-docker_volume_fail_msg: "Docker volume omnia-storage does not exist"
-input_config_success_msg: "Input config file is encrypted using ansible-vault successfully"
-input_config_fail_msg: "Input config file is failed to encrypt using ansible-vault"
-os_check_success_msg: "OS check passed"
-os_check_fail_msg: "OS check failed"
-internet_check_success_msg: "Internet connectivity check passed"
-internet_check_fail_msg: "Internet connectivity check failed"
-different_user_check_success_msg: "Different user execution check passed"
-different_user_check_fail_msg: "Different user execution check failed"
-selinux_check_success_msg: "selinux check passed"
-selinux_check_fail_msg: "selinux check failed"
-input_config_check_success_msg: "appliance_config.yml validation passed"
-input_config_check_fail_msg: "appliance_config.yml validation failed"
-install_package_success_msg: "Installation of package is successful"
-install_package_fail_msg: "Installation of package is failed"

+ 0 - 85
control_plane/test/test_vars/test_provision_vars.yml

@@ -1,85 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Usage: test_provision_cdip.yml
-first_nic: "em1"
-nic1_ip_address: 172.17.0.1
-validate_password_success_msg: "Password validation successful"
-validate_password_fail_msg: "Password validation failed"
-cobbler_img_fail_msg: "Docker image cobbler does not exist"
-cobbler_img_success_msg: "Docker image cobbler exists"
-cobbler_cnt_fail_msg: "Docker container cobbler does not exist"
-cobbler_cnt_success_msg: "Docker container cobbler exists"
-nic_check_fail_msg: "NIC-1 ip address validation failed"
-nic_check_success_msg: "NIC-1 ip address validation successful"
-cobbler_image_files:
- - check_prerequisites
- - mount_iso
- - firewall_settings
- - provision_password
- - dhcp_configure
- - cobbler_image
-password_config_file: "password_config"
-test_input_config_filename: "appliance_config_test.yml"
-
-# Usage: test_provision_cc.yml
-docker_check_success_msg: "Docker service stopped use case validation successful"
-docker_check_fail_msg: "Docker service stopped use case validation failed"
-docker_ip_fail_msg: "Docker IP validation failed"
-docker_ip_success_msg: "Docker IP validation successful"
-cobbler_version_fail_msg: "Cobbler version validation failed"
-cobbler_version_success_msg: "Cobbler version validation successful"
-cobbler_check_fail_msg: "Cobbler check validation failed"
-cobbler_check_success_msg: "Cobbler check validation successful"
-cobbler_sync_fail_msg: "Cobbler sync validation failed"
-cobbler_sync_success_msg: "Cobbler sync validation successful"
-cobbler_distro_list_fail_msg: "Cobbler distro list validation failed"
-cobbler_distro_list_success_msg: "Cobbler distro list validation successful"
-cobbler_profile_list_fail_msg: "Cobbler profile list validation failed"
-cobbler_profile_list_success_msg: "Cobbler profile list validation successful"
-kickstart_file_fail_msg: "Kickstart file validation failed"
-kickstart_file_success_msg: "Kickstart file validation successful"
-cobbler_reboot_fail_msg: "Cobbler container failed to start after reboot"
-cobbler_reboot_success_msg: "Cobbler container started successfully after reboot"
-crontab_list_fail_msg: "Crontab list validation failed"
-crontab_list_success_msg: "Crontab list validation successful"
-iso_check_fail_msg: "centos iso file check validation failed"
-iso_check_success_msg: "centos iso file check validation successful"
-cobbler_service_check_fail_msg: "cobbler service validation failed"
-cobbler_service_check_success_msg: "cobbler service validation successful"
-kickstart_filename: "centos7.ks"
-iso_file_path: "../roles/provision/files"
-temp_iso_name: "temp_centos.iso"
-cobbler_services:
- - tftp
- - dhcpd
- - cobblerd
- - xinetd
-
-# Usage: test_provision_cdip.yml, test_provision_cc.yml, test_provision_ndod.yml
-docker_container_name: "cobbler"
-boundary_password: "testpass"
-
-# Usage: test_provision_ndod.yml
-hostname_fail_msg: "Hostname validation failed"
-hostname_success_msg: "Hostname validation successful"
-authentication_fail_msg: "Server authentication validation failed"
-authentication_success_msg: "Server authentication validation successful"
-ip_address_fail_msg: "IP address validation failed"
-ip_address_success_msg: "IP address validation successful"
-cobbler_groupname: "all"
-inventory_file: "provisioned_hosts.yml"
-file_permission: "0644"
-vault_path: ../roles/common/files/.vault_key

+ 0 - 35
control_plane/test/test_vars/test_web_ui_vars.yml

@@ -1,35 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Usage: test_web_ui.yml
-return_status: 200
-fail_return_status: -1
-awx_listening_port: 8081
-time: 1
-actual_containers: 4
-package: "docker-ce"
-awx_exists_msg: "Test case passed: AWX instance is already running on your system"
-awx_not_exists_msg: "Test case failed: AWX does not exist"
-validate_password_success_msg: "Test case passed: Password validation successful"
-validate_password_fail_msg: "Test case failed: Password validation failed"
-resource_exists_success_msg: "Success: Requested resource(s) exists"
-resource_exists_fail_msg: "Failure: Requested resource(s) does not exist"
-compute_group_name: "compute"
-manager_group_name: "manager"
-tower_cli_package_name: "ansible-tower-cli"
-docker_container_name: "awx_web"
-container_up_status_success_msg: "Container is running successfully after the reboot"
-container_up_status_fail_msg: "Container is not running after the reboot"
-test_input_config_filename: appliance_config_test.yml

+ 0 - 378
control_plane/test/test_web_ui.yml

@@ -1,378 +0,0 @@
-# Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
----
-
-# Testcase OMNIA_CRM_US_AWXD_TC_001
-# Test case to verify the prerequisites are installed and execute the AWX deployment
-- name: OMNIA_CRM_US_AWXD_TC_001
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - ../roles/web_ui/vars/main.yml
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-    - name: Get the docker package facts
-      package_facts:
-        manager: auto
-      tags: TC_001
-
-    - name: Check if docker-ce is already installed
-      debug:
-        var: ansible_facts.packages['{{ package }}']
-      tags: TC_001
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_001
-
-    - name: Check that you can connect to github repo and it returns a status 200
-      uri:
-        url: "{{ awx_git_repo }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_001
-
-    - name: Check that you can connect to AWX UI and it returns a status 200
-      uri:
-        url: "{{ awx_ip }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_001
-
-    - name: Verify awx-server is listening on 8081
-      wait_for:
-        port: "{{ awx_listening_port }}"
-        timeout: "{{ time }}"
-      tags: TC_001
-
-    - name: Get the containers count
-      shell: |
-        set -o pipefail
-        docker ps -a | grep awx | wc -l
-      register: containers_count
-      changed_when: False
-      tags: TC_001
-
-    - name: Validate the containers count
-      assert:
-        that: containers_count.stdout | int >= actual_containers
-        success_msg: "{{ awx_exists_msg }}"
-        fail_msg: "{{ awx_not_exists_msg }}"
-      tags: TC_001
-
-# Testcase OMNIA_CRM_US_AWXD_TC_002
-# Test case to verify regression testing
-- name: OMNIA_CRM_US_AWXD_TC_002
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - ../roles/web_ui/vars/main.yml
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_002
-
-    - name: Check that you can connect to github repo and it returns a status 200
-      uri:
-        url: "{{ awx_git_repo }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_002
-
-    - name: Check that you can connect to AWX UI and it returns a status 200
-      uri:
-        url: "{{ awx_ip }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_002
-
-    - name: Verify awx-server is listening on 8081
-      wait_for:
-        port: "{{ awx_listening_port }}"
-        timeout: "{{ time }}"
-      tags: TC_002
-
-    - name: Get the containers count
-      shell: |
-        set -o pipefail
-        docker ps -a | grep awx | wc -l
-      register: containers_count
-      changed_when: False
-      tags: TC_002
-
-    - name: Validate the containers count
-      assert:
-        that: containers_count.stdout | int >= actual_containers
-        success_msg: "{{ awx_exists_msg }}"
-        fail_msg: "{{ awx_not_exists_msg }}"
-      tags: TC_002
-
-# Testcase OMNIA_CRM_US_AWXD_TC_003
-# Test case to validate the AWX configuration
-- name: OMNIA_CRM_US_AWXD_TC_003
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - ../roles/web_ui/vars/main.yml
-    - ../roles/common/vars/main.yml
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_003
-
-    - name: Get the package facts
-      package_facts:
-        manager: auto
-      tags: TC_003
-
-    - name: Check if ansible-tower-cli is already installed
-      assert:
-        that: "'{{ tower_cli_package_name }}' in ansible_facts.packages"
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing organizations
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        organizations list -f human
-      register: organizations_array
-      changed_when: False
-      tags: TC_003
-
-    - name: Check for organization
-      assert:
-        that: organization_name in organizations_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing projects
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        projects list -f human
-      changed_when: False
-      register: projects_array
-      tags: TC_003
-
-    - name: Check for project
-      assert:
-        that: project_name in projects_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing inventories
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        inventory list -f human
-      changed_when: False
-      register: inventory_array
-      tags: TC_003
-
-    - name: Check for inventories
-      assert:
-        that: omnia_inventory_name in inventory_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing groups if omnia-inventory exists
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        groups list --inventory "{{ omnia_inventory_name }}" -f human
-      changed_when: False
-      register: groups_array
-      when: omnia_inventory_name in inventory_array.stdout
-      tags: TC_003
-
-    - name: Check for manager and compute groups
-      assert:
-        that:
-          - manager_group_name in groups_array.stdout
-          - compute_group_name in groups_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing credentials
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        credentials list -f human
-      changed_when: False
-      register: credentials_array
-      tags: TC_003
-
-    - name: Check for "{{ credential_name }}"
-      assert:
-        that: credential_name in credentials_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing job templates
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        job_templates list -f human
-      changed_when: False
-      register: templates_array
-      tags: TC_003
-
-    - name: Check for templates
-      assert:
-        that:
-          - omnia_template_name in templates_array.stdout
-          - inventory_template_name in templates_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing schedules for job templates
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        schedules list -f human
-      changed_when: False
-      register: schedules_array
-      tags: TC_003
-
-    - name: Check for schedules to job template
-      assert:
-        that: schedule_name in schedules_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-# Testcase OMNIA_CRM_US_AWXD_TC_004
-# Execute common role in management station without internet connectivity
-- name: OMNIA_CRM_US_AWXD_TC_004
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_common_vars.yml
-    - ../roles/common/vars/main.yml
-  tasks:
-    - name: Down internet connectivity
-      lineinfile:
-        path: /etc/hosts
-        line: "172.16.0.5 github.com"
-        state: present
-        backup: yes
-      tags: TC_004
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-
-      rescue:
-        - name: Validate internet connectivity failure message
-          assert:
-            that: internet_status in internet_value.msg
-            success_msg: "{{ internet_check_success_msg }}"
-            fail_msg: "{{ internet_check_fail_msg }}"
-      tags: TC_004
-
-    - name: Up internet connectivity
-      lineinfile:
-        path: /etc/hosts
-        line: "172.16.0.5 github.com"
-        state: absent
-      tags: TC_004
-
-# Testcase OMNIA_CRM_US_AWXD_TC_005
-# Execute web_ui role in management station and reboot the server
-- name: OMNIA_CRM_US_AWXD_TC_005
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-    - name: Get last uptime of the server
-      command: uptime -s
-      register: uptime_status
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_005
-
-    - name: Get current date
-      command: date +"%Y-%m-%d %H"
-      register: current_time
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_005
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_005
-
-    - name: Reboot localhost
-      command: reboot
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_005
-
-    - name: Inspect AWX web container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: awx_container_status
-      tags: TC_005
-
-    - name: Verify AWX container is running after reboot
-      assert:
-        that:
-          - "'running' in awx_container_status.container.State.Status"