
Issue #568: Updating comments on input_params

Signed-off-by: cgoveas <cassandra.goveas@dell.com>
cgoveas 3 years ago
parent commit
2d249e47c0

+ 22 - 22
control_plane/input_params/base_vars.yml

@@ -14,60 +14,60 @@
 ---
 
 # Path to directory hosting ansible config file (ansible.cfg file)
-# Default value is /etc/ansible
+# Default value: /etc/ansible
 # This directory is on the host running ansible, if ansible is installed using dnf
 # If ansible is installed using pip, this path should be set
 ansible_conf_file_path: /etc/ansible
 
 # This variable is used to enable ethernet switch configuration
-# It accepts boolean values "true" or "false". 
-# By default its value is "false".
+# Accepted values: "true" or "false"
+# Default value: "false"
 # If ethernet switch support is needed set this to "true"
 ethernet_switch_support: true
 
 # This variable is used to enable infiniband switch configuration
-# It accepts boolean values "true" or "false". 
-# By default its value is "false".
+# Accepted values: "true" or "false"
+# Default value: "false"
 # If infiniband configuration is needed set this to "true"
 ib_switch_support: true
 
 # This variable is used to enable powervault configuration
-# It accepts boolean values "true" or "false". 
-# By default its value is "false".
-# If powervault configuration is needed set this to "true"
+# Accepted values: "true" or "false"
+# Default value: "false"
+# If powervault configuration is needed, set this to "true"
 powervault_support: false
 
 # The nic/ethernet card that will be connected to the public internet.
-# Default value of nic is eno2
+# Default value: "eno2"
 public_nic: "eno2"
 
 # Kubernetes pod network CIDR for appliance k8s network
 # Make sure this value does not overlap with any of the host networks.
-# Default value is "192.168.0.0/16"
+# Default value: "192.168.0.0/16"
 appliance_k8s_pod_net_cidr: "192.168.0.0/16"
 
 ### Usage: provision_idrac, network_ib, network_ethernet, powervault_me4 ###
 
 # The trap destination IP address is the IP address of the SNMP Server where the trap will be sent
-# If this variable is left blank, it means SNMP will be disabled
+# If this variable is left blank, SNMP will be disabled.
 # Provide a valid SNMP server IP
 snmp_trap_destination: ""
 
 # Provide the snmp community name needed
-# By default this is set to "public"
+# Default value:"public"
 snmp_community_name: "public"
 
 ### Usage: webui_awx ###
 
 # Organization name that is created in AWX.
-# The default value is “DellEMC”
+# Default value: "DellEMC"
 awx_organization: "DellEMC"
 
 ### Usage: provision_cobbler, provision_idrac ###
 
 # This variable is used to set node provisioning method
-# It accepts values: idrac, pxe
-# Default value is "idrac"
+# Accepted values: idrac, pxe
+# Default value: "idrac"
 # If provisioning needs to be done through cobbler, set it to "pxe"
 # If idrac license is not present, provisioning mode will be set to "pxe"
 provision_method: "idrac"
@@ -85,7 +85,7 @@ language: "en-US"
 # This is the path where the user has to place the iso image that needs to be provisioned in target nodes.
 # The iso file should be CentOS7-2009-minimal edition.
 # Other iso files are not supported.
-# Mandatory value required
+# Required field
 iso_file_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
 
 # Default lease time that will be used by dhcp
@@ -93,7 +93,7 @@ iso_file_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
 # Min: 21600 seconds
 # Default: 86400 seconds
 # Max: 31536000 seconds
-# Mandatory value required
+# Required field
 default_lease_time: "86400"
 
 ### Usage: control_plane_device ###
@@ -101,12 +101,12 @@ default_lease_time: "86400"
 # The nic/ethernet card that needs to be connected to provision 
 # the fabric, idrac and powervault.
 # This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is eno1
+# Default value: "eno1"
 mngmnt_network_nic: "eno1"
 
 # The dhcp range for assigning the IPv4 address
 # Example: 172.17.0.1
-# Mandatory value required
+# Required field
 mngmnt_network_dhcp_start_range: "172.19.0.100"
 mngmnt_network_dhcp_end_range: "172.19.0.200"
 
@@ -121,12 +121,12 @@ mngmnt_mapping_file_path: ""
 
 # The nic/ethernet card that needs to be connected to provision the OS of bare metal servers
 # This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is eno3
+# Default value: "eno3"
 host_network_nic: "eno3"
 
 # The dhcp range for assigning the IPv4 address
 # Example: 172.17.0.1
-# Mandatory value required
+# Required field
 host_network_dhcp_start_range: "172.17.0.100"
 host_network_dhcp_end_range: "172.17.0.200"
 
@@ -146,7 +146,7 @@ host_mapping_file_path: ""
 
 # The nic/ethernet card that needs to be connected to configure infiniband switch
 # This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is ib0
+# Default value: "ib0"
 ib_network_nic: "ib0"
 
 # The dhcp range for assigning the IPv4 address
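
For illustration, the DHCP range variables above can be sanity-checked with a short ad-hoc playbook before running the control plane. This is a hypothetical sketch, not part of this commit; the file name check_dhcp_ranges.yml and the availability of the netaddr-backed ipaddr filter (pip install netaddr) are assumptions:

---
# check_dhcp_ranges.yml - hypothetical helper, not shipped with Omnia
- hosts: localhost
  connection: local
  vars_files:
    - control_plane/input_params/base_vars.yml
  tasks:
    # ipaddr returns the address if valid, False otherwise, so it can
    # be used directly as a truthy assertion
    - name: Assert the management DHCP range holds valid IPv4 addresses
      assert:
        that:
          - mngmnt_network_dhcp_start_range | ipaddr
          - mngmnt_network_dhcp_end_range | ipaddr
        fail_msg: "mngmnt_network_dhcp_start/end_range must be valid IPv4 addresses"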

+ 17 - 17
control_plane/input_params/idrac_tools_vars.yml

@@ -23,25 +23,25 @@
 # 2FA will be enabled only if email notification is working using SMTP.
 
 # DNS domain name to set to iDRAC
-# Mandatory value required
+# Required field
 dns_domain_name: ""
 
 # IPV4 static DNS1 and DNS2
-# Mandatory value required
+# Required field
 ipv4_static_dns1: ""
 ipv4_static_dns2: ""
 
 # Server IP used for SMTP
-# Mandatory value required
+# Required field
 smtp_server_ip: ""
 
 # Email address used for enabling 2FA
-# Mandatory value required
+# Required field
 use_email_address_2fa: ""
 
 # SMTP authentication disabled by default
-# If enabled provide smtp username and password
-# Mandatory value required
+# If enabled, provide smtp username and password
+# Required field
 smtp_authentication: "disabled"
 
 # Username used for SMTP
@@ -60,13 +60,13 @@ smtp_password: ""
 
 # CA certification validation value
 # cert_validation_enable supports only disabled
-# CA Certificate can't be upload using this playbook idrac_ldap.yml.
+# CA Certificate can't be uploaded using the playbook idrac_ldap.yml.
 # If required, the user has to manually upload the CA certificate after idrac_ldap.yml execution.
 cert_validation_enable: "disabled"
 
 # Server address used for LDAP
-# Mandatory value required
-# Recommended to provided LDAP server ip address instead of FQDN
+# Required field
+# It's recommended to provide the LDAP server IP address instead of the FQDN
 ldap_server_address: ""
 
 # TCP port at which the LDAP server is listening for connections
@@ -83,7 +83,7 @@ bind_password: ""
 
 # The distinguished name of the search base. 
 # For example: dc=mycompany,dc=com
-# Mandatory value required
+# Required field
 base_dn: ""
 
 # User attribute used for search in LDAP server
@@ -92,9 +92,9 @@ user_attribute: ""
 # Group attribute used for search in LDAP server
 group_attribute: ""
 
-# Specify group attribute type is DN or not
-# Supported options are "enabled" or "disabled"
-# By default group_attribute_is_dn will be disabled
+# Specify whether the group attribute type is DN or not
+# Accepted Values: "enabled" or "disabled"
+# Default value: disabled
 group_attribute_is_dn: "disabled"
 
 # Search scope is related to the Base DN. 
@@ -104,11 +104,11 @@ search_filter: ""
 # DN of the LDAP group to be added
 # Supports adding only one role group
 # For example: cn=Admins,cn=Group,dc=mycompany,dc=com
-# Mandatory value required
+# Required field
 role_group1_dn: ""
 
 # Privilege for LDAP role group 1
-# Supported options are Administrator, Operator, ReadOnly
-# By default role_group1_privilege will be Administrator
-# Mandatory value required
+# Supported values: "Administrator", "Operator", "ReadOnly"
+# Default value: "Administrator"
+# Required field
 role_group1_privilege: "Administrator"
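
For illustration, a filled-in LDAP section might look like the following. Every value is a placeholder: base_dn and role_group1_dn reuse the example formats from the comments above, and the IP addresses and domain name are invented for the sketch:

# Hypothetical example values for idrac_tools_vars.yml (placeholders only)
dns_domain_name: "mycompany.com"
ipv4_static_dns1: "192.168.0.10"
ipv4_static_dns2: "192.168.0.11"
ldap_server_address: "192.168.0.50"
base_dn: "dc=mycompany,dc=com"
role_group1_dn: "cn=Admins,cn=Group,dc=mycompany,dc=com"
role_group1_privilege: "Administrator"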

+ 13 - 13
control_plane/input_params/idrac_vars.yml

@@ -16,13 +16,13 @@
 ### Usage: provision_idrac ###
 
 # This indicates the system profile name used for BIOS configuration of the server
-# The values supported are - "Performance", "PerformancePerWatt(DAPC)", "PerformancePerWatt(OS)", "WorkstationPerformance".
-# The default value is "Performance"
+# Accepted values: "Performance", "PerformancePerWatt(DAPC)", "PerformancePerWatt(OS)", "WorkstationPerformance"
+# Default value: "Performance"
 idrac_system_profile: "Performance"
 
 # Boolean value indicating whether OMNIA should perform firmware update or not
-# It takes values "true" or "false" indicating required and not required cases respectively.
-# Default value is "true"
+# Accepted values: true, false
+# Default value: true
 firmware_update_required: true
 
 # This is the list of poweredge server models
@@ -40,24 +40,24 @@ poweredge_model: C6420
 ### Usage: idrac_secure_boot ###
 
 # Specify whether secure boot mode is to be enabled
-# By default secure boot will be "disabled"
-# If required it can be "enabled"
+# Default value: "disabled"
+# Accepted values: "enabled", "disabled"
 uefi_secure_boot: "disabled"
 
 ### Usage: idrac_system_lockdown ###
 
 # Specify whether system lockdown is to be enabled
-# By default system lockdown will be "disabled"
-# If required it can be "enabled"
+# Default value: "disabled"
+# Accepted values: "enabled", "disabled"
 # Make sure system_lockdown is enabled only after OS provisioning is completed
 system_lockdown: "disabled"
 
 ### Usage: idrac_2fa ###
 
 # Specify whether two factor authentication is to be enabled
-# By default two factor authentication will be "disabled"
-# If required it can be "enabled"
-# [WARNING] Once 2FA is enabled, user has to disable 2FA manually. Other iDRAC playbooks won't run if 2FA is enabled
+# Default value: "disabled"
+# Accepted values: "enabled", "disabled"
+# [WARNING] Once 2FA is enabled, the user has to disable 2FA manually. Other iDRAC playbooks won't run if 2FA is enabled
 # Update 2FA input parameters in idrac_tools_vars.yml if two_factor_authentication is enabled
 # Command to edit idrac_tools_vars.yml: ansible-vault edit idrac_tools_vars.yml --vault-password-file .idrac_vault_key
 two_factor_authentication: "disabled"
@@ -65,8 +65,8 @@ two_factor_authentication: "disabled"
 ### Usage: idrac_ldap ###
 
 # Specify whether LDAP directory services are to be enabled
-# By default LDAP directory services will be disabled
-# If required it can be "enabled"
+# Default value: "disabled"
+# Accepted values: "enabled", "disabled"
 # Update LDAP input parameters in idrac_tools_vars.yml if ldap_directory_services is enabled
 # Command to edit idrac_tools_vars.yml: ansible-vault edit idrac_tools_vars.yml --vault-password-file .idrac_vault_key
 ldap_directory_services: "disabled"
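
Since every option in this file has a small documented value set, a pre-flight assertion can catch typos before any iDRAC playbook runs. This is a hypothetical sketch, not shipped with Omnia; the value lists are taken verbatim from the comments above:

---
# check_idrac_vars.yml - hypothetical helper, not part of this commit
- hosts: localhost
  connection: local
  vars_files:
    - control_plane/input_params/idrac_vars.yml
  tasks:
    - name: Assert idrac_vars.yml values are within their documented sets
      assert:
        that:
          - idrac_system_profile in ["Performance", "PerformancePerWatt(DAPC)", "PerformancePerWatt(OS)", "WorkstationPerformance"]
          - uefi_secure_boot in ["enabled", "disabled"]
          - system_lockdown in ["enabled", "disabled"]
          - two_factor_authentication in ["enabled", "disabled"]
          - ldap_directory_services in ["enabled", "disabled"]
        fail_msg: "One or more idrac_vars.yml values are outside their accepted sets"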

+ 6 - 6
control_plane/input_params/login_vars.yml

@@ -16,9 +16,9 @@
 ### Usage: provision_cobbler, provision_idrac ###
 
 # Password used while deploying OS on bare metal servers.
-# The Length of the password should be at least 8.
+# The length of the password should be at least 8 characters.
 # The password must not contain -,\, ',"
-# Mandatory value required
+# Required field
 provision_password: ""
 
 ### Usage: provision_cobbler ###
@@ -26,19 +26,19 @@ provision_password: ""
 # Password used for cobbler
 # The length of the password should be at least 8 characters.
 # The password must not contain -,\, ',"
-# Mandatory value required
+# Required field
 cobbler_password: ""
 
 ### Usage: provision_idrac ###
 
 # The username for idrac
 # The username must not contain -,\, ',"
-# Mandatory value required
+# Required field
 idrac_username: ""
 
 # Password used for idrac
 # The password must not contain -,\, ',"
-# Mandatory value required
+# Required field
 idrac_password: ""
 
 ### Usage: webui_awx ###
@@ -46,7 +46,7 @@ idrac_password: ""
 # Password used for awx UI
 # The length of the password should be at least 8 characters.
 # The password must not contain -,\, ',"
-#awx_password: ""
+awx_password: ""
 
 ### Usage: network_ethernet ###
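
The password rules repeated through this file (at least 8 characters, none of -, \, ', ") can likewise be expressed as a hypothetical assert task. The sketch below assumes Jinja2 2.10+ for the in test and that login_vars.yml is not vault-encrypted at the time of the check:

---
# check_login_vars.yml - hypothetical helper, not part of this commit
- hosts: localhost
  connection: local
  vars_files:
    - control_plane/input_params/login_vars.yml
  vars:
    forbidden_chars: ['-', '\', "'", '"']   # the characters the comments forbid
  tasks:
    - name: Enforce the documented provision_password rules
      assert:
        that:
          - provision_password | length >= 8
          # select('in', ...) keeps forbidden characters found in the password
          - forbidden_chars | select('in', provision_password) | list | length == 0
        fail_msg: "provision_password breaks the rules documented in login_vars.yml"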
 

+ 16 - 16
control_plane/input_params/powervault_me4_vars.yml

@@ -16,11 +16,11 @@
 ### Usage: powervault_me4 ###
 
 # This variable indicates the language selection
-# Currently only "English" is supported
+# Currently, only "English" is supported
 locale: "English"
 
 # Specify the system name to identify the system
-# By default it is set to "Uninitialized_Name"
+# Default value: "Uninitialized_Name"
 # Length should be less than 30 characters and it should not contain spaces.
 # Optional
 powervault_me4_system_name: "Uninitialized_Name"
@@ -33,13 +33,13 @@ powervault_me4_system_name: "Unintialized_Name"
 # info: Sends notifications for all events.
 # none: All events are excluded from trap notification and traps are disabled. 
 # However, Critical events and managed-logs events 400–402 are sent regardless of the notification setting.
-# Default value is "none"
-# Compulsory
+# Default value: "none"
+# Required field
 powervault_me4_snmp_notify_level: "none"
 
 # The type of pool to be created on the powervault
 # It can be either linear or virtual.
-# Default: linear
+# Default value: "linear"
 powervault_me4_pool_type: "linear"
 
 # Specify the required RAID Level
@@ -49,8 +49,8 @@ powervault_me4_pool_type: "linear"
 # r6/raid6: 4-16
 # r10/raid10: 4-16
 # adapt: 12-128
-# Default value is "raid1"
-# Compulsory
+# Default value: "raid1"
+# Required field
 powervault_me4_raid_levels: "raid1"
 
 # Specify the range of disks
@@ -61,31 +61,31 @@ powervault_me4_raid_levels: "raid1"
 # For ME4012 - 0.0-0.11 are the allowed values
 # For RAID 10 disk range should be given in subgroups separated by colons with no spaces.
 # RAID-10 example:1.1-2:1.3-4:1.7,1.10
-# Default value is 0.1-2
-# Compulsory
+# Default value: 0.1-2
+# Required field
 powervault_me4_disk_range: "0.1-2"
 
 # Specify the volume names
-# the default value is "k8s_volume" and "slurm_volume"
-# Compulsory
+# Default value "k8s_volume" and "slurm_volume"
+# Required field
 powervault_me4_k8s_volume_name: "k8s_volume"
 powervault_me4_slurm_volume_name: "slurm_volume"
 
 # Specify the disk group name
-# Mandatory
+# Required field
 powervault_me4_disk_group_name: "omnia"
 
 # Specify the percentage for partition in disk
 # Default value is "60%"
 # Min: 5
 # Max: 90
-# Compulsory
+# Required field
 powervault_me4_disk_partition_size: "60"
 
 # Specify the volume size
 # Format: 100GB <SizeGB>
 # Default: 100GB
-# Compulsory
+# Required field
 powervault_me4_volume_size: "100GB"
 
 # Specify the pool for disk and volumes
@@ -95,6 +95,6 @@ powervault_me4_pool: "a"
 
 # Specify the nic of the server with which Powervault is connected.
 # Make sure nfs server also has 3 nics (for internet, OS provision and powervault connection)
-# Default value is em1.
-# Compulsory
+# Default value: "em1".
+# Required field
 powervault_me4_server_nic: "em1"
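
For illustration, the disk-range rules above pair with the RAID level like this; the RAID-10 subgroup syntax comes straight from the comment, and the disk IDs are placeholders:

# Hypothetical example: raid10 needs 4-16 disks in colon-separated subgroups
powervault_me4_raid_levels: "raid10"
powervault_me4_disk_range: "1.1-2:1.3-4"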

+ 0 - 49
control_plane/test/appliance_config_empty.yml

@@ -1,49 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Password used while deploying OS on bare metal servers and for Cobbler UI.
-# The Length of the password should be at least 8.
-# The password must not contain -,\, ',"
-provision_password: ""
-
-# Password used for the AWX UI.
-# The Length of the password should be at least 8.
-# The password must not contain -,\, ',"
-awx_password: ""
-
-# The nic/ethernet card that needs to be connected to the HPC switch.
-# This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is em1.
-hpc_nic: "em1"
-
-# The nic/ethernet card that will be connected to the public internet.
-# Default value of nic is em2
-public_nic: "em2"
-
-# This is the  path where user has kept the iso image that needs to be provisioned in target nodes.
-# The iso file should be CentOS7-2009-minimal edition.
-# Other iso files are not supported.
-iso_file_path: ""
-
-# The mapping file consists of the MAC address and its respective IP address and hostname.
-# The format of mapping file should be MAC,hostname,IP and must be a CSV file.
-# A template for mapping file exists in omnia/examples and is named as mapping_file.csv.
-# This depicts the path where user has kept the mapping file for DHCP configurations.
-mapping_file_path: ""
-
-# The dhcp range for assigning the IPv4 address to the baremetal nodes.
-# Example: 10.1.23.1
-dhcp_start_ip_range: ""
-dhcp_end_ip_range: ""

+ 0 - 49
control_plane/test/appliance_config_test.yml

@@ -1,49 +0,0 @@
-# Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Password used while deploying OS on bare metal servers and for Cobbler UI.
-# The Length of the password should be at least 8.
-# The password must not contain -,\, ',"
-provision_password: "omnia@123"
-
-# Password used for the AWX UI.
-# The Length of the password should be at least 8.
-# The password must not contain -,\, ',"
-awx_password: "omnia@123"
-
-# The nic/ethernet card that needs to be connected to the HPC switch.
-# This nic will be configured by Omnia for the DHCP server.
-# Default value of nic is em1.
-hpc_nic: "em1"
-
-# The nic/ethernet card that will be connected to the public internet.
-# Default value of nic is em2
-public_nic: "em2"
-
-# This is the  path where user has kept the iso image that needs to be provisioned in target nodes.
-# The iso file should be CentOS7-2009-minimal edition.
-# Other iso files are not supported.
-iso_file_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
-
-# The mapping file consists of the MAC address and its respective IP address and hostname.
-# The format of mapping file should be MAC,hostname,IP and must be a CSV file.
-# A template for mapping file exists in omnia/examples and is named as mapping_file.csv.
-# This depicts the path where user has kept the mapping file for DHCP configurations.
-mapping_file_path: ""
-
-# The dhcp range for assigning the IPv4 address to the baremetal nodes.
-# Example: 10.1.23.1
-dhcp_start_ip_range: "172.17.0.10"
-dhcp_end_ip_range: "172.17.0.100"

+ 0 - 3
control_plane/test/provisioned_hosts.yml

@@ -1,3 +0,0 @@
-[all]
-172.17.0.10
-172.17.0.15

File diff suppressed because it is too large
+ 0 - 1882
control_plane/test/test_common.yml


+ 0 - 2
control_plane/test/test_mapping_file

@@ -1,2 +0,0 @@
-Mac,Hostname,IP
-xx:yy:zz:aa:bb,validation-host21,172.20.0.21

+ 0 - 608
control_plane/test/test_provision_cc.yml

@@ -1,608 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Testcase OMNIA_DIO_US_CC_TC_004
-# Execute provision role in management station and verify cobbler configuration
-- name: OMNIA_DIO_US_CC_TC_004
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-    - "{{ test_input_config_filename }}"
-  tasks:
-    - name: Check the iso file is present
-      stat:
-        path: "{{ iso_file_path }}"
-      register: iso_status
-      tags: TC_004
-
-    - name: Fail if iso file is missing
-      fail:
-        msg: "{{ iso_fail }}"
-      when: iso_status.stat.exists == false
-      tags: TC_004
-
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_004
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_004
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_004
-
-    - name: Check the connection to cobbler UI and it returns a status 200
-      uri:
-        url: https://localhost/cobbler_web
-        status_code: 200
-        return_content: yes
-        validate_certs: no
-      tags: TC_004,VERIFY_004
-
-    - name: Fetch cobbler version in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler version
-      changed_when: false
-      register: cobbler_version
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler version
-      assert:
-        that:
-          - "'Cobbler' in cobbler_version.stdout"
-          - "'Error' not in cobbler_version.stdout"
-        fail_msg: "{{ cobbler_version_fail_msg }}"
-        success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Run cobbler check command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler check
-      changed_when: false
-      register: cobbler_check
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler check command output
-      assert:
-        that:
-          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
-          - "'Error' not in cobbler_check.stdout"
-        fail_msg: "{{ cobbler_check_fail_msg }}"
-        success_msg: "{{ cobbler_check_success_msg }}"
-      ignore_errors: yes
-      tags: TC_004,VERIFY_004
-
-    - name: Run cobbler sync command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler sync
-      changed_when: false
-      register: cobbler_sync
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler sync command output
-      assert:
-        that:
-          - "'TASK COMPLETE' in cobbler_sync.stdout"
-          - "'Fail' not in cobbler_sync.stdout"
-          - "'Error' not in cobbler_sync.stdout"
-        fail_msg: "{{ cobbler_sync_fail_msg }}"
-        success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Fetch cobbler distro list
-      command: docker exec {{ docker_container_name }} cobbler distro list
-      changed_when: false
-      register: cobbler_distro_list
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler distro list
-      assert:
-        that:
-          - "'CentOS' in cobbler_distro_list.stdout"
-        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
-        success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Fetch cobbler profile list
-      command: docker exec cobbler cobbler profile list
-      changed_when: false
-      register: cobbler_profile_list
-      tags: TC_004,VERIFY_004
-
-    - name: Verify cobbler profile list
-      assert:
-        that:
-          - "'CentOS' in cobbler_profile_list.stdout"
-        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
-        success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Check kickstart file
-      shell: |
-        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exist" || echo "File does not exist"
-      changed_when: false
-      register: kickstart_file_status
-      tags: TC_004,VERIFY_004
-
-    - name: Verify kickstart file present
-      assert:
-        that:
-          - "'File exist' in kickstart_file_status.stdout"
-        fail_msg: "{{ kickstart_file_fail_msg }}"
-        success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Check crontab list
-      command: docker exec cobbler crontab -l
-      changed_when: false
-      register: crontab_list
-      tags: TC_004,VERIFY_004
-
-    - name: Verify crontab list
-      assert:
-        that:
-          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
-          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
-        fail_msg: "{{ crontab_list_fail_msg }}"
-        success_msg: "{{ crontab_list_success_msg }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
-      command: docker exec cobbler systemctl is-active {{ item }}
-      changed_when: false
-      ignore_errors: yes
-      register: cobbler_service_check
-      with_items: "{{ cobbler_services }}"
-      tags: TC_004,VERIFY_004
-
-    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
-      assert:
-        that:
-          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
-          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
-          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
-        fail_msg: "{{ cobbler_service_check_fail_msg }}"
-        success_msg: "{{ cobbler_service_check_success_msg }}"
-      with_sequence: start=0 end=3
-      tags: TC_004,VERIFY_004
-
-# Testcase OMNIA_DIO_US_CDIP_TC_005
-# Execute provison role in management station where cobbler container is configured
-- name: OMNIA_DIO_US_CDIP_TC_005
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_005
-
-    - name: Check the connection to cobbler UI and it returns a status 200
-      uri:
-        url: https://localhost/cobbler_web
-        status_code: 200
-        return_content: yes
-        validate_certs: no
-      tags: TC_005,VERIFY_005
-
-    - name: Fetch cobbler version in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler version
-      changed_when: false
-      register: cobbler_version
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler version
-      assert:
-        that:
-          - "'Cobbler' in cobbler_version.stdout"
-          - "'Error' not in cobbler_version.stdout"
-        fail_msg: "{{ cobbler_version_fail_msg }}"
-        success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Run cobbler check command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler check
-      changed_when: false
-      register: cobbler_check
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler check command output
-      assert:
-        that:
-          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
-          - "'Error' not in cobbler_check.stdout"
-        fail_msg: "{{ cobbler_check_fail_msg }}"
-        success_msg: "{{ cobbler_check_success_msg }}"
-      ignore_errors: yes
-      tags: TC_005,VERIFY_005
-
-    - name: Run cobbler sync command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler sync
-      changed_when: false
-      register: cobbler_sync
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler sync command output
-      assert:
-        that:
-          - "'TASK COMPLETE' in cobbler_sync.stdout"
-          - "'Fail' not in cobbler_sync.stdout"
-          - "'Error' not in cobbler_sync.stdout"
-        fail_msg: "{{ cobbler_sync_fail_msg }}"
-        success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Fetch cobbler distro list
-      command: docker exec {{ docker_container_name }} cobbler distro list
-      changed_when: false
-      register: cobbler_distro_list
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler distro list
-      assert:
-        that:
-          - "'CentOS' in cobbler_distro_list.stdout"
-        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
-        success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Fetch cobbler profile list
-      command: docker exec cobbler cobbler profile list
-      changed_when: false
-      register: cobbler_profile_list
-      tags: TC_005,VERIFY_005
-
-    - name: Verify cobbler profile list
-      assert:
-        that:
-          - "'CentOS' in cobbler_profile_list.stdout"
-        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
-        success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Check kickstart file
-      shell: |
-        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exist" || echo "File does not exist"
-      changed_when: false
-      register: kickstart_file_status
-      tags: TC_005,VERIFY_005
-
-    - name: Verify kickstart file present
-      assert:
-        that:
-          - "'File exist' in kickstart_file_status.stdout"
-        fail_msg: "{{ kickstart_file_fail_msg }}"
-        success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Check crontab list
-      command: docker exec cobbler crontab -l
-      changed_when: false
-      register: crontab_list
-      tags: TC_005,VERIFY_005
-
-    - name: Verify crontab list
-      assert:
-        that:
-          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
-          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
-        fail_msg: "{{ crontab_list_fail_msg }}"
-        success_msg: "{{ crontab_list_success_msg }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
-      command: docker exec cobbler systemctl is-active {{ item }}
-      changed_when: false
-      ignore_errors: yes
-      register: cobbler_service_check
-      with_items: "{{ cobbler_services }}"
-      tags: TC_005,VERIFY_005
-
-    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
-      assert:
-        that:
-          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
-          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
-          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
-        fail_msg: "{{ cobbler_service_check_fail_msg }}"
-        success_msg: "{{ cobbler_service_check_success_msg }}"
-      with_sequence: start=0 end=3
-      tags: TC_005,VERIFY_005
-
-# Testcase OMNIA_DIO_US_CC_TC_006
-# Execute provision role in management station where already one container present
-- name: OMNIA_DIO_US_CC_TC_006
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_006
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_006
-
-    - name: Create docker image
-      docker_image:
-        name: ubuntu
-        tag: latest
-        source: pull
-      tags: TC_006
-
-    - name: Create docker container
-      command: docker run -dit ubuntu
-      register: create_docker_container
-      changed_when: true
-      args:
-        warn: false
-      tags: TC_006
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_006
-
-    - name: Check the connection to cobbler UI and it returns a status 200
-      uri:
-        url: https://localhost/cobbler_web
-        status_code: 200
-        return_content: yes
-        validate_certs: no
-      tags: TC_006,VERIFY_006
-
-    - name: Fetch cobbler version in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler version
-      changed_when: false
-      register: cobbler_version
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler version
-      assert:
-        that:
-          - "'Cobbler' in cobbler_version.stdout"
-          - "'Error' not in cobbler_version.stdout"
-        fail_msg: "{{ cobbler_version_fail_msg }}"
-        success_msg: "{{ cobbler_version_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Run cobbler check command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler check
-      changed_when: false
-      register: cobbler_check
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler check command output
-      assert:
-        that:
-          - "'The following are potential configuration items that you may want to fix' not in cobbler_check.stdout"
-          - "'Error' not in cobbler_check.stdout"
-        fail_msg: "{{ cobbler_check_fail_msg }}"
-        success_msg: "{{ cobbler_check_success_msg }}"
-      ignore_errors: yes
-      tags: TC_006,VERIFY_006
-
-    - name: Run cobbler sync command in cobbler container
-      command: docker exec {{ docker_container_name }} cobbler sync
-      changed_when: false
-      register: cobbler_sync
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler sync command output
-      assert:
-        that:
-          - "'TASK COMPLETE' in cobbler_sync.stdout"
-          - "'Fail' not in cobbler_sync.stdout"
-          - "'Error' not in cobbler_sync.stdout"
-        fail_msg: "{{ cobbler_sync_fail_msg }}"
-        success_msg: "{{ cobbler_sync_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Fetch cobbler distro list
-      command: docker exec {{ docker_container_name }} cobbler distro list
-      changed_when: false
-      register: cobbler_distro_list
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler distro list
-      assert:
-        that:
-          - "'CentOS' in cobbler_distro_list.stdout"
-        fail_msg: "{{ cobbler_distro_list_fail_msg }}"
-        success_msg: "{{ cobbler_distro_list_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Fetch cobbler profile list
-      command: docker exec cobbler cobbler profile list
-      changed_when: false
-      register: cobbler_profile_list
-      tags: TC_006,VERIFY_006
-
-    - name: Verify cobbler profile list
-      assert:
-        that:
-          - "'CentOS' in cobbler_profile_list.stdout"
-        fail_msg: "{{ cobbler_profile_list_fail_msg }}"
-        success_msg: "{{ cobbler_profile_list_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Check kickstart file
-      shell: |
-        docker exec {{ docker_container_name }} [ -f /var/lib/cobbler/kickstarts/{{ kickstart_filename }} ] && echo "File exist" || echo "File does not exist"
-      changed_when: false
-      register: kickstart_file_status
-      tags: TC_006,VERIFY_006
-
-    - name: Verify kickstart file present
-      assert:
-        that:
-          - "'File exist' in kickstart_file_status.stdout"
-        fail_msg: "{{ kickstart_file_fail_msg }}"
-        success_msg: "{{ kickstart_file_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Check crontab list
-      command: docker exec cobbler crontab -l
-      changed_when: false
-      register: crontab_list
-      tags: TC_006,VERIFY_006
-
-    - name: Verify crontab list
-      assert:
-        that:
-          - "'* * * * * ansible-playbook /root/tftp.yml' in crontab_list.stdout"
-          - "'5 * * * * ansible-playbook /root/inventory_creation.yml' in crontab_list.stdout"
-        fail_msg: "{{ crontab_list_fail_msg }}"
-        success_msg: "{{ crontab_list_success_msg }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Check tftp,dhcpd,xinetd,cobblerd service is running
-      command: docker exec cobbler systemctl is-active {{ item }}
-      changed_when: false
-      ignore_errors: yes
-      register: cobbler_service_check
-      with_items: "{{ cobbler_services }}"
-      tags: TC_006,VERIFY_006
-
-    - name: Verify tftp,dhcpd,xinetd,cobblerd service is running
-      assert:
-        that:
-          - "'active' in cobbler_service_check.results[{{ item }}].stdout"
-          - "'inactive' not in cobbler_service_check.results[{{ item }}].stdout"
-          - "'unknown' not in cobbler_service_check.results[{{ item }}].stdout"
-        fail_msg: "{{ cobbler_service_check_fail_msg }}"
-        success_msg: "{{ cobbler_service_check_success_msg }}"
-      with_sequence: start=0 end=3
-      tags: TC_006,VERIFY_006
-
-    - name: Delete the ubuntu container
-      docker_container:
-        name: "{{ create_docker_container.stdout }}"
-        state: absent
-      tags: TC_006
-
-    - name: Delete the ubuntu umage
-      docker_image:
-        name: ubuntu
-        state: absent
-      tags: TC_006
-
-# Testcase OMNIA_DIO_US_CC_TC_007
-# Execute provision role in management station and reboot management station
-- name: OMNIA_DIO_US_CC_TC_007
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Check last uptime of the server
-      command: uptime -s
-      register: uptime_status
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_007
-
-    - name: Check current date
-      command: date +"%Y-%m-%d %H"
-      register: current_time
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_007
-
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - name: Reboot localhost
-      command: reboot
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_007
-
-    - name: Inspect cobbler container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: cobbler_cnt_status
-      tags: TC_007,VERIFY_007
-
-    - name: Verify cobbler container is running after reboot
-      assert:
-        that: "'running' in cobbler_cnt_status.container.State.Status"
-        fail_msg: "{{ cobbler_reboot_fail_msg }}"
-        success_msg: "{{ cobbler_reboot_success_msg }}"
-      tags: TC_007,VERIFY_007

+ 0 - 183
control_plane/test/test_provision_cdip.yml

@@ -1,183 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Testcase OMNIA_DIO_US_CDIP_TC_001
-# Execute provison role in management station with os installed centos 7
-- name: OMNIA_DIO_US_CDIP_TC_001
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_001
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_001
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-            tasks_from: "{{ item }}"
-          with_items:
-           - "{{ cobbler_image_files }}"
-      tags: TC_001
-
-    - name: Inspect cobbler docker image
-      docker_image_info:
-        name: "{{ docker_image_name }}"
-      register: cobbler_image_status
-      tags: TC_001,VERIFY_001
-
-    - name: Validate cobbler docker image
-      assert:
-        that:
-          - cobbler_image_status.images
-        fail_msg: "{{ cobbler_img_fail_msg }}"
-        success_msg: "{{ cobbler_img_success_msg }}"
-      tags: TC_001,VERIFY_001
-
-    - name: Inspect cobbler container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: cobbler_cnt_status
-      tags: TC_001,VERIFY_001
-
-    - name: Validate cobbler docker container
-      assert:
-        that:
-          - cobbler_cnt_status.exists
-        fail_msg: "{{ cobbler_cnt_fail_msg }}"
-        success_msg: "{{ cobbler_cnt_success_msg }}"
-      tags: TC_001,VERIFY_001
-
-# Testcase OMNIA_DIO_US_CDIP_TC_002
-# Execute provison role in management station where cobbler container and image already created
-- name: OMNIA_DIO_US_CDIP_TC_002
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-      tags: TC_002
-
-    - name: Inspect cobbler docker image
-      docker_image_info:
-        name: "{{ docker_image_name }}"
-      register: cobbler_image_status
-      tags: TC_002,VERIFY_002
-
-    - name: Validate cobbler docker image
-      assert:
-        that:
-          - cobbler_image_status.images
-        fail_msg: "{{ cobbler_img_fail_msg }}"
-        success_msg: "{{ cobbler_img_success_msg }}"
-      tags: TC_002,VERIFY_002
-
-    - name: Inspect cobbler container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: cobbler_cnt_status
-      tags: TC_002,VERIFY_002
-
-    - name: Validate cobbler docker container
-      assert:
-        that:
-          - cobbler_cnt_status.exists
-        fail_msg: "{{ cobbler_cnt_fail_msg }}"
-        success_msg: "{{ cobbler_cnt_success_msg }}"
-      tags: TC_002,VERIFY_002
-
-# Testcase OMNIA_DIO_US_CDIP_TC_003
-# Execute provison role in management station where docker service not running
-- name: OMNIA_DIO_US_CDIP_TC_003
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete the cobbler container if exits
-      docker_container:
-        name: "{{ docker_container_name }}"
-        state: absent
-      tags: TC_003
-
-    - name: Delete docker image if exists
-      docker_image:
-        name: "{{ docker_image_name }}"
-        tag: "{{ docker_image_tag }}"
-        state: absent
-      tags: TC_003
-
-    - name: Stop docker service
-      service:
-        name: docker
-        state: stopped
-      tags: TC_003
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Call provision role
-          include_role:
-            name: ../roles/provision
-
-        - name: Docker service stopped usecase success message
-          debug:
-            msg: "{{ docker_check_success_msg }}"
-
-      rescue:
-        - name: Docker service stopped usecase fail message
-          fail:
-            msg: "{{ docker_check_fail_msg }}"
-
-      always:
-        - name: Start docker service
-          service:
-            name: docker
-            state: started
-      tags: TC_003

+ 0 - 294
control_plane/test/test_provision_ndod.yml

@@ -1,294 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# OMNIA_DIO_US_NDOD_TC_009
-# Execute provison role in management station and  PXE boot one compute node
-- name: OMNIA_DIO_US_NDOD_TC_009
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/common/vars/main.yml
-  tasks:
-    - name: Set ip address of the compute node
-      set_fact:
-        single_node_ip_address: "{{ groups[cobbler_groupname][0] }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_009,VERIFY_009
-
-    - name: Check input config file is encrypted
-      command: cat {{ test_input_config_filename }}
-      changed_when: false
-      register: config_content
-      tags: TC_009,VERIFY_009
-
-    - name: Decrpyt input_config.yml
-      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
-      changed_when: false
-      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
-      tags: TC_009,VERIFY_009
-
-    - name: Include variable file input_config.yml
-      include_vars: "{{ test_input_config_filename }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Create inventory file
-      lineinfile:
-        path: inventory
-        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-        create: yes
-        mode: '{{ file_permission }}'
-      tags: TC_009,VERIFY_009
-
-    - meta: refresh_inventory
-      tags: TC_009,VERIFY_009
-
-    - name: Validate authentication of username and password
-      command: ansible {{ single_node_ip_address }} -m ping -i inventory
-      register: validate_login
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_009,VERIFY_009
-
-    - name: Validate the authentication output
-      assert:
-        that:
-          - "'pong' in validate_login.stdout"
-          - "'SUCCESS' in validate_login.stdout"
-          - "'UNREACHABLE' not in validate_login.stdout"
-        fail_msg: "{{ authentication_fail_msg }}"
-        success_msg: "{{ authentication_success_msg }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Check hostname
-      command: ansible {{ single_node_ip_address }} -m shell -a hostname -i inventory
-      register: validate_hostname
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_009,VERIFY_009
-
-    - name: Validate the hostname
-      assert:
-        that: "'localhost' not in validate_hostname.stdout"
-        fail_msg: "{{ hostname_fail_msg }}"
-        success_msg: "{{ hostname_success_msg }}"
-      tags: TC_009,VERIFY_009
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_009,VERIFY_009
-
-# OMNIA_DIO_US_NDOD_TC_010
-# Execute provison role in management station and PXE boot two compute node
-- name: OMNIA_DIO_US_NDOD_TC_010
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_010,VERIFY_010
-
-    - name: Check input config file is encrypted
-      command: cat {{ test_input_config_filename }}
-      changed_when: false
-      register: config_content
-      tags: TC_010,VERIFY_010
-
-    - name: Decrpyt input_config.yml
-      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
-      changed_when: false
-      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
-      tags: TC_010,VERIFY_010
-
-    - name: Include variable file input_config.yml
-      include_vars: "{{ test_input_config_filename }}"
-      tags: TC_010,VERIFY_010
-
-    - name: Create inventory file
-      lineinfile:
-        path: inventory
-        line: "[nodes]"
-        create: yes
-        mode: '{{ file_permission }}'
-      tags: TC_010,VERIFY_010
-
-    - name: Edit inventory file
-      lineinfile:
-        path: inventory
-        line: "{{ item }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-      with_items:
-        - "{{ groups[cobbler_groupname] }}"
-      tags: TC_010,VERIFY_010
-
-    - meta: refresh_inventory
-      tags: TC_010,VERIFY_010
-
-    - name: Validate ip address is different for both servers
-      assert:
-        that: groups[cobbler_groupname][0] != groups[cobbler_groupname][1]
-        fail_msg: "{{ ip_address_fail_msg }}"
-        success_msg: "{{ ip_address_success_msg }}"
-      delegate_to: localhost
-      run_once: yes
-      tags: TC_010,VERIFY_010
-
-    - name: Check hostname of both servers
-      command: ansible nodes -m shell -a hostname -i inventory
-      register: node_hostname
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_010,VERIFY_010
-
-    - name: Validate hostname is different for both servers
-      assert:
-        that:
-          - node_hostname.stdout_lines[1] != node_hostname.stdout_lines[3]
-          - "'localhost' not in node_hostname.stdout_lines[1]"
-          - "'localhost' not in node_hostname.stdout_lines[3]"
-        fail_msg: "{{ hostname_fail_msg }}"
-        success_msg: "{{ hostname_success_msg }}"
-      delegate_to: localhost
-      run_once: yes
-      tags: TC_010,VERIFY_010
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      delegate_to: localhost
-      run_once: yes
-      tags: TC_010,VERIFY_010
-
-# OMNIA_DIO_US_NDOD_TC_011
-# Validate passwordless ssh connection established or not with compute nodes
-- name: OMNIA_DIO_US_NDOD_TC_011
-  hosts: localhost
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-    - ../roles/provision/vars/main.yml
-  tasks:
-    - name: Validate authentication of username and password
-      command: "ansible {{ cobbler_groupname }} -m ping -i {{ inventory_file }}"
-      register: validate_login
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_011,VERIFY_011
-
-    - name: Validate the passwordless SSH connection
-      assert:
-        that:
-          - "'pong' in validate_login.stdout"
-          - "'SUCCESS' in validate_login.stdout"
-          - "'UNREACHABLE' not in validate_login.stdout"
-        success_msg: "{{ authentication_success_msg }}"
-        fail_msg: "{{ authentication_fail_msg }}"
-      tags: TC_011,VERIFY_011
-
-# OMNIA_DIO_US_NDOD_TC_012
-# Execute provison role in management station and reboot compute node after os provision again
-- name: OMNIA_DIO_US_NDOD_TC_012
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_provision_vars.yml
-  tasks:
-    - name: Set ip address of the compute node
-      set_fact:
-        single_node_ip_address: "{{ groups[cobbler_groupname][0] }}"
-      tags: TC_012,VERIFY_012
-
-    - name: Delete inventory if exists
-      file:
-        path: inventory
-        state: absent
-      tags: TC_012,VERIFY_012
-
-    - name: Check input config file is encrypted
-      command: cat {{ test_input_config_filename }}
-      changed_when: false
-      register: config_content
-      tags: TC_012,VERIFY_012
-
-    - name: Decrpyt input_config.yml
-      command: ansible-vault decrypt {{ test_input_config_filename }} --vault-password-file {{ vault_path }}
-      changed_when: false
-      when: "'$ANSIBLE_VAULT;' in config_content.stdout"
-      tags: TC_012,VERIFY_012
-
-    - name: Include variable file input_config.yml
-      include_vars: "{{ test_input_config_filename }}"
-      tags: TC_012,VERIFY_012
-
-    - name: Create inventory file
-      lineinfile:
-        path: inventory
-        line: "[nodes]"
-        create: yes
-        mode: '{{ file_permission }}'
-      tags: TC_012,VERIFY_012
-
-    - name: Edit inventory file
-      lineinfile:
-        path: inventory
-        line: "{{ single_node_ip_address }} ansible_user=root ansible_password={{ provision_password }} ansible_ssh_common_args='-o StrictHostKeyChecking=no'"
-      tags: TC_012,VERIFY_012
-
-    - meta: refresh_inventory
-      tags: TC_012,VERIFY_012
-
-    - name: Reboot servers
-      command: ansible nodes -m command -a reboot -i inventory
-      ignore_errors: yes
-      changed_when: true
-      tags: TC_012,VERIFY_012
-
-    - name: Wait for 10 minutes
-      pause:
-        minutes: 10
-      tags: TC_012,VERIFY_012
-
-    - name: Check ip address of servers
-      command: ansible nodes -m command -a 'ip a' -i inventory
-      ignore_errors: yes
-      changed_when: false
-      register: ip_address_after_reboot
-      tags: TC_012,VERIFY_012
-
-    - name: Validate ip address is same after reboot
-      assert:
-        that: "'{{ single_node_ip_address }}' in ip_address_after_reboot.stdout"
-        fail_msg: "{{ ip_address_fail_msg }}"
-        success_msg: "{{ ip_address_success_msg }}"
-      tags: TC_012,VERIFY_012

+ 0 - 51
control_plane/test/test_vars/test_common_vars.yml

@@ -1,51 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# vars file for test_common.yml file
-centos_version: '7.8'
-test_input_config_filename: "appliance_config_test.yml"
-empty_input_config_filename: "appliance_config_empty.yml"
-new_input_config_filename: "appliance_config_new.yml"
-password_config_file: "password_config"
-min_length_password: "testpass"
-max_length_password: "helloworld123helloworld12hello"
-long_password: "helloworld123hellowordl12hello3"
-white_space_password: "hello world 123"
-special_character_password1: "hello-world/"
-special_character_password2: "hello@$%!world"
-valid_dhcp_start_range: "172.17.0.10"
-valid_dhcp_end_range: "172.17.0.200"
-invalid_dhcp_ip: "1720.1700.1000.1000"
-wrong_dhcp_ip: "d6:dh1:dsj:10"
-valid_iso_path: "/root/CentOS-7-x86_64-Minimal-2009.iso"
-wrong_iso_path: "/root/testfile"
-valid_mapping_file_path: "test_mapping_file"
-
-docker_volume_success_msg: "Docker volume omnia-storage exists"
-docker_volume_fail_msg: "Docker volume omnia-storage does not exist"
-input_config_success_msg: "Input config file is encrypted using ansible-vault successfully"
-input_config_fail_msg: "Failed to encrypt input config file using ansible-vault"
-os_check_success_msg: "OS check passed"
-os_check_fail_msg: "OS check failed"
-internet_check_success_msg: "Internet connectivity check passed"
-internet_check_fail_msg: "Internet connectivity check failed"
-different_user_check_success_msg: "Different user execution check passed"
-different_user_check_fail_msg: "Different user execution check failed"
-selinux_check_success_msg: "selinux check passed"
-selinux_check_fail_msg: "selinux check failed"
-input_config_check_success_msg: "appliance_config.yml validation passed"
-input_config_check_fail_msg: "appliance_config.yml validation failed"
-install_package_success_msg: "Installation of package is successful"
-install_package_fail_msg: "Installation of package failed"
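
These message and boundary values are consumed by assertions in test_common.yml, which is not part of this diff. A minimal sketch of the kind of length check the password fixtures imply, with the 8-to-30 character bounds inferred from the values above:

    - name: Sanity-check the password boundary fixtures
      assert:
        that:
          - min_length_password | length == 8     # shortest accepted password
          - max_length_password | length == 30    # longest accepted password
          - long_password | length > 30           # must be rejected by validation
        success_msg: "Boundary fixtures match the 8-30 length rule"
        fail_msg: "Boundary fixtures do not match the 8-30 length rule"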

+ 0 - 85
control_plane/test/test_vars/test_provision_vars.yml

@@ -1,85 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Usage: test_provision_cdip.yml
-first_nic: "em1"
-nic1_ip_address: 172.17.0.1
-validate_password_success_msg: "Password validation successful"
-validate_password_fail_msg: "Password validation failed"
-cobbler_img_fail_msg: "Docker image cobbler does not exist"
-cobbler_img_success_msg: "Docker image cobbler exists"
-cobbler_cnt_fail_msg: "Docker container cobbler does not exist"
-cobbler_cnt_success_msg: "Docker container cobbler exists"
-nic_check_fail_msg: "NIC-1 ip address validation failed"
-nic_check_success_msg: "NIC-1 ip address validation successful"
-cobbler_image_files:
- - check_prerequisites
- - mount_iso
- - firewall_settings
- - provision_password
- - dhcp_configure
- - cobbler_image
-password_config_file: "password_config"
-test_input_config_filename: "appliance_config_test.yml"
-
-# Usage: test_provision_cc.yml
-docker_check_success_msg: "Docker service stopped use case validation successful"
-docker_check_fail_msg: "Docker service stopped use case validation failed"
-docker_ip_fail_msg: "Docker IP validation failed"
-docker_ip_success_msg: "Docker IP validation successful"
-cobbler_version_fail_msg: "Cobbler version validation failed"
-cobbler_version_success_msg: "Cobbler version validation successful"
-cobbler_check_fail_msg: "Cobbler check validation failed"
-cobbler_check_success_msg: "Cobbler check validation successful"
-cobbler_sync_fail_msg: "Cobbler sync validation failed"
-cobbler_sync_success_msg: "Cobbler sync validation successful"
-cobbler_distro_list_fail_msg: "Cobbler distro list validation failed"
-cobbler_distro_list_success_msg: "Cobbler distro list validation successful"
-cobbler_profile_list_fail_msg: "Cobbler profile list validation failed"
-cobbler_profile_list_success_msg: "Cobbler profile list validation successful"
-kickstart_file_fail_msg: "Kickstart file validation failed"
-kickstart_file_success_msg: "Kickstart file validation successful"
-cobbler_reboot_fail_msg: "Cobbler container failed to start after reboot"
-cobbler_reboot_success_msg: "Cobbler container started successfully after reboot"
-crontab_list_fail_msg: "Crontab list validation failed"
-crontab_list_success_msg: "Crontab list validation successful"
-iso_check_fail_msg: "CentOS ISO file check validation failed"
-iso_check_success_msg: "CentOS ISO file check validation successful"
-cobbler_service_check_fail_msg: "cobbler service validation failed"
-cobbler_service_check_success_msg: "cobbler service validation successful"
-kickstart_filename: "centos7.ks"
-iso_file_path: "../roles/provision/files"
-temp_iso_name: "temp_centos.iso"
-cobbler_services:
- - tftp
- - dhcpd
- - cobblerd
- - xinetd
-
-# Usage: test_provision_cdip.yml, test_provision_cc.yml, test_provision_ndod.yml
-docker_container_name: "cobbler"
-boundary_password: "testpass"
-
-# Usage: test_provision_ndod.yml
-hostname_fail_msg: "Hostname validation failed"
-hostname_success_msg: "Hostname validation successful"
-authentication_fail_msg: "Server authentication validation failed"
-authentication_success_msg: "Server authentication validation successful"
-ip_address_fail_msg: "IP address validation failed"
-ip_address_success_msg: "IP address validation successful"
-cobbler_groupname: "all"
-inventory_file: "provisioned_hosts.yml"
-file_permission: "0644"
-vault_path: ../roles/common/files/.vault_key

+ 0 - 35
control_plane/test/test_vars/test_web_ui_vars.yml

@@ -1,35 +0,0 @@
-#  Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
----
-
-# Usage: test_web_ui.yml
-return_status: 200
-fail_return_status: -1
-awx_listening_port: 8081
-time: 1
-actual_containers: 4
-package: "docker-ce"
-awx_exists_msg: "Test case passed: AWX instance is already running on your system"
-awx_not_exists_msg: "Test case failed: AWX does not exist"
-validate_password_success_msg: "Test case passed: Password validation successful"
-validate_password_fail_msg: "Test case failed: Password validation failed"
-resource_exists_success_msg: "Success: Requested resource(s) exists"
-resource_exists_fail_msg: "Failure: Requested resource(s) does not exist"
-compute_group_name: "compute"
-manager_group_name: "manager"
-tower_cli_package_name: "ansible-tower-cli"
-docker_container_name: "awx_web"
-container_up_status_success_msg: "Container is running successfully after the reboot"
-container_up_status_fail_msg: "Container is not running after the reboot"
-test_input_config_filename: appliance_config_test.yml

+ 0 - 378
control_plane/test/test_web_ui.yml

@@ -1,378 +0,0 @@
-# Copyright 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
----
-
-# Testcase OMNIA_CRM_US_AWXD_TC_001
-# Test case to verify that the prerequisites are installed and to execute the AWX deployment
-- name: OMNIA_CRM_US_AWXD_TC_001
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - ../roles/web_ui/vars/main.yml
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-    - name: Get the docker package facts
-      package_facts:
-        manager: auto
-      tags: TC_001
-
-    - name: Check if docker-ce is already installed
-      debug:
-        var: ansible_facts.packages['{{ package }}']
-      tags: TC_001
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_001
-
-    - name: Check that you can connect to github repo and it returns a status 200
-      uri:
-        url: "{{ awx_git_repo }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_001
-
-    - name: Check that you can connect to AWX UI and it returns a status 200
-      uri:
-        url: "{{ awx_ip }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_001
-
-    - name: Verify awx-server is listening on 8081
-      wait_for:
-        port: "{{ awx_listening_port }}"
-        timeout: "{{ time }}"
-      tags: TC_001
-
-    - name: Get the containers count
-      shell: |
-        set -o pipefail
-        docker ps -a | grep awx | wc -l
-      register: containers_count
-      changed_when: False
-      tags: TC_001
-
-    - name: Validate the containers count
-      assert:
-        that: containers_count.stdout | int >= actual_containers
-        success_msg: "{{ awx_exists_msg }}"
-        fail_msg: "{{ awx_not_exists_msg }}"
-      tags: TC_001
-
-# Testcase OMNIA_CRM_US_AWXD_TC_002
-# Regression test case: re-execute the deployment and verify AWX is still functional
-- name: OMNIA_CRM_US_AWXD_TC_002
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - ../roles/web_ui/vars/main.yml
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_002
-
-    - name: Check that you can connect to github repo and it returns a status 200
-      uri:
-        url: "{{ awx_git_repo }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_002
-
-    - name: Check that you can connect to AWX UI and it returns a status 200
-      uri:
-        url: "{{ awx_ip }}"
-        status_code: "{{ return_status }}"
-        return_content: true
-      tags: TC_002
-
-    - name: Verify awx-server is listening on 8081
-      wait_for:
-        port: "{{ awx_listening_port }}"
-        timeout: "{{ time }}"
-      tags: TC_002
-
-    - name: Get the containers count
-      shell: |
-        set -o pipefail
-        docker ps -a | grep awx | wc -l
-      register: containers_count
-      changed_when: False
-      tags: TC_002
-
-    - name: Validate the containers count
-      assert:
-        that: containers_count.stdout | int >= actual_containers
-        success_msg: "{{ awx_exists_msg }}"
-        fail_msg: "{{ awx_not_exists_msg }}"
-      tags: TC_002
-
-# Testcase OMNIA_CRM_US_AWXD_TC_003
-# Test case to validate the AWX configuration
-- name: OMNIA_CRM_US_AWXD_TC_003
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - ../roles/web_ui/vars/main.yml
-    - ../roles/common/vars/main.yml
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_003
-
-    - name: Get the package facts
-      package_facts:
-        manager: auto
-      tags: TC_003
-
-    - name: Check if ansible-tower-cli is already installed
-      assert:
-        that: "'{{ tower_cli_package_name }}' in ansible_facts.packages"
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing organizations
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        organizations list -f human
-      register: organizations_array
-      changed_when: False
-      tags: TC_003
-
-    - name: Check for organization
-      assert:
-        that: organization_name in organizations_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing projects
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        projects list -f human
-      changed_when: False
-      register: projects_array
-      tags: TC_003
-
-    - name: Check for project
-      assert:
-        that: project_name in projects_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing inventories
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        inventory list -f human
-      changed_when: False
-      register: inventory_array
-      tags: TC_003
-
-    - name: Check for inventories
-      assert:
-        that: omnia_inventory_name in inventory_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing groups if omnia-inventory exists
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        groups list --inventory "{{ omnia_inventory_name }}" -f human
-      changed_when: False
-      register: groups_array
-      when: omnia_inventory_name in inventory_array.stdout
-      tags: TC_003
-
-    - name: Check for manager and compute groups
-      assert:
-        that: manager_group_name in groups_array.stdout and compute_group_name in groups_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing credentials
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        credentials list -f human
-      changed_when: False
-      register: credentials_array
-      tags: TC_003
-
-    - name: Check for "{{ credential_name }}"
-      assert:
-        that: credential_name in credentials_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing job templates
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        job_templates list -f human
-      changed_when: False
-      register: templates_array
-      tags: TC_003
-
-    - name: Check for templates
-      assert:
-        that: omnia_template_name in templates_array.stdout and inventory_template_name in templates_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-    - name: Get the existing schedules for job templates
-      command: >-
-        awx --conf.host "{{ awx_ip }}" --conf.username "{{ awx_user }}" --conf.password "{{ admin_password }}"
-        schedules list -f human
-      changed_when: False
-      register: schedules_array
-      tags: TC_003
-
-    - name: Check for schedules to job template
-      assert:
-        that: schedule_name in schedules_array.stdout
-        success_msg: "{{ resource_exists_success_msg }}"
-        fail_msg: "{{ resource_exists_fail_msg }}"
-      tags: TC_003
-
-# Testcase OMNIA_CRM_US_AWXD_TC_004
-# Execute common role on the management station without internet connectivity
-- name: OMNIA_CRM_US_AWXD_TC_004
-  hosts: localhost
-  connection: local
-  gather_subset:
-    - 'min'
-  vars_files:
-    - test_vars/test_common_vars.yml
-    - ../roles/common/vars/main.yml
-  tasks:
-    - name: Down internet connectivity
-      lineinfile:
-        path: /etc/hosts
-        line: "172.16.0.5 github.com"
-        state: present
-        backup: yes
-      tags: TC_004
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-
-      rescue:
-        - name: Validate internet connectivity failure message
-          assert:
-            that: internet_status in internet_value.msg
-            success_msg: "{{ internet_check_success_msg }}"
-            fail_msg: "{{ internet_check_fail_msg }}"
-      tags: TC_004
-
-    - name: Up internet connectivity
-      lineinfile:
-        path: /etc/hosts
-        line: "172.16.0.5 github.com"
-        state: absent
-      tags: TC_004
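
The /etc/hosts entry above blackholes github.com at an unreachable address to simulate an internet outage. An illustrative pre-check that the blackhole is in effect before invoking the roles (getent consults /etc/hosts first under the default nsswitch order):

    - name: Confirm github.com resolves to the blackhole address
      command: getent hosts github.com
      register: hosts_entry
      changed_when: false
      failed_when: "'172.16.0.5' not in hosts_entry.stdout"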
-
-# Testcase OMNIA_CRM_US_AWXD_TC_005
-# Execute web_ui role on the management station and reboot the server
-- name: OMNIA_CRM_US_AWXD_TC_005
-  hosts: localhost
-  connection: local
-  vars_files:
-    - test_vars/test_web_ui_vars.yml
-  tasks:
-    - name: Get last uptime of the server
-      command: uptime -s
-      register: uptime_status
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_005
-
-    - name: Get current date
-      command: date +"%Y-%m-%d %H"
-      register: current_time
-      changed_when: false
-      ignore_errors: yes
-      tags: TC_005
-
-    - block:
-        - name: Call common role
-          include_role:
-            name: ../roles/common
-          vars:
-            input_config_filename: "{{ test_input_config_filename }}"
-
-        - name: Calling the web_ui role to be tested
-          include_role:
-            name: ../roles/web_ui
-      tags: TC_005
-
-    - name: Reboot localhost
-      command: reboot
-      when: current_time.stdout not in uptime_status.stdout
-      tags: TC_005
-
-    - name: Inspect AWX web container
-      docker_container_info:
-        name: "{{ docker_container_name }}"
-      register: awx_container_status
-      tags: TC_005
-
-    - name: Verify AWX container is running after reboot
-      assert:
-        that:
-          - "'running' in awx_container_status.container.State.Status"