Browse Source

Merge pull request #927 from abhishek-sa1/omnia_security

Issue #734: Adding ansible logging for no_log: true tasks
Sujit Jadhav 3 years ago
parent
commit
b0fdf8d031

+ 11 - 7
control_plane/roles/control_plane_k8s/tasks/k8s_init.yml

@@ -35,13 +35,17 @@
   failed_when: false
   register: k8s_pods
 
-- name: Docker login
-  command: docker login -u {{ docker_username }} -p {{ docker_password }}
-  changed_when: true
-  register: docker_login_output
-  failed_when: false
-  when: docker_username or docker_password
-  no_log: true
+- block:
+    - name: Docker login
+      command: docker login -u {{ docker_username }} -p {{ docker_password }}
+      changed_when: true
+      register: docker_login_output
+      when: docker_username or docker_password
+      no_log: true
+  rescue:
+    - name: Warning - docker login failed
+      debug:
+        msg: "Warning: {{ docker_login_output.stderr }}"
 
 - name: Docker login check
   fail:

+ 33 - 21
control_plane/roles/control_plane_monitoring/tasks/configure_k8s_prom_grafana.yml

@@ -23,28 +23,40 @@
   changed_when: false
   register: kube_prom_svc_port
 
-- name: Create prometheus datasource in grafana
-  community.grafana.grafana_datasource:
-    name: control-plane-prometheus
-    grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
-    grafana_user: "{{ grafana_username }}"
-    grafana_password: "{{ grafana_password }}"
-    ds_type: prometheus
-    ds_url: "http://{{ kube_prom_svc_ip.stdout }}:{{ kube_prom_svc_port.stdout }}"
-    access: proxy
-  no_log: true
+- block:
+    - name: Create prometheus datasource in grafana
+      community.grafana.grafana_datasource:
+        name: control-plane-prometheus
+        grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
+        grafana_user: "{{ grafana_username }}"
+        grafana_password: "{{ grafana_password }}"
+        ds_type: prometheus
+        ds_url: "http://{{ kube_prom_svc_ip.stdout }}:{{ kube_prom_svc_port.stdout }}"
+        access: proxy
+      no_log: true
+      register: create_prom_datasource
+  rescue:
+    - name: Create prometheus datasource in grafana failed
+      fail:
+        msg: "Error: {{ create_prom_datasource.msg }}"
 
-- name: Import K8s grafana dashboards
-  community.grafana.grafana_dashboard:
-    grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
-    grafana_user: "{{ grafana_username }}"
-    grafana_password: "{{ grafana_password }}"
-    state: present
-    commit_message: Updated by ansible
-    overwrite: yes
-    path: "{{ role_path }}/files/{{ item }}"
-  with_items: "{{ grafana_dashboard_json_files }}"
-  no_log: true
+- block:
+    - name: Import K8s grafana dashboards
+      community.grafana.grafana_dashboard:
+        grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
+        grafana_user: "{{ grafana_username }}"
+        grafana_password: "{{ grafana_password }}"
+        state: present
+        commit_message: Updated by ansible
+        overwrite: yes
+        path: "{{ role_path }}/files/{{ item }}"
+      with_items: "{{ grafana_dashboard_json_files }}"
+      no_log: true
+      register: import_prom_dashboards
+  rescue:
+    - name: Import K8s grafana dashboards failed
+      fail:
+        msg: "Error: {{ import_prom_dashboards.msg }}"
 
 - name: Save grafana svc ip
   replace:

+ 31 - 19
control_plane/roles/control_plane_monitoring/tasks/configure_loki_grafana.yml

@@ -27,23 +27,35 @@
   changed_when: false
   register: loki_svc_port
 
-- name: Create loki datasource in grafana
-  community.grafana.grafana_datasource:
-    name: control-plane-loki
-    grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
-    grafana_user: "{{ grafana_username }}"
-    grafana_password: "{{ grafana_password }}"
-    ds_type: loki
-    ds_url: "http://{{ loki_svc_ip.stdout }}:{{ loki_svc_port.stdout }}"
-  no_log: true
+- block:
+    - name: Create loki datasource in grafana
+      community.grafana.grafana_datasource:
+        name: control-plane-loki
+        grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
+        grafana_user: "{{ grafana_username }}"
+        grafana_password: "{{ grafana_password }}"
+        ds_type: loki
+        ds_url: "http://{{ loki_svc_ip.stdout }}:{{ loki_svc_port.stdout }}"
+      no_log: true
+      register: create_loki_datasource
+  rescue:
+    - name: Create loki datasource in grafana failed
+      fail:
+        msg: "Error: {{ create_loki_datasource.msg }}"
 
-- name: Import loki dashboard in grafana
-  community.grafana.grafana_dashboard:
-    grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
-    grafana_user: "{{ grafana_username }}"
-    grafana_password: "{{ grafana_password }}"
-    state: present
-    commit_message: Updated by ansible
-    overwrite: yes
-    path: "{{ role_path }}/files/loki_dashboard.json"
-  no_log: true
+- block:
+    - name: Import loki dashboard in grafana
+      community.grafana.grafana_dashboard:
+        grafana_url: "http://{{ grafana_svc_ip.stdout }}:{{ grafana_svc_port.stdout }}"
+        grafana_user: "{{ grafana_username }}"
+        grafana_password: "{{ grafana_password }}"
+        state: present
+        commit_message: Updated by ansible
+        overwrite: yes
+        path: "{{ role_path }}/files/loki_dashboard.json"
+      no_log: true
+      register: import_loki_dashboard
+  rescue:
+    - name: Import loki dashboard in grafana failed
+      fail:
+        msg: "Error: {{ import_loki_dashboard.msg }}"

+ 21 - 9
control_plane/roles/control_plane_security/tasks/install_389ds.yml

@@ -198,11 +198,17 @@
         - name: Create admin principal failed
           fail:
             msg: "Error: {{ create_admin_principal.stderr }}"
-
-    - name: Authenticate as admin
-      shell: set -o pipefail && echo {{ ms_kerberos_admin_password }} | kinit admin
-      no_log: true
-      changed_when: false
+
+    - block:
+        - name: Authenticate as admin
+          shell: set -o pipefail && echo {{ ms_kerberos_admin_password }} | kinit admin
+          no_log: true
+          changed_when: false
+          register: authenticate_admin
+      rescue:
+        - name: Authenticate as admin failed
+          fail:
+            msg: "Error: {{ authenticate_admin.stderr }}"
 
     - name: Install sssd packages
       zypper:
@@ -244,8 +250,14 @@
         state: started
         enabled: yes
 
-    - name: Configure password policy in 389-ds
-      command: dsconf -w {{ ms_directory_manager_password }} -D "cn=Directory Manager" ldap://{{ server_hostname_ms }} pwpolicy set --pwdlockoutduration {{ lockout_duration }} --pwdmaxfailures {{ max_failures }} --pwdresetfailcount {{ failure_reset_interval }}
-      no_log: true
-      changed_when: true
+    - block:
+        - name: Configure password policy in 389-ds
+          command: dsconf -w {{ ms_directory_manager_password }} -D "cn=Directory Manager" ldap://{{ server_hostname_ms }} pwpolicy set --pwdlockoutduration {{ lockout_duration }} --pwdmaxfailures {{ max_failures }} --pwdresetfailcount {{ failure_reset_interval }}
+          no_log: true
+          changed_when: true
+          register: configure_pwpolicy
+      rescue:
+        - name: Configure password policy in 389-ds failed
+          fail:
+            msg: "Error: {{ configure_pwpolicy.stderr }}"
   when: not ds389_status

+ 11 - 5
control_plane/roles/control_plane_security/tasks/install_ipa_server.yml

@@ -26,11 +26,17 @@
       fail:
         msg: "Error: {{ install_ipa_server.stderr_lines }}"
 
-- name: Authenticate as admin
-  shell: set -o pipefail && echo $'{{ ms_kerberos_admin_password }}' | kinit {{ ms_ipa_admin_username }}
-  no_log: true
-  changed_when: false
-
+- block:
+    - name: Authenticate as admin
+      shell: set -o pipefail && echo $'{{ ms_kerberos_admin_password }}' | kinit {{ ms_ipa_admin_username }}
+      no_log: true
+      changed_when: false
+      register: authenticate_admin
+  rescue:
+    - name: Authenticate as admin failed
+      fail:
+        msg: "Error: {{ authenticate_admin.stderr }}"
+
 - name: Replace the /etc/resolv.conf file
   copy:
     src: "{{ temp_resolv_conf_path }}"

+ 17 - 11
roles/k8s_start_services/tasks/configure_nginx_prom_grafana.yml

@@ -60,14 +60,20 @@
     state: restarted
     enabled: yes
 
-- name: Create prometheus datasource in grafana
-  community.grafana.grafana_datasource:
-    name: "hpc-prometheus-{{ ansible_default_ipv4.address }}"
-    grafana_url: "http://{{ grafana_svc_ip }}:{{ grafana_svc_port }}"
-    grafana_user: "{{ hostvars['127.0.0.1']['grafana_username'] }}"
-    grafana_password: "{{ hostvars['127.0.0.1']['grafana_password'] }}"
-    ds_type: prometheus
-    ds_url: "http://{{ ansible_default_ipv4.address }}:{{ nginx_listen_port }}"
-    access: direct
-  delegate_to: localhost
-  no_log: true
+- block:
+    - name: Create prometheus datasource in grafana
+      community.grafana.grafana_datasource:
+        name: "hpc-prometheus-{{ ansible_default_ipv4.address }}"
+        grafana_url: "http://{{ grafana_svc_ip }}:{{ grafana_svc_port }}"
+        grafana_user: "{{ hostvars['127.0.0.1']['grafana_username'] }}"
+        grafana_password: "{{ hostvars['127.0.0.1']['grafana_password'] }}"
+        ds_type: prometheus
+        ds_url: "http://{{ ansible_default_ipv4.address }}:{{ nginx_listen_port }}"
+        access: direct
+      delegate_to: localhost
+      no_log: true
+      register: create_k8s_prom_datasource
+  rescue:
+    - name: Create prometheus datasource in grafana failed
+      fail:
+        msg: "Error: {{ create_k8s_prom_datasource.msg }}"

+ 21 - 11
roles/login_node/tasks/install_389ds.yml

@@ -96,8 +96,6 @@
       no_log: true
       when: ldap1_search_key in ldap1_status.stdout
 
-    
-
     - name: Creating 389 directory server instance
       shell: dscreate -v from-file {{ ldap1_config_path }} | tee {{ ldap1_output_path }}
       changed_when: true
@@ -223,10 +221,16 @@
           fail:
             msg: "Error: {{ create_admin_principal.stderr }}"
 
-    - name: Authenticate as admin
-      shell: set -o pipefail && echo {{ kerberos_admin_password }} | kinit {{ ipa_admin_username }}
-      no_log: true
-      changed_when: false
+    - block:
+        - name: Authenticate as admin
+          shell: set -o pipefail && echo {{ kerberos_admin_password }} | kinit {{ ipa_admin_username }}
+          no_log: true
+          changed_when: false
+          register: authenticate_admin
+      rescue:
+        - name: Authenticate as admin failed
+          fail:
+            msg: "Error: {{ authenticate_admin.stderr }}"
 
     - name: Install sssd packages
       zypper:
@@ -269,8 +273,14 @@
         enabled: yes
   when: not ds389_status
 
-- name: Configure password policy in 389-ds
-  command: dsconf -w {{ directory_manager_password }} -D "cn=Directory Manager" ldap://{{ server_hostname_fqdn }} pwpolicy set --pwdlockoutduration {{ hostvars['127.0.0.1']['lockout_duration'] }} --pwdmaxfailures {{ hostvars['127.0.0.1']['max_failures'] }} --pwdresetfailcount {{ hostvars['127.0.0.1']['failure_reset_interval'] }}
-  changed_when: true
-  no_log: true
-  when: hostvars['127.0.0.1']['enable_secure_login_node']
+- block:
+    - name: Configure password policy in 389-ds
+      command: dsconf -w {{ directory_manager_password }} -D "cn=Directory Manager" ldap://{{ server_hostname_fqdn }} pwpolicy set --pwdlockoutduration {{ hostvars['127.0.0.1']['lockout_duration'] }} --pwdmaxfailures {{ hostvars['127.0.0.1']['max_failures'] }} --pwdresetfailcount {{ hostvars['127.0.0.1']['failure_reset_interval'] }}
+      changed_when: true
+      no_log: true
+      when: hostvars['127.0.0.1']['enable_secure_login_node']
+      register: configure_pwpolicy
+  rescue:
+    - name: Configure password policy in 389-ds failed
+      fail:
+        msg: "Error: {{ configure_pwpolicy.stderr }}"

+ 26 - 19
roles/login_node/tasks/install_ipa_client.yml

@@ -55,23 +55,30 @@
   changed_when: false
   failed_when: false
 
-- name: Install ipa client in CentOS 7.9
-  command: >-
-    ipa-client-install --domain '{{ required_domain_name }}' --server '{{ required_server_hostname }}'
-    --principal admin --password '{{ required_ipa_admin_pwd }}' --force-join --enable-dns-updates --force-ntpd -U
-  changed_when: true
-  no_log: true
-  when:
-    - ( ansible_distribution | lower == os_centos )
-    - ( ansible_distribution_version < os_version )
+- block:
+    - name: Install ipa client in CentOS 7.9
+      command: >-
+        ipa-client-install --domain '{{ required_domain_name }}' --server '{{ required_server_hostname }}'
+        --principal admin --password '{{ required_ipa_admin_pwd }}' --force-join --enable-dns-updates --force-ntpd -U
+      changed_when: true
+      no_log: true
+      register: install_ipa_client
+      when:
+        - ( ansible_distribution | lower == os_centos )
+        - ( ansible_distribution_version < os_version )
 
-- name: Install ipa client in Rocky 8.4
-  command: >-
-    ipa-client-install --domain '{{ required_domain_name }}' --server '{{ required_server_hostname }}'
-    --principal admin --password '{{ required_ipa_admin_pwd }}' --force-join --enable-dns-updates --no-ntp -U
-  changed_when: true
-  no_log: true
-  when:
-    - ( ansible_distribution | lower == os_centos ) or
-      ( ansible_distribution | lower == os_rocky )
-    - ( ansible_distribution_version >= os_version )
+    - name: Install ipa client in Rocky 8
+      command: >-
+        ipa-client-install --domain '{{ required_domain_name }}' --server '{{ required_server_hostname }}'
+        --principal admin --password '{{ required_ipa_admin_pwd }}' --force-join --enable-dns-updates --no-ntp -U
+      changed_when: true
+      no_log: true
+      register: install_ipa_client
+      when:
+        - ( ansible_distribution | lower == os_centos ) or
+          ( ansible_distribution | lower == os_rocky )
+        - ( ansible_distribution_version >= os_version )
+  rescue:
+    - name: Install ipa client failed
+      fail:
+        msg: "Error: {{ install_ipa_client.stderr_lines }}"

+ 36 - 23
roles/login_server/tasks/install_ipa_server.yml

@@ -30,31 +30,44 @@
   changed_when: false
   failed_when: false
 
-- name: Install ipa server in CentOS 7.9
-  command: >-
-    ipa-server-install -n '{{ hostvars['127.0.0.1']['domain_name'] }}' --hostname='{{ server_hostname }}' -a '{{ hostvars['127.0.0.1']['kerberos_admin_password'] }}'
-    -p '{{ hostvars['127.0.0.1']['directory_manager_password'] }}' -r '{{ hostvars['127.0.0.1']['realm_name'] }}' --setup-dns --auto-forwarders --auto-reverse -U
-  changed_when: true
-  no_log: true
-  when:
-    - ( ansible_distribution | lower == os_centos )
-    - ( ansible_distribution_version < os_version )
+- block:
+    - name: Install ipa server in CentOS 7.9
+      command: >-
+        ipa-server-install -n '{{ hostvars['127.0.0.1']['domain_name'] }}' --hostname='{{ server_hostname }}' -a '{{ hostvars['127.0.0.1']['kerberos_admin_password'] }}'
+        -p '{{ hostvars['127.0.0.1']['directory_manager_password'] }}' -r '{{ hostvars['127.0.0.1']['realm_name'] }}' --setup-dns --auto-forwarders --auto-reverse -U
+      changed_when: true
+      no_log: true
+      register: install_ipa_server
+      when:
+        - ( ansible_distribution | lower == os_centos )
+        - ( ansible_distribution_version < os_version )
 
-- name: Install ipa server in CentOS > 8 or Rocky 8.4
-  command: >-
-    ipa-server-install -n '{{ hostvars['127.0.0.1']['domain_name'] }}' --hostname='{{ server_hostname }}' -a '{{ hostvars['127.0.0.1']['kerberos_admin_password'] }}'
-    -p '{{ hostvars['127.0.0.1']['directory_manager_password'] }}' -r '{{ hostvars['127.0.0.1']['realm_name'] }}' --setup-dns --no-forwarders --no-reverse --no-ntp -U
-  changed_when: true
-  no_log: true
-  when:
-    - ( ansible_distribution | lower == os_centos ) or
-      ( ansible_distribution | lower == os_rocky )
-    - ( ansible_distribution_version >= os_version )
+    - name: Install ipa server in CentOS > 8 or Rocky 8
+      command: >-
+        ipa-server-install -n '{{ hostvars['127.0.0.1']['domain_name'] }}' --hostname='{{ server_hostname }}' -a '{{ hostvars['127.0.0.1']['kerberos_admin_password'] }}'
+        -p '{{ hostvars['127.0.0.1']['directory_manager_password'] }}' -r '{{ hostvars['127.0.0.1']['realm_name'] }}' --setup-dns --no-forwarders --no-reverse --no-ntp -U
+      changed_when: true
+      no_log: true
+      register: install_ipa_server
+      when:
+        - ( ansible_distribution | lower == os_centos ) or
+          ( ansible_distribution | lower == os_rocky )
+        - ( ansible_distribution_version >= os_version )
+  rescue:
+    - name: Install ipa server failed
+      fail:
+        msg: "Error: {{ install_ipa_server.stderr_lines }}"
 
-- name: Authenticate as admin
-  shell: set -o pipefail && echo $'{{ hostvars['127.0.0.1']['kerberos_admin_password'] }}' | kinit admin
-  no_log: true
-  changed_when: false
+- block:
+    - name: Authenticate as admin
+      shell: set -o pipefail && echo $'{{ hostvars['127.0.0.1']['kerberos_admin_password'] }}' | kinit admin
+      no_log: true
+      changed_when: false
+      register: authenticate_admin
+  rescue:
+    - name: Authenticate as admin failed
+      fail:
+        msg: "Error: {{ authenticate_admin.stderr }}"
 
 - name: Replace the /etc/resolv.conf file
   copy:

+ 2 - 2
roles/slurm_common/tasks/main.yml

@@ -26,7 +26,7 @@
     backup: yes
     mode: "{{ common_mode }}"
 
-- name: Enable powertools repo in Rocky 8.4
+- name: Enable powertools repo in Rocky 8
   command: dnf config-manager --set-enabled powertools -y
   when:
     - ( ansible_distribution | lower == os_centos ) or
@@ -42,7 +42,7 @@
     - ( ansible_distribution | lower == os_centos )
     - ( ansible_distribution_version < os_version )
 
-- name: Add python dependent packages for CentOS version > 8 and Rocky 8.4
+- name: Add python dependent packages for CentOS version > 8 and Rocky 8
   package:
     name: "{{ common_python3_packages }}"
     state: present

+ 35 - 23
roles/slurm_exporter/tasks/configure_grafana.yml

@@ -16,17 +16,23 @@
 - name: Include k8s_start_services variables
   include_vars: ../../k8s_start_services/vars/main.yml
 
-- name: Create prometheus datasource in grafana
-  community.grafana.grafana_datasource:
-    name: "hpc-prometheus-{{ ansible_default_ipv4.address }}"
-    grafana_url: "http://{{ grafana_svc_ip }}:{{ grafana_svc_port }}"
-    grafana_user: "{{ hostvars['127.0.0.1']['grafana_username'] }}"
-    grafana_password: "{{ hostvars['127.0.0.1']['grafana_password'] }}"
-    ds_type: prometheus
-    ds_url: "http://{{ ansible_default_ipv4.address }}:{{ nginx_listen_port }}"
-    access: direct
-  delegate_to: localhost
-  no_log: true
+- block:
+    - name: Create prometheus datasource in grafana
+      community.grafana.grafana_datasource:
+        name: "hpc-prometheus-{{ ansible_default_ipv4.address }}"
+        grafana_url: "http://{{ grafana_svc_ip }}:{{ grafana_svc_port }}"
+        grafana_user: "{{ hostvars['127.0.0.1']['grafana_username'] }}"
+        grafana_password: "{{ hostvars['127.0.0.1']['grafana_password'] }}"
+        ds_type: prometheus
+        ds_url: "http://{{ ansible_default_ipv4.address }}:{{ nginx_listen_port }}"
+        access: direct
+      delegate_to: localhost
+      no_log: true
+      register: create_slurm_prom_datasource
+  rescue:
+    - name: Create prometheus datasource in grafana failed
+      fail:
+        msg: "Error: {{ create_slurm_prom_datasource.msg }}"
 
 - name: Replace data source in slurm dashboard
   replace:
@@ -50,15 +56,21 @@
     replace: '"title": "SLURM - CPUs/GPUs, Nodes, Jobs, Scheduler ({{ ansible_default_ipv4.address }})",'
   delegate_to: localhost
 
-- name: Import Slurm Grafana dashboards
-  community.grafana.grafana_dashboard:
-    grafana_url: "http://{{ grafana_svc_ip }}:{{ grafana_svc_port }}"
-    grafana_user: "{{ hostvars['127.0.0.1']['grafana_username'] }}"
-    grafana_password: "{{ hostvars['127.0.0.1']['grafana_password'] }}"
-    state: present
-    commit_message: Updated by ansible
-    overwrite: yes
-    path: "{{ role_path }}/files/{{ item }}"
-  with_items: "{{ slurm_dashboard_json_files }}"
-  delegate_to: localhost
-  no_log: true
+- block:
+    - name: Import Slurm Grafana dashboards
+      community.grafana.grafana_dashboard:
+        grafana_url: "http://{{ grafana_svc_ip }}:{{ grafana_svc_port }}"
+        grafana_user: "{{ hostvars['127.0.0.1']['grafana_username'] }}"
+        grafana_password: "{{ hostvars['127.0.0.1']['grafana_password'] }}"
+        state: present
+        commit_message: Updated by ansible
+        overwrite: yes
+        path: "{{ role_path }}/files/{{ item }}"
+      with_items: "{{ slurm_dashboard_json_files }}"
+      delegate_to: localhost
+      no_log: true
+      register: import_prom_dashboards
+  rescue:
+    - name: Import Slurm Grafana dashboards failed
+      fail:
+        msg: "Error: {{ import_prom_dashboards.msg }}"