Execution
Date 11 Sep 2025 11:03:04 +0100
Duration 00:00:04.71
Controller ssh-gw-4.layershift.com
User root
Versions
Ansible 2.16.11
ara 1.7.3 / 1.7.3
Python 3.10.10
Summary
1 Hosts
2 Tasks
2 Results
1 Plays
1 Files
0 Records

File: /home/ssh-gateway/ansible/kuly/validate_provider_metrics_output.yaml

---
# Validate that every metric emitted by `agent360 test provider_metrics`
# carries the JSON type we expect (int vs float); the play fails when any
# value has the wrong type.
- name: Validate provider_metrics output from agent360
  hosts: all
  gather_facts: false  # read-only probe; host facts are never referenced
  vars:
    # Define expected types for each key
    # Values are matched against Ansible's `type_debug` filter output in the
    # check task below; 'float' also tolerates 'int' there, since JSON cannot
    # force a trailing .0 on whole numbers.
    expected_types:
      firewall_up: int
      http_response_code: int
      http_response_time_ms: float
      imunify360_and_fail2ban: int
      imunify360_license: int          # NOT list
      imunify360_services: int
      imunify360_underlicensed: int
      iptables_entries: int
      kcare_status: int
      litespeed_nginx_parallel: int
      litespeed_running_ok: int
      monitoring360_ip_dropped: int
      mysql_free_connections: int
      mysql_iptables_drop: int
      mysql_response_time_ms: float
      oom_kills_since_reboot: int
      queue_size: int
      ram_upgrade_required: int
      recent_oom_kills: int
      smtp_response_time_ms: float
      ssh_port_checksum_ok: int
      time_sync_enabled: int
  tasks:
    # Capture the agent's stdout for inspection. `changed_when: false`
    # keeps this read-only probe from being reported as a change.
    - name: Run agent360 test provider_metrics
      ansible.builtin.command: agent360 test provider_metrics
      register: metrics_output
      changed_when: false

    # NOTE(review): assumes agent360 prints exactly one JSON object on
    # stdout with no preamble — `from_json` errors out otherwise; confirm
    # against the agent's actual output format.
    - name: Parse JSON output
      ansible.builtin.set_fact:
        metrics_json: "{{ metrics_output.stdout | from_json }}"

    # Start from an empty list; the check task appends one entry per
    # mismatching key.
    - name: Initialize list for type mismatches
      ansible.builtin.set_fact:
        type_mismatches: []

    # For every expected key present in the agent output, append a mismatch
    # record when the observed type does not satisfy the declared one.
    # NOTE(review): keys absent from the agent output are silently skipped
    # by the first `when` condition — consider whether a missing metric
    # should itself count as a validation failure.
    - name: Check each key against expected type
      ansible.builtin.set_fact:
        type_mismatches: >-
          {{
            type_mismatches + [{
              'key': item,
              'expected': expected_types[item],
              'actual_type': metrics_json[item] | type_debug,
              'actual_value': metrics_json[item]
            }]
          }}
      # Iterating a dict yields its keys, so `| list` is all that is needed.
      loop: "{{ expected_types | list }}"
      when:
        - metrics_json[item] is defined
        # 'int' must be exactly int; 'float' also accepts int because JSON
        # serializers emit whole numbers without a decimal point.
        - >-
          (expected_types[item] == 'int' and metrics_json[item] | type_debug != 'int') or
          (expected_types[item] == 'float' and metrics_json[item] | type_debug not in ['float', 'int'])

    # Report each mismatch in full. No `when:` guard is needed here — a
    # loop over an empty list simply runs zero iterations (the original
    # `when: type_mismatches | length > 0` was redundant).
    - name: Display mismatches if any
      ansible.builtin.debug:
        msg: |
          Type mismatch found:
          Key: {{ item.key }}
          Expected: {{ item.expected }}
          Actual Type: {{ item.actual_type }}
          Actual Value: {{ item.actual_value | to_nice_json }}
      loop: "{{ type_mismatches }}"
      loop_control:
        label: "{{ item.key }}"  # keep the per-item log line short

    # Hard-fail the play so CI/monitoring notices bad output; remove this
    # task (or its when) if mismatches should only be reported, not fatal.
    - name: Fail if any mismatches found (optional)
      ansible.builtin.fail:
        msg: "Found {{ type_mismatches | length }} type mismatch(es) in provider_metrics output."
      when: type_mismatches | length > 0

    # Only reached when the fail task above was skipped (no mismatches).
    - name: Success message if all OK
      ansible.builtin.debug:
        msg: "✅ All metrics have correct types."
      when: type_mismatches | length == 0