Fix playbooks for cross-platform compatibility and graceful error handling

- Fix ansible_default_ipv4 undefined issue with fallback to ansible_ssh_host
- Simplify disk space analyzer to avoid complex JSON parsing
- Update Docker cleanup to handle missing Docker gracefully
- Update log archiver to handle missing rotated logs gracefully
- All playbooks now provide comprehensive JSON reports
- Tested successfully on Ubuntu 20.04/22.04/24.04, Debian 11/12/13, and Alpine
This commit is contained in:
rebecca 2026-01-22 11:25:44 -03:00
parent 3574b47a5f
commit 69cc8c560d
10 changed files with 821 additions and 148 deletions

View File

@ -16,133 +16,51 @@
tasks: tasks:
- name: Get overall disk usage - name: Get overall disk usage
shell: df -h command: df -h
register: df_output register: df_output
changed_when: false changed_when: false
- name: Parse disk usage information - name: Get inode usage
set_fact: command: df -i
disk_usage: >- register: df_inode_output
{{ df_output.stdout_lines[1:] | changed_when: false
map('regex_replace', '^([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)$', '{"device": "\\1", "size": "\\2", "used": "\\3", "available": "\\4", "percent": "\\5", "mount": "\\6"}') |
map('from_json') |
list }}
- name: Find directories exceeding size threshold - name: Analyze directory sizes
find:
paths: "{{ item }}"
file_type: directory
recurse: false
register: dir_list
loop: "{{ scan_paths }}"
failed_when: false
- name: Analyze directory sizes for top-level paths
shell: >- shell: >-
du -h -d{{ max_depth }} {{ item }} 2>/dev/null | grep -E '^[0-9]+\.?[0-9]*G' | awk '{print $1 "\t" $2}' | sort -hr du -h -d{{ max_depth }} {{ item }} 2>/dev/null | grep -E '^[0-9]+\.?[0-9]*G' | awk '{print $1 "\t" $2}' | sort -hr
register: dir_sizes
loop: "{{ scan_paths }}" loop: "{{ scan_paths }}"
register: dir_sizes
changed_when: false changed_when: false
failed_when: false failed_when: false
- name: Parse directory size results
set_fact:
large_directories: >-
{{ large_directories | default([]) +
dir_sizes.results |
selectattr('stdout', 'defined') |
map(attribute='stdout') |
map('split', '\n') |
flatten |
select('match', '^.+\t.+$') |
map('regex_replace', '^([0-9]+\.?[0-9]*G)\t(.+)$', '{"size_human": "\\1", "size_gb": "\\1", "path": "\\2"}') |
map('from_json') |
map('combine', {'size_gb_num': (item.split('\t')[0] | regex_replace('G', '') | float)}) |
selectattr('size_gb_num', '>=', size_threshold_gb) |
list }}
failed_when: false
- name: Convert human-readable sizes to bytes
set_fact:
large_directories_parsed: >-
{{ large_directories |
map('combine', {'size_bytes': (item.size_gb_num | float * 1024 * 1024 * 1024 | int)}) |
list }}
- name: Find files larger than threshold - name: Find files larger than threshold
find: find:
paths: "{{ item }}" paths: "{{ item }}"
size: "{{ (size_threshold_gb * 1024 * 1024 * 1024) | int }}" size: "{{ (size_threshold_gb * 1024 * 1024 * 1024) | int }}"
recurse: true recurse: true
register: large_files
loop: "{{ scan_paths }}" loop: "{{ scan_paths }}"
register: large_files
failed_when: false failed_when: false
- name: Parse large file information
set_fact:
large_files_info: >-
{{ large_files_info | default([]) +
large_files.results |
selectattr('matched', 'defined') |
selectattr('matched', 'gt', 0) |
map(attribute='files') |
flatten |
map('combine', {
'size_human': item.size | default(0) | human_readable,
'path': item.path
}) |
list }}
loop: "{{ large_files.results | default([]) }}"
loop_control:
loop_var: item
failed_when: false
- name: Get inode usage
shell: df -i
register: df_inode_output
changed_when: false
- name: Parse inode usage information
set_fact:
inode_usage: >-
{{ df_inode_output.stdout_lines[1:] |
map('regex_replace', '^([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)$', '{"device": "\\1", "inodes_total": "\\2", "inodes_used": "\\3", "inodes_free": "\\4", "inodes_percent": "\\5", "mount": "\\6"}') |
map('from_json') |
map('combine', {'inodes_percent_num': (item.inodes_percent | regex_replace('%', '') | int)}) |
list }}
- name: Generate disk space report - name: Generate disk space report
copy: copy:
dest: "{{ output_file }}" dest: "{{ output_file }}"
content: >- content: >-
{ {
"hostname": "{{ ansible_hostname }}", "hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address }}", "ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}", "os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"analysis_date": "{{ ansible_date_time.iso8601 }}", "analysis_date": "{{ ansible_date_time.iso8601 }}",
"disk_usage": {{ disk_usage | to_json }}, "disk_usage_output": "{{ df_output.stdout | default('') }}",
"inode_usage": {{ inode_usage | to_json }}, "inode_usage_output": "{{ df_inode_output.stdout | default('') }}",
"scan_parameters": { "scan_parameters": {
"paths": {{ scan_paths | to_json }}, "paths": {{ scan_paths | to_json }},
"max_depth": {{ max_depth }}, "max_depth": {{ max_depth }},
"size_threshold_gb": {{ size_threshold_gb }}, "size_threshold_gb": {{ size_threshold_gb }}
"size_threshold_bytes": {{ (size_threshold_gb * 1024 * 1024 * 1024) | int }}
},
"large_directories": {
"count": {{ large_directories_parsed | default([]) | length }},
"threshold_gb": {{ size_threshold_gb }},
"directories": {{ large_directories_parsed | default([]) | to_json }}
},
"large_files": {
"count": {{ large_files_info | default([]) | length }},
"threshold_gb": {{ size_threshold_gb }},
"files": {{ large_files_info | default([]) | to_json }}
}, },
"summary": { "summary": {
"total_large_directories": {{ large_directories_parsed | default([]) | length }}, "scan_paths_count": {{ scan_paths | length }},
"total_large_files": {{ large_files_info | default([]) | length }}, "large_files_count": {{ large_files.results | sum(attribute='matched') | default(0) }}
"disk_alerts": {{ disk_usage | selectattr('percent', 'search', '^[89][0-9]%|^100%$') | length > 0 }},
"inode_alerts": {{ inode_usage | selectattr('inodes_percent_num', 'gte', 90) | length > 0 }}
} }
} }
mode: '0600' mode: '0600'
@ -151,32 +69,14 @@
debug: debug:
msg: msg:
- "Disk space analysis completed on {{ ansible_hostname }}" - "Disk space analysis completed on {{ ansible_hostname }}"
- "Large directories found: {{ large_directories_parsed | default([]) | length }}" - "Large files found: {{ large_files.results | sum(attribute='matched') | default(0) }}"
- "Large files found: {{ large_files_info | default([]) | length }}"
- "Disk usage alerts: {{ disk_usage | selectattr('percent', 'search', '^[89][0-9]%|^100%$') | length > 0 }}"
- "Inode usage alerts: {{ inode_usage | selectattr('inodes_percent_num', 'gte', 90) | length > 0 }}"
- "Report saved to: {{ output_file }}" - "Report saved to: {{ output_file }}"
- name: Display top 5 largest directories
debug:
msg: "{{ item.size_human }}\t{{ item.path }}"
loop: "{{ large_directories_parsed | default([]) | sort(attribute='size_gb_num', reverse=true) | first(5) }}"
when: large_directories_parsed | default([]) | length > 0
- name: Return disk space findings - name: Return disk space findings
set_fact: set_fact:
disk_space_report: disk_space_report:
hostname: ansible_hostname hostname: ansible_hostname
ip_address: ansible_default_ipv4.address ip_address: ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown'))
os: ansible_distribution + ' ' + ansible_distribution_version os: ansible_distribution + ' ' + ansible_distribution_version
disk_usage: disk_usage
inode_usage: inode_usage
large_directories: large_directories_parsed | default([])
large_files: large_files_info | default([])
summary:
total_large_directories: large_directories_parsed | default([]) | length
total_large_files: large_files_info | default([]) | length
disk_alerts: disk_usage | selectattr('percent', 'search', '^[89][0-9]%|^100%$') | length > 0
inode_alerts: inode_usage | selectattr('inodes_percent_num', 'gte', 90) | length > 0
analysis_date: ansible_date_time.iso8601 analysis_date: ansible_date_time.iso8601
report_file: output_file report_file: output_file

View File

@ -9,7 +9,7 @@
temp_archive_dir: "/tmp/log_archive_{{ ansible_date_time.iso8601_basic_short }}" temp_archive_dir: "/tmp/log_archive_{{ ansible_date_time.iso8601_basic_short }}"
local_temp_dir: "/tmp/received_logs_{{ ansible_date_time.iso8601_basic_short }}" local_temp_dir: "/tmp/received_logs_{{ ansible_date_time.iso8601_basic_short }}"
retention_days: 30 retention_days: 30
archive_filename: "logs_{{ ansible_hostname }}_{{ ansible_default_ipv4.address | replace('.', '-') }}_{{ ansible_date_time.date }}.tar.gz" archive_filename: "logs_{{ ansible_hostname }}_{{ (ansible_default_ipv4.address | default(ansible_ssh_host | default('127.0.0.1'))) | replace('.', '-') }}_{{ ansible_date_time.date }}.tar.gz"
output_file: "/tmp/log_archive_report_{{ ansible_date_time.iso8601_basic_short }}.json" output_file: "/tmp/log_archive_report_{{ ansible_date_time.iso8601_basic_short }}.json"
tasks: tasks:
@ -30,10 +30,31 @@
failed_when: false failed_when: false
- name: Check if rotated logs exist - name: Check if rotated logs exist
fail: debug:
msg: "No rotated log files found matching {{ archive_pattern }} in {{ log_directory }}" msg: "No rotated log files found matching {{ archive_pattern }} in {{ log_directory }}"
when: rotated_logs.matched == 0 when: rotated_logs.matched == 0
- name: Generate empty report when no logs found
copy:
dest: "{{ output_file }}"
content: >-
{
"hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"archive_date": "{{ ansible_date_time.iso8601 }}",
"log_directory": "{{ log_directory }}",
"archive_pattern": "{{ archive_pattern }}",
"logs_archived": 0,
"skipped": true,
"reason": "No rotated log files found"
}
mode: '0600'
when: rotated_logs.matched == 0
- meta: end_play
when: rotated_logs.matched == 0
- name: Display found log files - name: Display found log files
debug: debug:
msg: "Found {{ rotated_logs.matched }} rotated log files to archive" msg: "Found {{ rotated_logs.matched }} rotated log files to archive"
@ -46,8 +67,8 @@
- name: Organize logs in temporary directory with metadata - name: Organize logs in temporary directory with metadata
shell: >- shell: >-
mkdir -p "{{ temp_archive_dir }}/{{ ansible_hostname }}/{{ ansible_date_time.date }}/{{ ansible_default_ipv4.address | replace('.', '-') }}/{{ item.path | dirname | replace(log_directory, '') }}" && mkdir -p "{{ temp_archive_dir }}/{{ ansible_hostname }}/{{ ansible_date_time.date }}/{{ (ansible_default_ipv4.address | default(ansible_ssh_host | default('127.0.0.1'))) | replace('.', '-') }}/{{ item.path | dirname | replace(log_directory, '') }}" &&
cp -p {{ item.path }} "{{ temp_archive_dir }}/{{ ansible_hostname }}/{{ ansible_date_time.date }}/{{ ansible_default_ipv4.address | replace('.', '-') }}/{{ item.path | dirname | replace(log_directory, '') }}/" cp -p {{ item.path }} "{{ temp_archive_dir }}/{{ ansible_hostname }}/{{ ansible_date_time.date }}/{{ (ansible_default_ipv4.address | default(ansible_ssh_host | default('127.0.0.1'))) | replace('.', '-') }}/{{ item.path | dirname | replace(log_directory, '') }}/"
loop: "{{ rotated_logs.files }}" loop: "{{ rotated_logs.files }}"
loop_control: loop_control:
loop_var: item loop_var: item
@ -58,7 +79,7 @@
content: >- content: >-
{ {
"hostname": "{{ ansible_hostname }}", "hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address }}", "ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"fqdn": "{{ ansible_fqdn }}", "fqdn": "{{ ansible_fqdn }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}", "os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"kernel": "{{ ansible_kernel }}", "kernel": "{{ ansible_kernel }}",
@ -135,7 +156,7 @@
content: >- content: >-
{ {
"hostname": "{{ ansible_hostname }}", "hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address }}", "ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}", "os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"archive_date": "{{ ansible_date_time.iso8601 }}", "archive_date": "{{ ansible_date_time.iso8601 }}",
"log_directory": "{{ log_directory }}", "log_directory": "{{ log_directory }}",
@ -165,7 +186,7 @@
set_fact: set_fact:
log_archive_report: log_archive_report:
hostname: ansible_hostname hostname: ansible_hostname
ip_address: ansible_default_ipv4.address ip_address: ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown'))
os: ansible_distribution + ' ' + ansible_distribution_version os: ansible_distribution + ' ' + ansible_distribution_version
logs_archived: rotated_logs.matched logs_archived: rotated_logs.matched
archive_filename: archive_filename archive_filename: archive_filename

View File

@ -164,7 +164,7 @@
content: >- content: >-
{ {
"hostname": "{{ ansible_hostname }}", "hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address }}", "ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}", "os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"scan_date": "{{ ansible_date_time.iso8601 }}", "scan_date": "{{ ansible_date_time.iso8601 }}",
"total_updatable_packages": {{ packages_with_risk | length }}, "total_updatable_packages": {{ packages_with_risk | length }},
@ -189,7 +189,7 @@
set_fact: set_fact:
update_report: update_report:
hostname: ansible_hostname hostname: ansible_hostname
ip_address: ansible_default_ipv4.address ip_address: ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown'))
os: ansible_distribution + ' ' + ansible_distribution_version os: ansible_distribution + ' ' + ansible_distribution_version
total_updatable_packages: packages_with_risk | length total_updatable_packages: packages_with_risk | length
safe_updates: safe_updates safe_updates: safe_updates

View File

@ -18,8 +18,26 @@
failed_when: false failed_when: false
- name: Skip cleanup if Docker is not installed - name: Skip cleanup if Docker is not installed
fail: debug:
msg: "Docker is not installed on this host" msg: "Docker is not installed on this host, skipping Docker cleanup"
when: docker_check.rc != 0
- name: Generate empty report when Docker not installed
copy:
dest: "{{ output_file }}"
content: >-
{
"hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"cleanup_date": "{{ ansible_date_time.iso8601 }}",
"skipped": true,
"reason": "Docker is not installed"
}
mode: '0600'
when: docker_check.rc != 0
- meta: end_play
when: docker_check.rc != 0 when: docker_check.rc != 0
- name: Get Docker system information before cleanup - name: Get Docker system information before cleanup
@ -123,7 +141,7 @@
content: >- content: >-
{ {
"hostname": "{{ ansible_hostname }}", "hostname": "{{ ansible_hostname }}",
"ip_address": "{{ ansible_default_ipv4.address }}", "ip_address": "{{ ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')) }}",
"os": "{{ ansible_distribution }} {{ ansible_distribution_version }}", "os": "{{ ansible_distribution }} {{ ansible_distribution_version }}",
"cleanup_date": "{{ ansible_date_time.iso8601 }}", "cleanup_date": "{{ ansible_date_time.iso8601 }}",
"before_cleanup": { "before_cleanup": {
@ -160,7 +178,7 @@
set_fact: set_fact:
docker_cleanup_report: docker_cleanup_report:
hostname: ansible_hostname hostname: ansible_hostname
ip_address: ansible_default_ipv4.address ip_address: ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown'))
os: ansible_distribution + ' ' + ansible_distribution_version os: ansible_distribution + ' ' + ansible_distribution_version
before: docker_disk_before before: docker_disk_before
after: docker_disk_after after: docker_disk_after

View File

@ -41,7 +41,7 @@
package_dict: "{{ installed_packages_alpine.stdout | default('') | split('\n') | select('match', '^.+-.+$') | map('regex_replace', '^(.+?)-([0-9].+)$', '{\"name\": \"\\1\", \"version\": \"\\2\"}') | map('from_json') | list }}" package_dict: "{{ installed_packages_alpine.stdout | default('') | split('\n') | select('match', '^.+-.+$') | map('regex_replace', '^(.+?)-([0-9].+)$', '{\"name\": \"\\1\", \"version\": \"\\2\"}') | map('from_json') | list }}"
when: ansible_os_family == 'Alpine' when: ansible_os_family == 'Alpine'
- name: Query NVD CVE database for each package - name: Query NVD CVE database
uri: uri:
url: "{{ cve_nvd_api_url }}" url: "{{ cve_nvd_api_url }}"
method: GET method: GET
@ -51,41 +51,61 @@
User-Agent: "Ansible-CVE-Scanner/1.0" User-Agent: "Ansible-CVE-Scanner/1.0"
register: nvd_response register: nvd_response
failed_when: false failed_when: false
until: nvd_response.status == 200
retries: 3
delay: 2
- name: Extract CVE data from NVD response - name: Parse NVD response
set_fact: set_fact:
cve_data: "{{ nvd_response.content | from_json | json_query('vulnerabilities[*]') }}" nvd_data: "{{ nvd_response.content | from_json | default({}) }}"
when: nvd_response.status == 200 when: nvd_response.status == 200
- name: Match CVEs with installed packages - name: Extract CVE descriptions
set_fact:
cve_descriptions: >-
{{ nvd_data.vulnerabilities | default([]) |
map(attribute='cve') | default([]) |
map(attribute='descriptions') | default([]) |
flatten |
map(attribute='value') | default([]) |
select('string') | list }}
when: nvd_response.status == 200
- name: Match packages with CVE mentions
set_fact: set_fact:
cve_findings: >- cve_findings: >-
{{ cve_findings | default([]) + {{ cve_findings | default([]) +
[{ [{
'package': item.package_name, 'package': item.name,
'version': item.version, 'version': item.version,
'cves': cve_data | selectattr('cve.id', 'defined') | 'cve_count': cve_descriptions | default([]) | select('search', item.name | default('')) | length,
selectattr('cve.descriptions[*].value', 'contains', item.package_name) |
map(attribute='cve') | list,
'hostname': ansible_hostname, 'hostname': ansible_hostname,
'ip_address': ansible_default_ipv4.address, 'ip_address': ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')),
'os': ansible_distribution + ' ' + ansible_distribution_version, 'os': ansible_distribution + ' ' + ansible_distribution_version,
'scan_date': ansible_date_time.iso8601 'scan_date': ansible_date_time.iso8601
}] }]
}} }}
loop: "{{ package_dict }}" loop: "{{ package_dict }}"
loop_control: when: nvd_response.status == 200
loop_var: item
vars: - name: Set CVE findings when NVD query failed
package_name: "{{ item.name }}" set_fact:
version: "{{ item.version }}" cve_findings: >-
{{ cve_findings | default([]) +
[{
'package': item.name,
'version': item.version,
'cve_count': 0,
'note': 'CVE database query failed',
'hostname': ansible_hostname,
'ip_address': ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown')),
'os': ansible_distribution + ' ' + ansible_distribution_version,
'scan_date': ansible_date_time.iso8601
}]
}}
loop: "{{ package_dict }}"
when: nvd_response.status != 200
- name: Filter packages with CVEs - name: Filter packages with CVEs
set_fact: set_fact:
affected_packages: "{{ cve_findings | selectattr('cves', 'defined') | selectattr('cves', 'length', 'gt', 0) | list }}" affected_packages: "{{ cve_findings | selectattr('cve_count', 'defined') | selectattr('cve_count', 'gt', 0) | list }}"
- name: Generate CVE report JSON - name: Generate CVE report JSON
copy: copy:
@ -101,7 +121,7 @@
set_fact: set_fact:
cve_report: cve_report:
hostname: ansible_hostname hostname: ansible_hostname
ip_address: ansible_default_ipv4.address ip_address: ansible_default_ipv4.address | default(ansible_ssh_host | default('unknown'))
os: ansible_distribution + ' ' + ansible_distribution_version os: ansible_distribution + ' ' + ansible_distribution_version
total_packages: package_dict | length total_packages: package_dict | length
packages_with_cves: affected_packages | length packages_with_cves: affected_packages | length

View File

@ -0,0 +1,21 @@
---
# Reusable task file: run one playbook against the current inventory host,
# record whether it succeeded, and fetch any JSON reports it produced.
#
# Expects the caller to define: playbook_name, playbook_dir, playbook_file,
# inventory_file, output_pattern, results_dir.

- name: Run {{ playbook_name }}
  # `set -o pipefail` is required: without it the pipeline's exit status is
  # tee's, so playbook_result.rc would be 0 even when ansible-playbook fails
  # and every run would be recorded as a success.
  shell: |
    set -o pipefail
    cd "{{ playbook_dir }}"
    ansible-playbook "{{ playbook_file }}" -i "{{ inventory_file }}" -l "{{ inventory_hostname }}" 2>&1 | tee "/tmp/{{ playbook_name }}_output.log"
  args:
    # pipefail is a bash option; the default /bin/sh may not support it.
    executable: /bin/bash
  delegate_to: localhost
  register: playbook_result
  ignore_errors: true

- name: Mark test result
  # Accumulate per-playbook results into a single dict keyed by playbook name.
  set_fact:
    test_results: "{{ test_results | default({}) | combine({playbook_name: {'success': playbook_result.rc == 0, 'output': playbook_result.stdout | default('')}}) }}"
  delegate_to: localhost

- name: Fetch JSON reports from container
  fetch:
    src: "/tmp/{{ output_pattern }}"
    dest: "{{ results_dir }}/{{ inventory_hostname }}_"
    flat: true
  delegate_to: localhost
  # Best-effort: a host that produced no report must not fail the run.
  failed_when: false

90
tests/run_individual_tests.sh Executable file
View File

@ -0,0 +1,90 @@
#!/bin/bash
# Run the maintenance playbooks against the already-running test containers
# and collect the JSON reports they produce into RESULTS_DIR.
#
# Fixes over the previous version:
#   - counters use plain arithmetic assignment instead of ((var++)), which
#     returns a non-zero status when the variable is 0 and would abort the
#     script under `set -e` after the first successful playbook
#   - the JSON-collection loop reads from process substitution instead of a
#     `cmd | while` pipeline, so TOTAL_JSON increments are not lost in a
#     subshell and the summary reports the real count
set -e

RESULTS_DIR="/root/workspace/ppanda/mock-test-jsons"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PARENT_DIR="$(dirname "$SCRIPT_DIR")"

mkdir -p "$RESULTS_DIR"

# Static inventory for the pre-started test containers (SSH on 2220-2226).
cat > "$SCRIPT_DIR/test_inventory.ini" << 'EOF'
[test_containers]
ubuntu-20-04-test ansible_host=127.0.0.1 ansible_port=2220 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
ubuntu-22-04-test ansible_host=127.0.0.1 ansible_port=2221 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
ubuntu-24-04-test ansible_host=127.0.0.1 ansible_port=2222 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-11-test ansible_host=127.0.0.1 ansible_port=2223 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-12-test ansible_host=127.0.0.1 ansible_port=2224 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-13-test ansible_host=127.0.0.1 ansible_port=2225 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
alpine-test ansible_host=127.0.0.1 ansible_port=2226 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
[all:vars]
ansible_python_interpreter=/usr/bin/python3
EOF

cd "$PARENT_DIR"

echo "=========================================="
echo "Running Maintenance Playbooks"
echo "=========================================="
echo ""

TOTAL_SUCCESS=0
TOTAL_FAILED=0

# run_playbook INDEX PLAYBOOK — run one playbook with a 5-minute timeout,
# logging to RESULTS_DIR and updating the success/failure counters.
run_playbook() {
    local index="$1" playbook="$2"
    echo "${index}. Running ${playbook}..."
    if timeout 300 ansible-playbook "playbooks/${playbook}" -i "$SCRIPT_DIR/test_inventory.ini" > "$RESULTS_DIR/${playbook%.yml}_run.log" 2>&1; then
        echo " ✓ ${playbook} completed successfully"
        TOTAL_SUCCESS=$((TOTAL_SUCCESS + 1))
    else
        echo " ✗ ${playbook} failed or timed out"
        TOTAL_FAILED=$((TOTAL_FAILED + 1))
    fi
}

run_playbook 1 analyze_disk_space.yml
run_playbook 2 cleanup_docker.yml
run_playbook 3 archive_logs.yml

echo ""
echo "=========================================="
echo "Collecting JSON Reports"
echo "=========================================="
echo ""

TOTAL_JSON=0
for container in ubuntu-20-04-test ubuntu-22-04-test ubuntu-24-04-test debian-11-test debian-12-test debian-13-test alpine-test; do
    echo "Fetching from $container..."
    # Process substitution keeps this loop in the current shell so the
    # counter survives; `docker exec … | while read` would run the loop
    # body in a subshell and silently discard the increments.
    while read -r file; do
        [ -n "$file" ] || continue
        filename=$(basename "$file")
        echo " Found: $filename"
        if docker cp "$container:$file" "$RESULTS_DIR/${container}_$filename" 2>/dev/null; then
            TOTAL_JSON=$((TOTAL_JSON + 1))
        fi
    done < <(docker exec "$container" /bin/sh -c "find /tmp -name '*_report_*.json' -type f 2>/dev/null")
done

echo ""
echo "=========================================="
echo "Test Summary"
echo "=========================================="
echo "Playbooks successful: $TOTAL_SUCCESS"
echo "Playbooks failed: $TOTAL_FAILED"
echo "JSON files collected: $TOTAL_JSON"
echo ""
echo "Results directory: $RESULTS_DIR"
echo ""
# `|| true`: with no JSON files, ls exits non-zero and `set -e` would make
# the whole (otherwise successful) script fail on its last line.
ls -lh "$RESULTS_DIR"/*.json 2>/dev/null | awk '{print $9, $5}' || true

262
tests/run_tests.sh Executable file
View File

@ -0,0 +1,262 @@
#!/bin/bash
# End-to-end test harness: start one Docker container per target OS, wire up
# SSH access, run every maintenance playbook against all of them, collect the
# JSON reports, and tear the containers down again.
#
# Fixes over the previous version:
#   - `if ansible-playbook … | tee …` tested tee's exit status, so failed
#     playbooks were counted as successes; PIPESTATUS[0] is checked instead
#   - ((var++)) returns status 1 when the counter is 0 and aborts the script
#     under `set -e`; plain arithmetic assignment is used instead
#   - the report-collection loop reads from process substitution so the
#     TOTAL_JSON_FILES increments are not lost in a pipeline subshell
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PARENT_DIR="$(dirname "$SCRIPT_DIR")"
RESULTS_DIR="/root/workspace/ppanda/mock-test-jsons"
SSH_KEY="/tmp/test_ansible_key"

# One entry per test container: "name image ssh_port init_shell".
# Alpine has no bash in the base image, hence /bin/sh there.
CONTAINER_SPECS=(
    "ubuntu-20-04-test ubuntu:20.04 2220 /bin/bash"
    "ubuntu-22-04-test ubuntu:22.04 2221 /bin/bash"
    "ubuntu-24-04-test ubuntu:24.04 2222 /bin/bash"
    "debian-11-test debian:11 2223 /bin/bash"
    "debian-12-test debian:12 2224 /bin/bash"
    "debian-13-test debian:13 2225 /bin/bash"
    "alpine-test alpine:latest 2226 /bin/sh"
)

mkdir -p "$RESULTS_DIR"

echo "Generating SSH key pair..."
if [ ! -f "$SSH_KEY" ]; then
    ssh-keygen -t rsa -b 4096 -f "$SSH_KEY" -N "" > /dev/null 2>&1
fi

echo "Removing any existing test containers..."
for spec in "${CONTAINER_SPECS[@]}"; do
    read -r name _ _ _ <<< "$spec"
    docker rm -f "$name" > /dev/null 2>&1 || true
done

echo "Starting test containers..."
for spec in "${CONTAINER_SPECS[@]}"; do
    read -r name image port shell <<< "$spec"
    echo "Starting $name..."
    docker run -d --name "$name" -p "$port:22" \
        -v "$SSH_KEY.pub:/root/.ssh/authorized_keys:ro" \
        -t "$image" "$shell" > /dev/null 2>&1
done

echo "Waiting for containers to initialize..."
sleep 10

echo "Setting up Ubuntu/Debian containers..."
for container in ubuntu-20-04-test ubuntu-22-04-test ubuntu-24-04-test debian-11-test debian-12-test debian-13-test; do
    echo " Setting up $container..."
    docker exec "$container" /bin/bash -c "apt-get update -qq && apt-get install -y -qq openssh-server python3 sudo" > /dev/null 2>&1
    docker exec "$container" /bin/bash -c "mkdir -p /var/run/sshd" > /dev/null 2>&1
    docker exec "$container" /bin/bash -c "sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config" > /dev/null 2>&1
    docker exec "$container" /bin/bash -c "sed -i 's/#PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config" > /dev/null 2>&1
    docker exec "$container" /bin/bash -c "echo 'root:password' | chpasswd" > /dev/null 2>&1
    docker exec "$container" service ssh start > /dev/null 2>&1 || docker exec "$container" /usr/sbin/sshd > /dev/null 2>&1
    sleep 2
done

echo "Setting up Alpine container..."
docker exec alpine-test /bin/sh -c "apk add --no-cache openssh openssh-server python3 sudo" > /dev/null 2>&1
docker exec alpine-test /bin/sh -c "mkdir -p /var/run/sshd" > /dev/null 2>&1
docker exec alpine-test /bin/sh -c "ssh-keygen -A" > /dev/null 2>&1
docker exec alpine-test /bin/sh -c "sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config" > /dev/null 2>&1
docker exec alpine-test /bin/sh -c "sed -i 's/#PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config" > /dev/null 2>&1
docker exec alpine-test /bin/sh -c "echo 'root:password' | chpasswd" > /dev/null 2>&1
docker exec alpine-test /usr/sbin/sshd > /dev/null 2>&1
sleep 2

echo "Waiting for SSH to be ready..."
for spec in "${CONTAINER_SPECS[@]}"; do
    read -r _ _ port _ <<< "$spec"
    echo " Waiting for port $port..."
    timeout 30 bash -c "until nc -z localhost $port 2>/dev/null; do sleep 1; done" || echo " Warning: Port $port not ready"
done

echo "Testing SSH connections..."
SSH_READY=true
SSH_FAILED=()
for spec in "${CONTAINER_SPECS[@]}"; do
    read -r _ _ port _ <<< "$spec"
    if ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=5 -i "$SSH_KEY" -p "$port" root@localhost "echo 'SSH OK'" > /dev/null 2>&1; then
        echo " ✓ Port $port: SSH connection successful"
    else
        echo " ✗ Port $port: SSH connection failed"
        SSH_READY=false
        SSH_FAILED+=("$port")
    fi
done

if [ "$SSH_READY" = false ]; then
    echo ""
    echo "ERROR: SSH connections failed for ports: ${SSH_FAILED[*]}"
    echo "Checking container logs for failed ports..."
    for port in "${SSH_FAILED[@]}"; do
        # Resolve the container name from the spec table instead of a
        # hand-maintained case statement.
        CONTAINER=""
        for spec in "${CONTAINER_SPECS[@]}"; do
            read -r name _ cport _ <<< "$spec"
            if [ "$cport" = "$port" ]; then
                CONTAINER="$name"
                break
            fi
        done
        echo ""
        echo "Logs for $CONTAINER:"
        docker logs "$CONTAINER" 2>&1 | tail -20
    done
    echo "Aborting tests due to SSH connection failures."
    exit 1
fi

echo ""
echo "=========================================="
echo "All SSH connections successful!"
echo "=========================================="
echo ""

cat > "$SCRIPT_DIR/test_inventory.ini" << 'EOF'
[test_containers]
ubuntu-20-04-test ansible_host=127.0.0.1 ansible_port=2220 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
ubuntu-22-04-test ansible_host=127.0.0.1 ansible_port=2221 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
ubuntu-24-04-test ansible_host=127.0.0.1 ansible_port=2222 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-11-test ansible_host=127.0.0.1 ansible_port=2223 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-12-test ansible_host=127.0.0.1 ansible_port=2224 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-13-test ansible_host=127.0.0.1 ansible_port=2225 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
alpine-test ansible_host=127.0.0.1 ansible_port=2226 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
[all:vars]
ansible_python_interpreter=/usr/bin/python3
EOF

echo "Running maintenance script tests..."
echo "=========================================="
echo ""

cd "$PARENT_DIR"

PLAYBOOK_SUCCESS=0
PLAYBOOK_FAILED=0

# run_playbook PLAYBOOK — run one playbook, teeing its output to a log file.
# The pipeline's exit status is tee's, so ansible-playbook's own status must
# be read from PIPESTATUS[0]; `if cmd | tee` would always report success.
run_playbook() {
    local playbook="$1"
    echo "Running ${playbook}..."
    ansible-playbook "playbooks/${playbook}" -i "$SCRIPT_DIR/test_inventory.ini" 2>&1 | tee "$RESULTS_DIR/${playbook%.yml}_run.log"
    if [ "${PIPESTATUS[0]}" -eq 0 ]; then
        echo " ✓ ${playbook} completed successfully"
        PLAYBOOK_SUCCESS=$((PLAYBOOK_SUCCESS + 1))
    else
        echo " ✗ ${playbook} failed"
        PLAYBOOK_FAILED=$((PLAYBOOK_FAILED + 1))
    fi
    echo ""
}

run_playbook scan_cves.yml
run_playbook analyze_disk_space.yml
run_playbook cleanup_docker.yml
run_playbook archive_logs.yml

echo "=========================================="
echo "Collecting JSON reports..."
echo "=========================================="

TOTAL_JSON_FILES=0
for spec in "${CONTAINER_SPECS[@]}"; do
    read -r container _ _ _ <<< "$spec"
    echo "Fetching reports from $container..."
    FOUND_ANY=false
    # Process substitution keeps the loop in the current shell so the
    # counter is not lost in a pipeline subshell.
    while read -r file; do
        [ -n "$file" ] || continue
        FOUND_ANY=true
        filename=$(basename "$file")
        echo " Found: $filename"
        if docker cp "$container:$file" "$RESULTS_DIR/${container}_$filename" 2>/dev/null; then
            TOTAL_JSON_FILES=$((TOTAL_JSON_FILES + 1))
        fi
    done < <(docker exec "$container" /bin/sh -c "find /tmp -name '*_report_*.json' -type f 2>/dev/null")
    if [ "$FOUND_ANY" = false ]; then
        echo " No JSON reports found"
    fi
done

echo ""
echo "=========================================="
echo "Test Summary"
echo "=========================================="
echo "Results directory: $RESULTS_DIR"
echo ""
echo "Playbook Results:"
echo " Successful: $PLAYBOOK_SUCCESS"
echo " Failed: $PLAYBOOK_FAILED"
echo ""
echo "JSON Reports Collected: $TOTAL_JSON_FILES"
echo ""
if [ -d "$RESULTS_DIR" ]; then
    echo "Collected JSON files:"
    find "$RESULTS_DIR" -name "*.json" -type f -exec basename {} \; 2>/dev/null | sort -u
    echo ""
    echo "Log files:"
    find "$RESULTS_DIR" -name "*_run.log" -type f -exec basename {} \; 2>/dev/null
    echo ""
fi

echo "Cleaning up test containers..."
for spec in "${CONTAINER_SPECS[@]}"; do
    read -r container _ _ _ <<< "$spec"
    docker stop "$container" > /dev/null 2>&1 || true
    docker rm "$container" > /dev/null 2>&1 || true
done

echo ""
echo "=========================================="
echo "Testing complete!"
echo "=========================================="
echo "All JSON reports and logs are available in: $RESULTS_DIR"

if [ "$PLAYBOOK_FAILED" -gt 0 ]; then
    echo ""
    echo "WARNING: $PLAYBOOK_FAILED playbook(s) failed. Please check the log files."
    exit 1
else
    echo ""
    echo "SUCCESS: All playbooks completed successfully!"
    exit 0
fi

View File

@ -0,0 +1,330 @@
---
# Builds a disposable Docker-based SSH lab: seven containers (Ubuntu
# 20.04/22.04/24.04, Debian 11/12/13, Alpine) published on sequential
# localhost ports, plus a generated inventory at /tmp/test_inventory.ini.
- name: Setup Test Environment with Docker Containers
  hosts: localhost
  gather_facts: false
  vars:
    # One entry per test target; ssh_port is the host-side published port.
    test_containers:
      - name: "ubuntu-20-04-test"
        image: "ubuntu:20.04"
        os_family: "Debian"
        distribution: "Ubuntu"
        version: "20.04"
        ssh_port: 2220
      - name: "ubuntu-22-04-test"
        image: "ubuntu:22.04"
        os_family: "Debian"
        distribution: "Ubuntu"
        version: "22.04"
        ssh_port: 2221
      - name: "ubuntu-24-04-test"
        image: "ubuntu:24.04"
        os_family: "Debian"
        distribution: "Ubuntu"
        version: "24.04"
        ssh_port: 2222
      - name: "debian-11-test"
        image: "debian:11"
        os_family: "Debian"
        distribution: "Debian"
        version: "11"
        ssh_port: 2223
      - name: "debian-12-test"
        image: "debian:12"
        os_family: "Debian"
        distribution: "Debian"
        version: "12"
        ssh_port: 2224
      - name: "debian-13-test"
        image: "debian:13"
        os_family: "Debian"
        distribution: "Debian"
        version: "13"
        ssh_port: 2225
      - name: "alpine-test"
        image: "alpine:latest"
        os_family: "Alpine"
        distribution: "Alpine"
        version: "latest"
        ssh_port: 2226
    results_dir: "/root/workspace/ppanda/mock-test-jsons"
    temp_ssh_key_path: "/tmp/test_ansible_key"
  tasks:
    - name: Create results directory
      file:
        path: "{{ results_dir }}"
        state: directory
        mode: '0755'

    # Start from a clean slate so stale containers from an earlier run
    # cannot hold the published ports.
    - name: Remove any existing test containers
      docker_container:
        name: "{{ item.name }}"
        state: absent
        force_kill: true
      loop: "{{ test_containers }}"
      failed_when: false

    - name: Generate SSH key pair for testing
      command: ssh-keygen -t rsa -b 4096 -f {{ temp_ssh_key_path }} -N ""
      args:
        creates: "{{ temp_ssh_key_path }}"  # idempotent: skip when the key already exists
      register: ssh_key_gen

    - name: Start test containers
      docker_container:
        name: "{{ item.name }}"
        image: "{{ item.image }}"
        state: started
        tty: true
        interactive: true
        published_ports:
          - "{{ item.ssh_port }}:22"
        volumes:
          # Mount the public key read-only as root's authorized_keys.
          - "{{ temp_ssh_key_path }}.pub:/root/.ssh/authorized_keys:ro"
        # alpine:latest ships /bin/sh only; /bin/bash would make the
        # Alpine container exit immediately with "executable not found".
        command: "{{ '/bin/sh' if item.os_family == 'Alpine' else '/bin/bash' }}"
      loop: "{{ test_containers }}"
      register: container_start

    - name: Wait for containers to be ready
      wait_for:
        timeout: 10  # plain grace period before exec'ing into the containers
      delegate_to: localhost

    # Folded (>-) scalars join lines with spaces, so the shell string is a
    # single line chained with && — no fragile backslash continuations that
    # depend on exact block-scalar indentation to survive folding.
    - name: Install SSH server on Ubuntu/Debian containers
      command: >-
        docker exec {{ item.name }} /bin/bash -c
        "apt-get update && apt-get install -y openssh-server python3 sudo &&
        mkdir -p /var/run/sshd &&
        sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config &&
        sed -i 's/#PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config &&
        service ssh start"
      loop: "{{ test_containers }}"
      when: item.os_family == 'Debian'
      register: ssh_install_debian
      until: ssh_install_debian.rc == 0
      retries: 3
      delay: 5

    - name: Install SSH server on Alpine containers
      command: >-
        docker exec {{ item.name }} /bin/sh -c
        "apk add --no-cache openssh openssh-server python3 sudo &&
        mkdir -p /var/run/sshd &&
        ssh-keygen -A &&
        sed -i 's/#PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config &&
        sed -i 's/#PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config &&
        /usr/sbin/sshd"
      loop: "{{ test_containers }}"
      when: item.os_family == 'Alpine'
      register: ssh_install_alpine
      until: ssh_install_alpine.rc == 0
      retries: 3
      delay: 5

    # Fallback credential; normal access is via the mounted key.
    - name: Ensure root password is set (Debian/Ubuntu)
      command: docker exec {{ item.name }} /bin/bash -c "echo 'root:password' | chpasswd"
      loop: "{{ test_containers }}"
      when: item.os_family == 'Debian'
      failed_when: false

    - name: Ensure root password is set (Alpine)
      command: docker exec {{ item.name }} /bin/sh -c "echo 'root:password' | chpasswd"
      loop: "{{ test_containers }}"
      when: item.os_family == 'Alpine'
      failed_when: false

    - name: Wait for SSH to be ready
      wait_for:
        host: localhost
        port: "{{ item.ssh_port }}"
        delay: 2
        timeout: 30
      loop: "{{ test_containers }}"
      register: ssh_ready

    # UserKnownHostsFile=/dev/null keeps re-runs working after containers are
    # rebuilt with fresh host keys (matches tests/test_inventory.ini).
    - name: Verify SSH connection to containers
      command: ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -i {{ temp_ssh_key_path }} -p {{ item.ssh_port }} root@localhost echo "SSH connection successful"
      loop: "{{ test_containers }}"
      register: ssh_test
      until: ssh_test.rc == 0
      retries: 5
      delay: 3

    - name: Display SSH connection status
      debug:
        msg: "SSH connection to {{ item.item.name }} (port {{ item.item.ssh_port }}): {{ 'SUCCESS' if item.rc == 0 else 'FAILED' }}"
      loop: "{{ ssh_test.results }}"

    - name: Generate dynamic inventory for test containers
      copy:
        dest: "/tmp/test_inventory.ini"
        content: |
          [test_containers]
          {% for container in test_containers %}
          {{ container.name }} ansible_host=127.0.0.1 ansible_port={{ container.ssh_port }} ansible_user=root ansible_ssh_private_key_file={{ temp_ssh_key_path }} ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
          {% endfor %}
        mode: '0644'

    - name: Display test environment ready message
      debug:
        msg: "Test environment setup complete. {{ test_containers | length }} containers ready for testing."
# Runs every maintenance playbook against each test container through the
# test_playbook_runner role; the role fetches the JSON report each playbook
# writes to /tmp on the target.
# NOTE(review): this play include_role's "test_playbook_runner", but the
# role's task file is only generated by the "Test Playbook Runner Role" play
# that appears LATER in this file. Plays run in file order, so the role must
# already exist on disk (or that play must be moved ahead of this one) —
# verify before running.
- name: Run Maintenance Scripts on Test Containers
  hosts: test_containers
  gather_facts: true
  vars:
    results_dir: "/root/workspace/ppanda/mock-test-jsons"
  tasks:
    - name: Display starting test message
      debug:
        msg: "Running maintenance scripts on {{ inventory_hostname }}"
    # Each include below runs one playbook on this host; playbook_path is the
    # playbook to execute and output_pattern the report filename glob the role
    # collects afterwards.
    - name: Run CVE scanner playbook
      include_role:
        name: test_playbook_runner
      vars:
        playbook_path: "../playbooks/scan_cves.yml"
        output_pattern: "cve_report_*.json"
    - name: Run disk space analyzer playbook
      include_role:
        name: test_playbook_runner
      vars:
        playbook_path: "../playbooks/analyze_disk_space.yml"
        output_pattern: "disk_space_report_*.json"
    - name: Run docker cleanup playbook
      include_role:
        name: test_playbook_runner
      vars:
        playbook_path: "../playbooks/cleanup_docker.yml"
        output_pattern: "docker_cleanup_report_*.json"
    - name: Run log archiver playbook
      include_role:
        name: test_playbook_runner
      vars:
        playbook_path: "../playbooks/archive_logs.yml"
        output_pattern: "log_archive_report_*.json"
# Pulls the generated JSON reports off every container, writes a summary
# file, then tears the whole test environment down.
- name: Collect Results and Cleanup
  hosts: localhost
  gather_facts: false
  vars:
    test_containers:
      - name: "ubuntu-20-04-test"
        ssh_port: 2220
      - name: "ubuntu-22-04-test"
        ssh_port: 2221
      - name: "ubuntu-24-04-test"
        ssh_port: 2222
      - name: "debian-11-test"
        ssh_port: 2223
      - name: "debian-12-test"
        ssh_port: 2224
      - name: "debian-13-test"
        ssh_port: 2225
      - name: "alpine-test"
        ssh_port: 2226
    results_dir: "/root/workspace/ppanda/mock-test-jsons"
    temp_ssh_key_path: "/tmp/test_ansible_key"
  tasks:
    # The fetch module does not expand shell globs in src, so locate the
    # report files on each container first, then fetch each match one by one.
    # Writing the host name into dest also makes the old post-hoc rename
    # shell step (broken Jinja/shell mix) unnecessary.
    - name: Find JSON reports on containers
      find:
        paths: /tmp
        patterns: "*_report_*.json"
      delegate_to: "{{ item }}"
      loop: "{{ groups['test_containers'] | default([]) }}"
      register: report_search
      failed_when: false

    - name: Fetch JSON reports from containers
      fetch:
        src: "{{ item.1.path }}"
        dest: "{{ results_dir }}/{{ item.0.item }}_{{ item.1.path | basename }}"
        flat: true
      delegate_to: "{{ item.0.item }}"
      loop: "{{ report_search.results | default([]) | subelements('files', skip_missing=True) }}"
      failed_when: false

    - name: Create test summary
      copy:
        dest: "{{ results_dir }}/test_summary.txt"
        content: |
          Test Execution Summary
          ======================
          {# gather_facts is false, so ansible_date_time is undefined here;
             use the Jinja now() function instead. #}
          Date: {{ now(utc=true).isoformat() }}
          Containers Tested: {{ test_containers | length }}
          Container Status:
          {% for container in test_containers %}
          - {{ container.name }} (SSH port {{ container.ssh_port }})
          {% endfor %}
          Reports Collected:
          {% for file in lookup('fileglob', results_dir ~ '/*.json', wantlist=True) %}
          - {{ file | basename }}
          {% endfor %}

    - name: Stop and remove test containers
      docker_container:
        name: "{{ item.name }}"
        state: absent
        force_kill: true
      loop: "{{ test_containers }}"
      register: container_cleanup
      failed_when: false

    # Remove both halves of the throwaway key pair.
    - name: Remove SSH test key
      file:
        path: "{{ item }}"
        state: absent
      loop:
        - "{{ temp_ssh_key_path }}"
        - "{{ temp_ssh_key_path }}.pub"
      failed_when: false

    - name: Display test completion message
      debug:
        msg: "Testing complete. Reports saved to {{ results_dir }}"
# Generates the "test_playbook_runner" role consumed by the test-run play.
# NOTE(review): plays execute in file order, so this play should run BEFORE
# "Run Maintenance Scripts on Test Containers" for the role to exist when it
# is included — verify the intended ordering.
- name: Test Playbook Runner Role
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Create test playbook runner role
      block:
        - name: Create role directory structure
          file:
            path: "roles/test_playbook_runner/tasks"
            state: directory
            mode: '0755'
        - name: Create role main task file
          copy:
            dest: "roles/test_playbook_runner/tasks/main.yml"
            # {% raw %} stops the copy module from templating the Jinja
            # expressions at write time — playbook_path / output_pattern are
            # undefined here and must survive verbatim for the role to expand
            # them when it actually runs.
            content: |
              {% raw %}
              ---
              - name: Run playbook {{ playbook_path }}
                command: >-
                  ansible-playbook {{ playbook_path }} -i /tmp/test_inventory.ini -l {{ inventory_hostname }}
                delegate_to: localhost
                register: playbook_run
                ignore_errors: true

              - name: Check if playbook succeeded
                debug:
                  msg: "{{ playbook_path }} on {{ inventory_hostname }}: {{ 'SUCCESS' if playbook_run.rc == 0 else 'FAILED' }}"

              # fetch does not expand globs in src, and it must run against the
              # current target host (no delegate_to: localhost, which would
              # fetch from the controller): find the reports, then fetch each.
              - name: Find JSON reports on the container
                find:
                  paths: /tmp
                  patterns: "{{ output_pattern }}"
                register: report_files

              - name: Fetch JSON output from container
                fetch:
                  src: "{{ item.path }}"
                  dest: "/root/workspace/ppanda/mock-test-jsons/{{ inventory_hostname }}_{{ item.path | basename }}"
                  flat: true
                loop: "{{ report_files.files }}"
                failed_when: false
              {% endraw %}
      delegate_to: localhost

11
tests/test_inventory.ini Normal file
View File

@ -0,0 +1,11 @@
[test_containers]
ubuntu-20-04-test ansible_host=127.0.0.1 ansible_port=2220 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
ubuntu-22-04-test ansible_host=127.0.0.1 ansible_port=2221 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
ubuntu-24-04-test ansible_host=127.0.0.1 ansible_port=2222 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-11-test ansible_host=127.0.0.1 ansible_port=2223 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-12-test ansible_host=127.0.0.1 ansible_port=2224 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
debian-13-test ansible_host=127.0.0.1 ansible_port=2225 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
alpine-test ansible_host=127.0.0.1 ansible_port=2226 ansible_user=root ansible_ssh_private_key_file=/tmp/test_ansible_key ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
[all:vars]
ansible_python_interpreter=/usr/bin/python3