diff --git a/prowler/__main__.py b/prowler/__main__.py index 291adbd5..e572c5ed 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -27,6 +27,8 @@ from prowler.lib.outputs.outputs import ( close_json, display_compliance_table, display_summary_table, + extract_findings_statistics, + fill_html_overview_statistics, send_to_s3_bucket, ) from prowler.providers.aws.lib.allowlist.allowlist import parse_allowlist_file @@ -162,6 +164,9 @@ def prowler(): "There are no checks to execute. Please, check your input arguments" ) + # Extract findings stats + stats = extract_findings_statistics(findings) + if args.output_modes: for mode in args.output_modes: # Close json file if exists @@ -173,6 +178,9 @@ def prowler(): add_html_footer( audit_output_options.output_filename, args.output_directory ) + fill_html_overview_statistics( + stats, audit_output_options.output_filename, args.output_directory + ) # Send output to S3 if needed (-B / -D) if provider == "aws" and ( args.output_bucket or args.output_bucket_no_assume diff --git a/prowler/lib/outputs/models.py b/prowler/lib/outputs/models.py index 3a744152..b9425578 100644 --- a/prowler/lib/outputs/models.py +++ b/prowler/lib/outputs/models.py @@ -1,19 +1,14 @@ -from typing import List, Optional - -from pydantic import BaseModel -from prowler.lib.logger import logger -from prowler.config.config import timestamp -from prowler.lib.check.models import Remediation - -# Check_Report_AWS, Check_Report_Azure -from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info import importlib import sys -from typing import Any from csv import DictWriter +from typing import Any, List, Optional -# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info -# from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info +from pydantic import BaseModel + +from prowler.config.config import timestamp +from prowler.lib.check.models import Remediation +from prowler.lib.logger import logger +from 
prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info def generate_provider_output_csv(provider: str, finding, audit_info, mode: str, fd): diff --git a/prowler/lib/outputs/outputs.py b/prowler/lib/outputs/outputs.py index 13b055d1..ff2b905c 100644 --- a/prowler/lib/outputs/outputs.py +++ b/prowler/lib/outputs/outputs.py @@ -229,7 +229,7 @@ def report(check_findings, output_options, audit_info): csv_writer.writerow(compliance_row.__dict__) if "html" in file_descriptors: - fill_html(file_descriptors["html"], audit_info, finding) + fill_html(file_descriptors["html"], finding) file_descriptors["html"].write("") @@ -886,19 +886,43 @@ def add_html_header(file_descriptor, audit_info): - + - - + +
| Status | -Severity | -Service Name | -Region | -Check Title | -Resource ID | -Check Description | -Check ID | -Status Extended | -Risk | -Recomendation | -Recomendation URL | +Status | +Severity | +Service Name | +Region | +Check Title | +Resource ID | +Check Description | +Check ID | +Status Extended | +Risk | +Recomendation | +Recomendation URL | {finding.check_metadata.ServiceName} | {finding.region} | {finding.check_metadata.CheckTitle} | -{finding.resource_id} | +{finding.resource_id.replace("<", "<").replace(">", ">").replace("_", " |
{finding.check_metadata.Description} | -{finding.check_metadata.CheckID} | +{finding.check_metadata.CheckID.replace("_", " |
{finding.status_extended} | {finding.check_metadata.Risk} |
{finding.check_metadata.Remediation.Recommendation.Text} |
@@ -1031,6 +1087,60 @@ def fill_html(file_descriptor, audit_info, finding):
)
def extract_findings_statistics(findings: list) -> dict:
    """Aggregate scan results into summary counters.

    Args:
        findings: list of finding objects exposing `resource_id` and `status`.

    Returns:
        dict with keys `total_pass`, `total_fail`, `resources_count`
        (distinct resource_ids) and `findings_count`.
    """
    # Distinct resources are identified solely by resource_id
    unique_resources = {finding.resource_id for finding in findings}
    passed = sum(1 for finding in findings if finding.status == "PASS")
    failed = sum(1 for finding in findings if finding.status == "FAIL")

    return {
        "total_pass": passed,
        "total_fail": failed,
        "resources_count": len(unique_resources),
        "findings_count": len(findings),
    }
+
+
def fill_html_overview_statistics(stats, output_filename, output_directory):
    """Replace the TOTAL_* placeholders in the HTML report with real values.

    Args:
        stats: dict as produced by extract_findings_statistics().
        output_filename: report file name, without the HTML suffix.
        output_directory: directory containing the report file.

    Exits the process with a non-zero status on any failure.
    """
    try:
        filename = f"{output_directory}/{output_filename}{html_file_suffix}"
        # Read the rendered report
        with open(filename, "r") as file:
            filedata = file.read()

        # Placeholder -> statistic mapping; one pass per placeholder
        replacements = {
            "TOTAL_FINDINGS": stats.get("findings_count"),
            "TOTAL_RESOURCES": stats.get("resources_count"),
            "TOTAL_PASS": stats.get("total_pass"),
            "TOTAL_FAIL": stats.get("total_fail"),
        }
        for placeholder, value in replacements.items():
            filedata = filedata.replace(placeholder, str(value))

        # Write the updated report back in place
        with open(filename, "w") as file:
            file.write(filedata)

    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        # Exit non-zero so callers/CI detect the failure
        # (bare sys.exit() would report success with status 0)
        sys.exit(1)
+
+
def add_html_footer(output_filename, output_directory):
try:
filename = f"{output_directory}/{output_filename}{html_file_suffix}"
@@ -1040,60 +1150,87 @@ def add_html_footer(output_filename, output_directory):
)
file_descriptor.write(
"""
-
-
|---|