feat(compliance): Loader and Execute (#1465)
@@ -4,7 +4,7 @@ from typing import List, Optional
 
 from pydantic import BaseModel
 
 from config.config import timestamp
-from lib.check.models import Check_Report, ComplianceItem, Remediation
+from lib.check.models import Check_Report, Remediation
 from providers.aws.lib.audit_info.models import AWS_Organizations_Info
 
 
@@ -25,7 +25,6 @@ class Check_Output_JSON(BaseModel):
     OrganizationsInfo: Optional[AWS_Organizations_Info]
     Region: str = ""
     CheckID: str
-    # CheckName: str
     CheckTitle: str
     CheckType: List[str]
     ServiceName: str
@@ -46,7 +45,7 @@ class Check_Output_JSON(BaseModel):
     DependsOn: List[str]
     RelatedTo: List[str]
     Notes: str
-    Compliance: List[ComplianceItem]
+    # Compliance: List[ComplianceItem]
 
 
 # JSON ASFF Output
@@ -92,6 +91,26 @@ class Check_Output_JSON_ASFF(BaseModel):
     Remediation: dict = None
 
 
+class Check_Output_CSV_ENS_RD2022(BaseModel):
+    Provider: str
+    AccountId: str
+    Region: str
+    AssessmentDate: str
+    Requirements_Id: str
+    Requirements_Description: str
+    Requirements_Attributes_IdGrupoControl: str
+    Requirements_Attributes_Marco: str
+    Requirements_Attributes_Categoria: str
+    Requirements_Attributes_DescripcionControl: str
+    Requirements_Attributes_Nivel: str
+    Requirements_Attributes_Tipo: str
+    Requirements_Attributes_Dimensiones: str
+    Status: str
+    StatusExtended: str
+    ResourceId: str
+    CheckId: str
+
+
 @dataclass
 class Check_Output_CSV:
     assessment_start_time: str
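The hunks above belong to the outputs models module (imported later in this diff as lib.outputs.models). For orientation, a minimal sketch of how one ENS RD2022 row could be built and serialized; every value below is invented for illustration, only the model and the __dict__/DictWriter handoff come from this commit:

# Illustrative only: field values are hypothetical, the model is the one above.
row = Check_Output_CSV_ENS_RD2022(
    Provider="aws",
    AccountId="123456789012",
    Region="eu-west-1",
    AssessmentDate="2022-11-01T10:00:00",
    Requirements_Id="op.exp.8",
    Requirements_Description="Registro de actividad",
    Requirements_Attributes_IdGrupoControl="op.exp.8",
    Requirements_Attributes_Marco="operacional",
    Requirements_Attributes_Categoria="explotacion",
    Requirements_Attributes_DescripcionControl="Registro de actividad del usuario",
    Requirements_Attributes_Nivel="alto",
    Requirements_Attributes_Tipo="requisito",
    Requirements_Attributes_Dimensiones="trazabilidad",
    Status="PASS",
    StatusExtended="CloudTrail trail is multi-region",
    ResourceId="example-trail",
    CheckId="cloudtrail_multi_region_enabled",
)
# report() later hands row.__dict__ to csv.DictWriter with delimiter=";"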
@@ -106,7 +125,6 @@ class Check_Output_CSV:
     account_tags: str
     region: str
     check_id: str
-    # check_name: str
     check_title: str
     check_type: str
     status: str
@@ -132,7 +150,7 @@ class Check_Output_CSV:
     depends_on: str
     related_to: str
     notes: str
-    compliance: str
+    # compliance: str
 
     def get_csv_header(self):
         csv_header = []
@@ -160,7 +178,6 @@ class Check_Output_CSV:
         self.account_tags = organizations.account_details_tags
         self.region = report.region
         self.check_id = report.check_metadata.CheckID
-        # self.check_name = report.check_metadata.CheckName
         self.check_title = report.check_metadata.CheckTitle
         self.check_type = report.check_metadata.CheckType
         self.status = report.status
@@ -198,7 +215,7 @@ class Check_Output_CSV:
         self.depends_on = self.__unroll_list__(report.check_metadata.DependsOn)
         self.related_to = self.__unroll_list__(report.check_metadata.RelatedTo)
         self.notes = report.check_metadata.Notes
-        self.compliance = self.__unroll_compliance__(report.check_metadata.Compliance)
+        # self.compliance = self.__unroll_compliance__(report.check_metadata.Compliance)
 
     def __unroll_list__(self, listed_items: list):
         unrolled_items = ""
@@ -2,6 +2,8 @@ import json
 import os
 import sys
 from csv import DictWriter
+from io import TextIOWrapper
+from typing import Any
 
 from colorama import Fore, Style
 from tabulate import tabulate
@@ -12,12 +14,14 @@ from config.config import (
     json_file_suffix,
     orange_color,
     prowler_version,
+    timestamp,
     timestamp_iso,
     timestamp_utc,
 )
 from lib.logger import logger
 from lib.outputs.models import (
     Check_Output_CSV,
+    Check_Output_CSV_ENS_RD2022,
     Check_Output_JSON,
     Check_Output_JSON_ASFF,
     Compliance,
@@ -32,18 +36,17 @@ from providers.aws.lib.security_hub.security_hub import send_to_security_hub
 
 
 def report(check_findings, output_options, audit_info):
     # Sort check findings
     check_findings.sort(key=lambda x: x.region)
-    csv_fields = []
-    # check output options
+
+    # Generate the required output files
+    # csv_fields = []
     file_descriptors = {}
     if output_options.output_modes:
-        if "csv" in output_options.output_modes:
-            csv_fields = generate_csv_fields()
-
         # We have to create the required output files
         file_descriptors = fill_file_descriptors(
             output_options.output_modes,
             output_options.output_directory,
-            csv_fields,
             output_options.output_filename,
         )
@@ -70,7 +73,64 @@ def report(check_findings, output_options, audit_info):
                     f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
                 )
                 if file_descriptors:
                     # sending the finding to input options
+                    if "ens_rd2022_aws" in output_options.output_modes:
+                        # We have to retrieve all the check's compliance requirements
+                        check_compliance = output_options.bulk_checks_metadata[
+                            finding.check_metadata.CheckID
+                        ].Compliance
+                        for compliance in check_compliance:
+                            if (
+                                compliance.Framework == "ENS"
+                                and compliance.Version == "RD2022"
+                            ):
+                                for requirement in compliance.Requirements:
+                                    requirement_description = requirement.Description
+                                    requirement_id = requirement.Id
+                                    for attribute in requirement.Attributes:
+                                        compliance_row = Check_Output_CSV_ENS_RD2022(
+                                            Provider=finding.check_metadata.Provider,
+                                            AccountId=audit_info.audited_account,
+                                            Region=finding.region,
+                                            AssessmentDate=timestamp.isoformat(),
+                                            Requirements_Id=requirement_id,
+                                            Requirements_Description=requirement_description,
+                                            Requirements_Attributes_IdGrupoControl=attribute.get(
+                                                "IdGrupoControl"
+                                            ),
+                                            Requirements_Attributes_Marco=attribute.get(
+                                                "Marco"
+                                            ),
+                                            Requirements_Attributes_Categoria=attribute.get(
+                                                "Categoria"
+                                            ),
+                                            Requirements_Attributes_DescripcionControl=attribute.get(
+                                                "DescripcionControl"
+                                            ),
+                                            Requirements_Attributes_Nivel=attribute.get(
+                                                "Nivel"
+                                            ),
+                                            Requirements_Attributes_Tipo=attribute.get(
+                                                "Tipo"
+                                            ),
+                                            Requirements_Attributes_Dimensiones=",".join(
+                                                attribute.get("Dimensiones")
+                                            ),
+                                            Status=finding.status,
+                                            StatusExtended=finding.status_extended,
+                                            ResourceId=finding.resource_id,
+                                            CheckId=finding.check_metadata.CheckID,
+                                        )
+
+                                        csv_header = generate_csv_fields(
+                                            Check_Output_CSV_ENS_RD2022
+                                        )
+                                        csv_writer = DictWriter(
+                                            file_descriptors["ens_rd2022_aws"],
+                                            fieldnames=csv_header,
+                                            delimiter=";",
+                                        )
+                                        csv_writer.writerow(compliance_row.__dict__)
+
                     if "csv" in file_descriptors:
                         finding_output = Check_Output_CSV(
                             audit_info.audited_account,
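The nested loop above fans a single finding out into one CSV row per (requirement, attribute) pair of every ENS RD2022 entry attached to the check. A self-contained sketch of that fan-out, using hypothetical plain-dict data in place of the real compliance metadata objects:

# Hypothetical data shaped like the compliance metadata traversed above.
check_compliance = [
    {
        "Framework": "ENS",
        "Version": "RD2022",
        "Requirements": [
            {
                "Id": "op.exp.8",
                "Description": "Registro de actividad",
                "Attributes": [{"Nivel": "alto"}, {"Nivel": "medio"}],
            }
        ],
    }
]
rows = [
    (requirement["Id"], attribute.get("Nivel"))
    for entry in check_compliance
    if entry["Framework"] == "ENS" and entry["Version"] == "RD2022"
    for requirement in entry["Requirements"]
    for attribute in requirement["Attributes"]
]
print(rows)  # [('op.exp.8', 'alto'), ('op.exp.8', 'medio')]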
@@ -79,7 +139,9 @@ def report(check_findings, output_options, audit_info):
                             audit_info.organizations_metadata,
                         )
                         csv_writer = DictWriter(
-                            file_descriptors["csv"], fieldnames=csv_fields, delimiter=";"
+                            file_descriptors["csv"],
+                            fieldnames=generate_csv_fields(Check_Output_CSV),
+                            delimiter=";",
                         )
                         csv_writer.writerow(finding_output.__dict__)
 
@@ -117,65 +179,75 @@ def report(check_findings, output_options, audit_info):
         file_descriptors.get(file_descriptor).close()
 
 
-def fill_file_descriptors(output_modes, output_directory, csv_fields, output_filename):
-    file_descriptors = {}
-    for output_mode in output_modes:
-        if output_mode == "csv":
-            filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
-            if file_exists(filename):
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-            else:
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-                csv_header = [x.upper() for x in csv_fields]
-                csv_writer = DictWriter(
-                    file_descriptor, fieldnames=csv_header, delimiter=";"
-                )
-                csv_writer.writeheader()
-            file_descriptors.update({output_mode: file_descriptor})
-
-        if output_mode == "json":
-            filename = f"{output_directory}/{output_filename}{json_file_suffix}"
-            if file_exists(filename):
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-            else:
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-                file_descriptor.write("[")
-            file_descriptors.update({output_mode: file_descriptor})
-
-        if output_mode == "json-asff":
-            filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
-            if file_exists(filename):
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-            else:
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-                file_descriptor.write("[")
-            file_descriptors.update({output_mode: file_descriptor})
+def initialize_file_descriptor(
+    filename: str, output_mode: str, format: Any = None
+) -> TextIOWrapper:
+    """Open/Create the output file. If needed include headers or the required format"""
+    if file_exists(filename):
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+    else:
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+
+        if output_mode in ("csv", "ens_rd2022_aws"):
+            # Format is the class model of the CSV format to print the headers
+            csv_header = [x.upper() for x in generate_csv_fields(format)]
+            csv_writer = DictWriter(
+                file_descriptor, fieldnames=csv_header, delimiter=";"
+            )
+            csv_writer.writeheader()
+
+        if output_mode in ("json", "json-asff"):
+            file_descriptor.write("[")
+
+    return file_descriptor
+
+
+def fill_file_descriptors(output_modes, output_directory, output_filename):
+    file_descriptors = {}
+    if output_modes:
+        for output_mode in output_modes:
+            if output_mode == "csv":
+                filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
+                file_descriptor = initialize_file_descriptor(
+                    filename, output_mode, Check_Output_CSV
+                )
+                file_descriptors.update({output_mode: file_descriptor})
+
+            if output_mode == "json":
+                filename = f"{output_directory}/{output_filename}{json_file_suffix}"
+                file_descriptor = initialize_file_descriptor(filename, output_mode)
+                file_descriptors.update({output_mode: file_descriptor})
+
+            if output_mode == "json-asff":
+                filename = (
+                    f"{output_directory}/{output_filename}{json_asff_file_suffix}"
+                )
+                file_descriptor = initialize_file_descriptor(filename, output_mode)
+                file_descriptors.update({output_mode: file_descriptor})
+
+            if output_mode == "ens_rd2022_aws":
+                filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
+                file_descriptor = initialize_file_descriptor(
+                    filename, output_mode, Check_Output_CSV_ENS_RD2022
+                )
+                file_descriptors.update({output_mode: file_descriptor})
 
     return file_descriptors
 
 
-def set_report_color(status):
+def set_report_color(status: str) -> str:
+    """Return the color for a give result status"""
     color = ""
     if status == "PASS":
         color = Fore.GREEN
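initialize_file_descriptor centralizes what the four branches below it used to repeat: open the file in append mode, and only when the file is being created write the CSV header (uppercased field names) or the opening "[" of the JSON array. A standalone sketch of the same idea, using os.path.exists and plain open in place of prowler's file_exists/open_file helpers:

import os
from csv import DictWriter

def initialize_descriptor_sketch(filename: str, fieldnames: list):
    # The header is written only on creation, so repeated runs
    # append rows without duplicating it.
    is_new = not os.path.exists(filename)
    file_descriptor = open(filename, "a")
    if is_new:
        DictWriter(
            file_descriptor,
            fieldnames=[field.upper() for field in fieldnames],
            delimiter=";",
        ).writeheader()
    return file_descriptor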
@@ -192,9 +264,10 @@ def set_report_color(status):
     return color
 
 
-def generate_csv_fields():
+def generate_csv_fields(format: Any) -> list[str]:
+    """Generates the CSV headers for the given class"""
     csv_fields = []
-    for field in Check_Output_CSV.__dict__["__annotations__"].keys():
+    for field in format.__dict__.get("__annotations__").keys():
         csv_fields.append(field)
     return csv_fields
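generate_csv_fields only reads the class-level annotations, which is why the same helper can now serve both the Check_Output_CSV dataclass and the new pydantic ENS model. A quick runnable illustration of the lookup it performs, on a hypothetical class:

from dataclasses import dataclass

@dataclass
class Example:
    provider: str
    region: str

# The same attribute access the function uses:
print(list(Example.__dict__.get("__annotations__").keys()))  # ['provider', 'region']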
@@ -271,7 +344,9 @@ def close_json(output_filename, output_directory, mode):
             file_descriptor.write("]")
             file_descriptor.close()
     except Exception as error:
-        logger.critical(f"{error.__class__.__name__} -- {error}")
+        logger.critical(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+        )
         sys.exit()
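The enriched log line pulls the line number from the traceback already attached to the exception; a minimal demonstration of the pattern:

try:
    1 / 0
except Exception as error:
    # Prints e.g. "ZeroDivisionError[2] -- division by zero"
    print(f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}")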
@@ -294,7 +369,9 @@ def send_to_s3_bucket(
         s3_client.upload_file(file_name, bucket_name, object_name)
 
     except Exception as error:
-        logger.critical(f"{error.__class__.__name__} -- {error}")
+        logger.critical(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+        )
         sys.exit()
@@ -305,75 +382,76 @@ def display_summary_table(
     output_directory: str,
 ):
     try:
         if findings:
             current = {
                 "Service": "",
                 "Provider": "",
+                "Total": 0,
                 "Critical": 0,
                 "High": 0,
                 "Medium": 0,
                 "Low": 0,
             }
             findings_table = {
                 "Provider": [],
                 "Service": [],
                 "Status": [],
                 "Critical": [],
                 "High": [],
                 "Medium": [],
                 "Low": [],
             }
             pass_count = fail_count = 0
             for finding in findings:
                 # If new service and not first, add previous row
                 if (
                     current["Service"] != finding.check_metadata.ServiceName
                     and current["Service"]
                 ):
                     add_service_to_table(findings_table, current)
 
-                    current["Critical"] = current["High"] = current["Medium"] = current[
-                        "Low"
-                    ] = 0
+                    current["Total"] = current["Critical"] = current["High"] = current[
+                        "Medium"
+                    ] = current["Low"] = 0
 
                 current["Service"] = finding.check_metadata.ServiceName
                 current["Provider"] = finding.check_metadata.Provider
 
+                current["Total"] += 1
                 if finding.status == "PASS":
                     pass_count += 1
                 elif finding.status == "FAIL":
                     fail_count += 1
                     if finding.check_metadata.Severity == "critical":
                         current["Critical"] += 1
                     elif finding.check_metadata.Severity == "high":
                         current["High"] += 1
                     elif finding.check_metadata.Severity == "medium":
                         current["Medium"] += 1
                     elif finding.check_metadata.Severity == "low":
                         current["Low"] += 1
 
             # Add final service
             add_service_to_table(findings_table, current)
 
             print("\nOverview Results:")
             overview_table = [
                 [
                     f"{Fore.RED}{round(fail_count/len(findings)*100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
                     f"{Fore.GREEN}{round(pass_count/len(findings)*100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
                 ]
             ]
             print(tabulate(overview_table, tablefmt="rounded_grid"))
             print(
                 f"\nAccount {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL} Scan Results (severity columns are for fails only):"
             )
             print(tabulate(findings_table, headers="keys", tablefmt="rounded_grid"))
             print(
                 f"{Style.BRIGHT}* You only see here those services that contains resources.{Style.RESET_ALL}"
             )
             print("\nDetailed results are in:")
             print(f" - CSV: {output_directory}/{output_filename}.csv")
             print(f" - JSON: {output_directory}/{output_filename}.json\n")
 
     except Exception as error:
         logger.critical(
@@ -389,9 +467,12 @@ def add_service_to_table(findings_table, current):
         or current["Medium"] > 0
         or current["Low"] > 0
     ):
-        current["Status"] = f"{Fore.RED}FAIL{Style.RESET_ALL}"
+        total_fails = (
+            current["Critical"] + current["High"] + current["Medium"] + current["Low"]
+        )
+        current["Status"] = f"{Fore.RED}FAIL ({total_fails}){Style.RESET_ALL}"
     else:
-        current["Status"] = f"{Fore.GREEN}PASS{Style.RESET_ALL}"
+        current["Status"] = f"{Fore.GREEN}PASS ({current['Total']}){Style.RESET_ALL}"
     findings_table["Provider"].append(current["Provider"])
     findings_table["Service"].append(current["Service"])
     findings_table["Status"].append(current["Status"])
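So the Status column now carries counts: a failing service shows the sum of its failed findings across severities, a passing one its total. With hypothetical counters:

# Hypothetical per-service counters.
current = {"Total": 10, "Critical": 1, "High": 0, "Medium": 2, "Low": 0}
total_fails = (
    current["Critical"] + current["High"] + current["Medium"] + current["Low"]
)
status = f"FAIL ({total_fails})" if total_fails > 0 else f"PASS ({current['Total']})"
print(status)  # FAIL (3)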
@@ -403,3 +484,109 @@ def add_service_to_table(findings_table, current):
         f"{Fore.YELLOW}{current['Medium']}{Style.RESET_ALL}"
     )
     findings_table["Low"].append(f"{Fore.BLUE}{current['Low']}{Style.RESET_ALL}")
+
+
+def display_compliance_table(
+    findings: list,
+    bulk_checks_metadata: dict,
+    compliance_framework: str,
+    output_filename: str,
+    output_directory: str,
+):
+    try:
+        if "ens_rd2022_aws" in compliance_framework:
+            marcos = {}
+            ens_compliance_table = {
+                "Proveedor": [],
+                "Marco/Categoria": [],
+                "Estado": [],
+                "PYTEC": [],
+                "Alto": [],
+                "Medio": [],
+                "Bajo": [],
+            }
+            pass_count = fail_count = 0
+            for finding in findings:
+                check = bulk_checks_metadata[finding.check_metadata.CheckID]
+                check_compliances = check.Compliance
+                for compliance in check_compliances:
+                    if (
+                        compliance.Framework == "ENS"
+                        and compliance.Provider == "AWS"
+                        and compliance.Version == "RD2022"
+                    ):
+                        for requirement in compliance.Requirements:
+                            for attribute in requirement.Attributes:
+                                marco_categoria = (
+                                    f"{attribute['Marco']}/{attribute['Categoria']}"
+                                )
+                                # Check if Marco/Categoria exists
+                                if marco_categoria not in marcos:
+                                    marcos[marco_categoria] = {
+                                        "Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}",
+                                        "Pytec": 0,
+                                        "Alto": 0,
+                                        "Medio": 0,
+                                        "Bajo": 0,
+                                    }
+                                if finding.status == "FAIL":
+                                    fail_count += 1
+                                    marcos[marco_categoria][
+                                        "Estado"
+                                    ] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"
+                                elif finding.status == "PASS":
+                                    pass_count += 1
+                                if attribute["Nivel"] == "pytec":
+                                    marcos[marco_categoria]["Pytec"] += 1
+                                elif attribute["Nivel"] == "alto":
+                                    marcos[marco_categoria]["Alto"] += 1
+                                elif attribute["Nivel"] == "medio":
+                                    marcos[marco_categoria]["Medio"] += 1
+                                elif attribute["Nivel"] == "bajo":
+                                    marcos[marco_categoria]["Bajo"] += 1
+
+            # Add results to table
+            for marco in marcos:
+                ens_compliance_table["Proveedor"].append("aws")
+                ens_compliance_table["Marco/Categoria"].append(marco)
+                ens_compliance_table["Estado"].append(marcos[marco]["Estado"])
+                ens_compliance_table["PYTEC"].append(
+                    f"{Fore.LIGHTRED_EX}{marcos[marco]['Pytec']}{Style.RESET_ALL}"
+                )
+                ens_compliance_table["Alto"].append(
+                    f"{Fore.RED}{marcos[marco]['Alto']}{Style.RESET_ALL}"
+                )
+                ens_compliance_table["Medio"].append(
+                    f"{Fore.YELLOW}{marcos[marco]['Medio']}{Style.RESET_ALL}"
+                )
+                ens_compliance_table["Bajo"].append(
+                    f"{Fore.BLUE}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
+                )
+
+            print(
+                f"\nEstado de Cumplimiento de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:"
+            )
+            overview_table = [
+                [
+                    f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}",
+                    f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}",
+                ]
+            ]
+            print(tabulate(overview_table, tablefmt="rounded_grid"))
+            print(f"\nResultados de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:")
+            print(
+                tabulate(ens_compliance_table, headers="keys", tablefmt="rounded_grid")
+            )
+            print(
+                f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}"
+            )
+            print("\nResultados detallados en:")
+            print(
+                f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
+            )
+
+    except Exception as error:
+        logger.critical(
+            f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
+        )
+        sys.exit()
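The new table keys everything by Marco/Categoria: each bucket starts as CUMPLE and flips to NO CUMPLE on the first failing finding, while the Nivel counters accumulate per attribute. Reduced to its core with hypothetical findings (colorama formatting omitted):

# (marco/categoria, status, nivel) triples standing in for real findings.
findings = [
    ("operacional/explotacion", "PASS", "alto"),
    ("operacional/explotacion", "FAIL", "medio"),
]

marcos = {}
for marco_categoria, status, nivel in findings:
    entry = marcos.setdefault(
        marco_categoria,
        {"Estado": "CUMPLE", "Pytec": 0, "Alto": 0, "Medio": 0, "Bajo": 0},
    )
    if status == "FAIL":
        entry["Estado"] = "NO CUMPLE"
    entry[nivel.capitalize()] += 1  # "pytec" -> "Pytec", "alto" -> "Alto", ...

print(marcos)
# {'operacional/explotacion': {'Estado': 'NO CUMPLE', 'Pytec': 0, 'Alto': 1, 'Medio': 1, 'Bajo': 0}}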
@@ -2,6 +2,7 @@ import os
 from os import path, remove
 
 import boto3
+import pytest
 from colorama import Fore
 from moto import mock_s3
 
@@ -9,14 +10,15 @@ from config.config import (
     csv_file_suffix,
     json_asff_file_suffix,
     json_file_suffix,
+    orange_color,
     output_file_timestamp,
     prowler_version,
     timestamp_iso,
     timestamp_utc,
-    orange_color,
 )
 from lib.check.models import Check_Report, load_check_metadata
 from lib.outputs.models import (
     Check_Output_CSV,
     Check_Output_JSON,
     Check_Output_JSON_ASFF,
     Compliance,
@@ -40,7 +42,7 @@ class Test_Outputs:
     def test_fill_file_descriptors(self):
         audited_account = "123456789012"
         output_directory = f"{os.path.dirname(os.path.realpath(__file__))}"
-        csv_fields = generate_csv_fields()
+        generate_csv_fields(Check_Output_CSV)
         test_output_modes = [
             ["csv"],
             ["json"],
@@ -98,7 +100,6 @@ class Test_Outputs:
             test_output_file_descriptors = fill_file_descriptors(
                 output_mode_list,
                 output_directory,
-                csv_fields,
                 output_filename,
             )
             for output_mode in output_mode_list:
@@ -115,6 +116,17 @@ class Test_Outputs:
         for status in test_status:
             assert set_report_color(status) in test_colors
 
+    def test_set_report_color_invalid(self):
+        test_status = "INVALID"
+
+        with pytest.raises(Exception) as exc:
+            set_report_color(test_status)
+
+        assert "Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING" in str(
+            exc.value
+        )
+        assert exc.type == Exception
+
     def test_generate_csv_fields(self):
         expected = [
             "assessment_start_time",
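Note that this test only passes if set_report_color raises for unknown statuses; the raise itself is not visible in this diff, so the following shape of the function is an assumption, with the color values purely illustrative:

# Assumed shape of the validation the new test exercises (not shown in this diff);
# the real function returns colorama colors, not strings.
def set_report_color_sketch(status: str) -> str:
    colors = {"PASS": "green", "FAIL": "red", "ERROR": "black", "WARNING": "orange"}
    if status not in colors:
        raise Exception("Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING")
    return colors[status]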
@@ -154,10 +166,10 @@ class Test_Outputs:
             "depends_on",
             "related_to",
             "notes",
-            "compliance",
+            # "compliance",
         ]
 
-        assert generate_csv_fields() == expected
+        assert generate_csv_fields(Check_Output_CSV) == expected
 
     def test_fill_json(self):
         input_audit_info = AWS_Audit_Info(
@@ -177,7 +189,7 @@ class Test_Outputs:
         finding = Check_Report(
             load_check_metadata(
                 f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
-            )
+            ).json()
         )
         finding.resource_details = "Test resource details"
         finding.resource_id = "test-resource"
@@ -221,7 +233,7 @@ class Test_Outputs:
         finding = Check_Report(
             load_check_metadata(
                 f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
-            )
+            ).json()
         )
         finding.resource_details = "Test resource details"
         finding.resource_id = "test-resource"