mirror of
https://github.com/ghndrx/prowler.git
synced 2026-02-10 14:55:00 +00:00
900 lines
37 KiB
Python
900 lines
37 KiB
Python
import json
|
|
import os
|
|
import sys
|
|
from csv import DictWriter
|
|
from io import TextIOWrapper
|
|
from typing import Any
|
|
|
|
from colorama import Fore, Style
|
|
from tabulate import tabulate
|
|
|
|
from config.config import (
|
|
csv_file_suffix,
|
|
html_file_suffix,
|
|
html_logo_img,
|
|
html_logo_url,
|
|
json_asff_file_suffix,
|
|
json_file_suffix,
|
|
orange_color,
|
|
output_file_timestamp,
|
|
prowler_version,
|
|
timestamp,
|
|
timestamp_iso,
|
|
timestamp_utc,
|
|
)
|
|
from lib.check.models import Output_From_Options
|
|
from lib.logger import logger
|
|
from lib.outputs.models import (
|
|
Check_Output_CSV,
|
|
Check_Output_CSV_ENS_RD2022,
|
|
Check_Output_JSON,
|
|
Check_Output_JSON_ASFF,
|
|
Compliance,
|
|
ProductFields,
|
|
Resource,
|
|
Severity,
|
|
)
|
|
from lib.utils.utils import file_exists, hash_sha512, open_file
|
|
from providers.aws.lib.allowlist.allowlist import is_allowlisted
|
|
from providers.aws.lib.audit_info.models import AWS_Audit_Info
|
|
from providers.aws.lib.security_hub.security_hub import send_to_security_hub
|
|
|
|
|
|
def report(check_findings, output_options, audit_info):
    """Process the findings of a single check: print them to stdout and
    append them to every enabled output (CSV, JSON, JSON-ASFF, HTML,
    ENS compliance CSV and, optionally, AWS Security Hub).

    Args:
        check_findings: list of finding objects produced by one check.
        output_options: parsed output configuration (modes, paths, quiet/verbose,
            allowlist file, bulk checks metadata, Security Hub flag).
        audit_info: AWS audit session/account information.

    Any exception is logged and swallowed so one bad finding does not abort
    the whole scan.
    """
    try:
        # Sort check findings so output is grouped by region
        check_findings.sort(key=lambda x: x.region)

        # Generate the required output files
        # csv_fields = []
        file_descriptors = {}
        if output_options.output_modes:
            # We have to create the required output files
            file_descriptors = fill_file_descriptors(
                output_options.output_modes,
                output_options.output_directory,
                output_options.output_filename,
                audit_info,
            )

        if check_findings:
            for finding in check_findings:
                # Check if finding is allowlisted; allowlisted findings are
                # downgraded to WARNING instead of being dropped
                if output_options.allowlist_file:
                    if is_allowlisted(
                        output_options.allowlist_file,
                        audit_info.audited_account,
                        finding.check_metadata.CheckID,
                        finding.region,
                        finding.resource_id,
                    ):
                        finding.status = "WARNING"
                # Print findings by stdout: quiet mode shows only FAILs,
                # otherwise verbose mode shows everything
                color = set_report_color(finding.status)
                if output_options.is_quiet and "FAIL" in finding.status:
                    print(
                        f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
                    )
                elif not output_options.is_quiet and output_options.verbose:
                    print(
                        f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
                    )
                if file_descriptors:
                    # ENS compliance output only applies to AWS findings
                    if finding.check_metadata.Provider == "aws":
                        if "ens_rd2022_aws" in output_options.output_modes:
                            # We have to retrieve all the check's compliance requirements
                            check_compliance = output_options.bulk_checks_metadata[
                                finding.check_metadata.CheckID
                            ].Compliance
                            for compliance in check_compliance:
                                if (
                                    compliance.Framework == "ENS"
                                    and compliance.Version == "RD2022"
                                ):
                                    for requirement in compliance.Requirements:
                                        requirement_description = (
                                            requirement.Description
                                        )
                                        requirement_id = requirement.Id
                                        # One CSV row per (requirement, attribute)
                                        for attribute in requirement.Attributes:
                                            compliance_row = Check_Output_CSV_ENS_RD2022(
                                                Provider=finding.check_metadata.Provider,
                                                AccountId=audit_info.audited_account,
                                                Region=finding.region,
                                                AssessmentDate=timestamp.isoformat(),
                                                Requirements_Id=requirement_id,
                                                Requirements_Description=requirement_description,
                                                Requirements_Attributes_IdGrupoControl=attribute.get(
                                                    "IdGrupoControl"
                                                ),
                                                Requirements_Attributes_Marco=attribute.get(
                                                    "Marco"
                                                ),
                                                Requirements_Attributes_Categoria=attribute.get(
                                                    "Categoria"
                                                ),
                                                Requirements_Attributes_DescripcionControl=attribute.get(
                                                    "DescripcionControl"
                                                ),
                                                Requirements_Attributes_Nivel=attribute.get(
                                                    "Nivel"
                                                ),
                                                Requirements_Attributes_Tipo=attribute.get(
                                                    "Tipo"
                                                ),
                                                Requirements_Attributes_Dimensiones=",".join(
                                                    attribute.get("Dimensiones")
                                                ),
                                                Status=finding.status,
                                                StatusExtended=finding.status_extended,
                                                ResourceId=finding.resource_id,
                                                CheckId=finding.check_metadata.CheckID,
                                            )

                                            # NOTE(review): header fields and the
                                            # DictWriter are rebuilt for every row;
                                            # they could be hoisted out of the loops
                                            csv_header = generate_csv_fields(
                                                Check_Output_CSV_ENS_RD2022
                                            )
                                            csv_writer = DictWriter(
                                                file_descriptors["ens_rd2022_aws"],
                                                fieldnames=csv_header,
                                                delimiter=";",
                                            )
                                            csv_writer.writerow(compliance_row.__dict__)

                    if "csv" in file_descriptors:
                        finding_output = Check_Output_CSV(
                            audit_info.audited_account,
                            audit_info.profile,
                            finding,
                            audit_info.organizations_metadata,
                        )
                        csv_writer = DictWriter(
                            file_descriptors["csv"],
                            fieldnames=generate_csv_fields(Check_Output_CSV),
                            delimiter=";",
                        )
                        csv_writer.writerow(finding_output.__dict__)

                    if "json" in file_descriptors:
                        finding_output = Check_Output_JSON(
                            **finding.check_metadata.dict()
                        )
                        fill_json(finding_output, audit_info, finding)

                        # Trailing comma is replaced by "]" in close_json()
                        json.dump(
                            finding_output.dict(),
                            file_descriptors["json"],
                            indent=4,
                        )
                        file_descriptors["json"].write(",")

                    if "json-asff" in file_descriptors:
                        finding_output = Check_Output_JSON_ASFF()
                        fill_json_asff(finding_output, audit_info, finding)

                        json.dump(
                            finding_output.dict(),
                            file_descriptors["json-asff"],
                            indent=4,
                        )
                        file_descriptors["json-asff"].write(",")

                    if "html" in file_descriptors:
                        fill_html(file_descriptors["html"], audit_info, finding)

                        # NOTE(review): writing an empty string is a no-op
                        file_descriptors["html"].write("")

                    # Check if it is needed to send findings to security hub
                    # NOTE(review): finding_output here is whichever output
                    # object was built last above (ASFF when enabled); if
                    # Security Hub is enabled without any csv/json mode this
                    # name would be unbound — confirm the CLI enforces it.
                    if output_options.security_hub_enabled:
                        send_to_security_hub(
                            finding.region, finding_output, audit_info.audit_session
                        )
        else:  # No service resources in the whole account
            color = set_report_color("INFO")
            if not output_options.is_quiet and output_options.verbose:
                print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
        # Separator between findings and bar
        if output_options.is_quiet or output_options.verbose:
            print()
        if file_descriptors:
            # Close all file descriptors
            for file_descriptor in file_descriptors:
                file_descriptors.get(file_descriptor).close()
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
|
|
|
|
|
|
def initialize_file_descriptor(
    filename: str,
    output_mode: str,
    audit_info: AWS_Audit_Info,
    format: Any = None,
) -> TextIOWrapper:
    """Open/Create the output file. If needed include headers or the required format.

    Args:
        filename: full path of the output file.
        output_mode: one of "csv", "json", "json-asff", "html", "ens_rd2022_aws".
        audit_info: audit metadata, used to render the HTML header.
        format: for CSV-based modes, the model class whose annotations define
            the CSV header fields.

    Returns:
        The open (append-mode) file descriptor.

    Bug fix: the original checked ``file_exists`` but both branches were
    identical, so headers / the JSON opening bracket / the HTML header were
    written again every time an existing file was appended to. Opening
    tokens are now only written when the file is newly created.
    """
    try:
        # Remember whether the file is new BEFORE opening it in append mode
        is_new_file = not file_exists(filename)
        file_descriptor = open_file(
            filename,
            "a",
        )

        if is_new_file:
            if output_mode in ("csv", "ens_rd2022_aws"):
                # Format is the class model of the CSV format to print the headers
                csv_header = [x.upper() for x in generate_csv_fields(format)]
                csv_writer = DictWriter(
                    file_descriptor, fieldnames=csv_header, delimiter=";"
                )
                csv_writer.writeheader()

            if output_mode in ("json", "json-asff"):
                # Opening bracket; close_json() terminates the array later
                file_descriptor.write("[")

            if "html" in output_mode:
                add_html_header(file_descriptor, audit_info)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )

    return file_descriptor
|
|
|
|
|
|
def fill_file_descriptors(output_modes, output_directory, output_filename, audit_info):
    """Create one open file descriptor per requested output mode.

    Args:
        output_modes: iterable of mode names ("csv", "json", "json-asff",
            "html", "ens_rd2022_aws"); unknown modes are ignored.
        output_directory: directory where output files are created.
        output_filename: base name (suffix is appended per mode).
        audit_info: audit metadata forwarded to initialize_file_descriptor.

    Returns:
        dict mapping each handled mode to its open file descriptor.
    """
    file_descriptors = {}
    try:
        # Per mode: (filename infix, filename suffix, CSV model class or None)
        mode_settings = {
            "csv": ("", csv_file_suffix, Check_Output_CSV),
            "json": ("", json_file_suffix, None),
            "json-asff": ("", json_asff_file_suffix, None),
            "html": ("", html_file_suffix, None),
            "ens_rd2022_aws": (
                "_ens_rd2022_aws",
                csv_file_suffix,
                Check_Output_CSV_ENS_RD2022,
            ),
        }
        for output_mode in output_modes or []:
            settings = mode_settings.get(output_mode)
            if settings is None:
                # Unknown modes get no descriptor, matching previous behavior
                continue
            infix, suffix, csv_model = settings
            filename = f"{output_directory}/{output_filename}{infix}{suffix}"
            file_descriptors[output_mode] = initialize_file_descriptor(
                filename, output_mode, audit_info, csv_model
            )
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )

    return file_descriptors
|
|
|
|
|
|
def set_report_color(status: str) -> str:
    """Return the terminal color for a given result status.

    Args:
        status: one of "PASS", "FAIL", "ERROR", "WARNING" or "INFO".

    Raises:
        Exception: if status is not one of the accepted values.
    """
    status_colors = {
        "PASS": Fore.GREEN,
        "FAIL": Fore.RED,
        "ERROR": Fore.BLACK,
        "WARNING": orange_color,
        "INFO": Fore.YELLOW,
    }
    if status not in status_colors:
        # Bug fix: the previous message omitted INFO, which is accepted above
        raise Exception(
            "Invalid Report Status. Must be PASS, FAIL, ERROR, WARNING or INFO"
        )
    return status_colors[status]
|
|
|
|
|
|
def generate_csv_fields(format: Any) -> list[str]:
    """Return the CSV header fields declared as annotations on *format*.

    Args:
        format: a class whose own ``__annotations__`` define the columns.

    Returns:
        The annotated field names, in declaration order.
    """
    annotations = format.__dict__.get("__annotations__")
    return [field_name for field_name in annotations.keys()]
|
|
|
|
|
|
def fill_json(finding_output, audit_info, finding):
    """Complete a Check_Output_JSON object with audit and finding data.

    Mutates *finding_output* in place and also returns it.
    """
    finding_output.AssessmentStartTime = timestamp_iso
    finding_output.FindingUniqueId = ""
    finding_output.Profile = audit_info.profile
    finding_output.AccountId = audit_info.audited_account
    # Organizations metadata is optional
    if audit_info.organizations_metadata:
        finding_output.OrganizationsInfo = audit_info.organizations_metadata.__dict__
    # Copy the per-finding fields over
    for output_attr, value in (
        ("Region", finding.region),
        ("Status", finding.status),
        ("StatusExtended", finding.status_extended),
        ("ResourceId", finding.resource_id),
        ("ResourceArn", finding.resource_arn),
        ("ResourceDetails", finding.resource_details),
    ):
        setattr(finding_output, output_attr, value)

    return finding_output
|
|
|
|
|
|
def fill_json_asff(finding_output, audit_info, finding):
    """Complete a Check_Output_JSON_ASFF object with audit and finding data.

    Mutates *finding_output* in place (and *finding* when its resource id is
    empty) and also returns it.
    """
    # Security Hub rejects findings without a resource id
    if finding.resource_id == "":
        finding.resource_id = "NONE_PROVIDED"

    metadata = finding.check_metadata
    resource_id = finding.resource_id
    region = finding.region
    account = audit_info.audited_account
    partition = audit_info.audited_partition

    finding_output.Id = f"prowler-{metadata.CheckID}-{account}-{region}-{hash_sha512(resource_id)}"
    finding_output.ProductArn = (
        f"arn:{partition}:securityhub:{region}::product/prowler/prowler"
    )
    finding_output.ProductFields = ProductFields(
        ProviderVersion=prowler_version, ProwlerResourceName=resource_id
    )
    finding_output.GeneratorId = "prowler-" + metadata.CheckID
    finding_output.AwsAccountId = account
    finding_output.Types = metadata.CheckType
    # All three ASFF timestamps carry the same scan time
    asff_timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
    finding_output.FirstObservedAt = asff_timestamp
    finding_output.UpdatedAt = asff_timestamp
    finding_output.CreatedAt = asff_timestamp
    finding_output.Severity = Severity(Label=metadata.Severity.upper())
    finding_output.Title = metadata.CheckTitle
    finding_output.Description = metadata.Description
    finding_output.Resources = [
        Resource(
            Id=resource_id,
            Type=metadata.ResourceType,
            Partition=partition,
            Region=region,
        )
    ]
    # Add ED to PASS or FAIL (PASSED/FAILED)
    finding_output.Compliance = Compliance(
        Status=finding.status + "ED",
        RelatedRequirements=metadata.CheckType,
    )
    finding_output.Remediation = {
        "Recommendation": metadata.Remediation.Recommendation
    }

    return finding_output
|
|
|
|
|
|
def fill_html(file_descriptor, audit_info, finding):
    """Write a single finding as a row of the HTML report's findings table."""
    # Map the finding status to the Bootstrap row class; any other status
    # (i.e. PASS) keeps the green success styling
    status_to_class = {
        "INFO": "table-info",
        "FAIL": "table-danger",
        "WARNING": "table-warning",
    }
    row_class = status_to_class.get(finding.status, "p-3 mb-2 bg-success-custom")
    metadata = finding.check_metadata
    recommendation = metadata.Remediation.Recommendation
    file_descriptor.write(
        f"""
            <tr class="{row_class}">
                <td>{finding.status}</td>
                <td>{metadata.Severity}</td>
                <td>{audit_info.audited_account}</td>
                <td>{finding.region}</td>
                <td>{metadata.ServiceName}</td>
                <td>{metadata.CheckID}</td>
                <td>{metadata.CheckTitle}</td>
                <td>{finding.status_extended}</td>
                <td><p class="show-read-more">{metadata.Risk}</p></td>
                <td><p class="show-read-more">{recommendation.Text}</p></td>
                <td><a class="read-more" href="{recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
                <td>{finding.resource_id}</td>
            </tr>
        """
    )
|
|
|
|
|
|
def close_json(output_filename, output_directory, mode):
    """Terminate a JSON/JSON-ASFF output file as a valid JSON array.

    The file was initialized with "[" and each finding was appended followed
    by a trailing comma, so the last comma must be replaced by "]".

    Bug fix: when no finding was ever written the file contains only "[";
    the previous code truncated that bracket and wrote "]", leaving the
    invalid document "]". Now an empty report becomes "[]".

    On any error the problem is logged as critical and the program exits.
    """
    try:
        suffix = json_asff_file_suffix if mode == "json-asff" else json_file_suffix
        filename = f"{output_directory}/{output_filename}{suffix}"
        file_descriptor = open_file(
            filename,
            "a",
        )
        if file_descriptor.tell() > 0:
            if file_descriptor.tell() > 1:
                # Findings were written: drop the trailing comma
                file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
                file_descriptor.truncate()
            # Close the JSON array ("[]" when there were no findings)
            file_descriptor.write("]")
        file_descriptor.close()
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit()
|
|
|
|
|
|
def send_to_s3_bucket(
    output_filename, output_directory, output_mode, output_bucket, audit_session
):
    """Upload the output file for *output_mode* to the given S3 bucket.

    The object key mirrors the local path plus the output mode. On any
    error the problem is logged as critical and the program exits.
    """
    try:
        # Resolve the file suffix for the requested mode
        if output_mode == "csv":
            suffix = csv_file_suffix
        elif output_mode == "json":
            suffix = json_file_suffix
        elif output_mode == "json-asff":
            suffix = json_asff_file_suffix
        # Get only last part of the path
        filename = f"{output_filename}{suffix}"
        logger.info(f"Sending outputs to S3 bucket {output_bucket}")
        file_name = output_directory + "/" + filename
        bucket_name = output_bucket
        object_name = output_directory + "/" + output_mode + "/" + filename
        s3_client = audit_session.client("s3")
        s3_client.upload_file(file_name, bucket_name, object_name)

    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit()
|
|
|
|
|
|
def display_summary_table(
    findings: list,
    audit_info: AWS_Audit_Info,
    output_options: Output_From_Options,
    provider: str,
):
    """Print the end-of-scan console summary: overall pass/fail percentages,
    a per-service table with failed-finding counts by severity, and the
    paths of the generated output files.

    Args:
        findings: all findings of the scan, assumed grouped by service
            (rows are flushed whenever the service name changes).
        audit_info: audited account/tenant metadata.
        output_options: output configuration (directory, filename, modes).
        provider: "aws" or "azure".

    On any error the problem is logged as critical and the program exits.
    """
    output_directory = output_options.output_directory
    output_filename = output_options.output_filename
    try:
        if provider == "aws":
            entity_type = "Account"
        elif provider == "azure":
            entity_type = "Tenant Domain"
        # NOTE(review): any other provider leaves entity_type unbound; the
        # resulting NameError is caught below and exits — confirm callers
        # only pass aws/azure.

        if findings:
            # Accumulator for the service currently being aggregated
            current = {
                "Service": "",
                "Provider": "",
                "Total": 0,
                "Critical": 0,
                "High": 0,
                "Medium": 0,
                "Low": 0,
            }
            # Column-oriented rows for tabulate
            findings_table = {
                "Provider": [],
                "Service": [],
                "Status": [],
                "Critical": [],
                "High": [],
                "Medium": [],
                "Low": [],
            }
            pass_count = fail_count = 0
            for finding in findings:
                # If new service and not first, add previous row
                if (
                    current["Service"] != finding.check_metadata.ServiceName
                    and current["Service"]
                ):

                    add_service_to_table(findings_table, current)

                    # Reset the counters for the next service
                    current["Total"] = current["Critical"] = current["High"] = current[
                        "Medium"
                    ] = current["Low"] = 0

                current["Service"] = finding.check_metadata.ServiceName
                current["Provider"] = finding.check_metadata.Provider

                current["Total"] += 1
                if finding.status == "PASS":
                    pass_count += 1
                elif finding.status == "FAIL":
                    fail_count += 1
                    # Severity columns only count FAILed findings
                    if finding.check_metadata.Severity == "critical":
                        current["Critical"] += 1
                    elif finding.check_metadata.Severity == "high":
                        current["High"] += 1
                    elif finding.check_metadata.Severity == "medium":
                        current["Medium"] += 1
                    elif finding.check_metadata.Severity == "low":
                        current["Low"] += 1

            # Add final service

            add_service_to_table(findings_table, current)

            print("\nOverview Results:")
            overview_table = [
                [
                    f"{Fore.RED}{round(fail_count/len(findings)*100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
                    f"{Fore.GREEN}{round(pass_count/len(findings)*100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
                ]
            ]
            print(tabulate(overview_table, tablefmt="rounded_grid"))
            print(
                f"\n{entity_type} {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL} Scan Results (severity columns are for fails only):"
            )
            if provider == "azure":
                print(
                    f"\nSubscriptions scanned: {Fore.YELLOW}{' '.join(audit_info.subscriptions.keys())}{Style.RESET_ALL}"
                )
            print(tabulate(findings_table, headers="keys", tablefmt="rounded_grid"))
            print(
                f"{Style.BRIGHT}* You only see here those services that contains resources.{Style.RESET_ALL}"
            )
            print("\nDetailed results are in:")
            # CSV and JSON paths are always printed; HTML/ASFF only when enabled
            if "html" in output_options.output_modes:
                print(f" - HTML: {output_directory}/{output_filename}.html")
            if "json-asff" in output_options.output_modes:
                print(f" - JSON-ASFF: {output_directory}/{output_filename}.asff.json")
            print(f" - CSV: {output_directory}/{output_filename}.csv")
            print(f" - JSON: {output_directory}/{output_filename}.json")

        else:
            print(
                f"\n {Style.BRIGHT}There are no findings in {entity_type} {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL}\n"
            )

    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
        )
        sys.exit()
|
|
|
|
|
|
def add_service_to_table(findings_table, current):
    """Append one aggregated service row (from *current*) to *findings_table*.

    Sets current["Status"] to a colored FAIL/PASS label, then appends every
    column value with its severity color applied.
    """
    severities = ("Critical", "High", "Medium", "Low")
    if any(current[severity] > 0 for severity in severities):
        # At least one failed finding in this service
        total_fails = sum(current[severity] for severity in severities)
        current["Status"] = f"{Fore.RED}FAIL ({total_fails}){Style.RESET_ALL}"
    else:
        current["Status"] = f"{Fore.GREEN}PASS ({current['Total']}){Style.RESET_ALL}"

    for column in ("Provider", "Service", "Status"):
        findings_table[column].append(current[column])

    severity_colors = {
        "Critical": Fore.LIGHTRED_EX,
        "High": Fore.RED,
        "Medium": Fore.YELLOW,
        "Low": Fore.BLUE,
    }
    for severity, color in severity_colors.items():
        findings_table[severity].append(
            f"{color}{current[severity]}{Style.RESET_ALL}"
        )
|
|
|
|
|
|
def display_compliance_table(
    findings: list,
    bulk_checks_metadata: dict,
    compliance_framework: str,
    output_filename: str,
    output_directory: str,
):
    """Print the ENS RD2022 compliance summary table to the console
    (output text is in Spanish, as required by the framework's audience).

    Args:
        findings: all findings of the scan.
        bulk_checks_metadata: check metadata indexed by CheckID, used to
            look up each check's compliance requirements.
        compliance_framework: requested framework(s); only handles
            "ens_rd2022_aws".
        output_filename: base name used to print the CSV results path.
        output_directory: directory used to print the CSV results path.

    On any error the problem is logged as critical and the program exits.
    """
    try:
        if "ens_rd2022_aws" in compliance_framework:
            # Aggregated status/counters per "Marco/Categoria" pair
            marcos = {}
            ens_compliance_table = {
                "Proveedor": [],
                "Marco/Categoria": [],
                "Estado": [],
                "PYTEC": [],
                "Alto": [],
                "Medio": [],
                "Bajo": [],
            }
            pass_count = fail_count = 0
            for finding in findings:
                check = bulk_checks_metadata[finding.check_metadata.CheckID]
                check_compliances = check.Compliance
                for compliance in check_compliances:
                    # Only ENS RD2022 for AWS applies to this table
                    if (
                        compliance.Framework == "ENS"
                        and compliance.Provider == "AWS"
                        and compliance.Version == "RD2022"
                    ):
                        for requirement in compliance.Requirements:
                            for attribute in requirement.Attributes:
                                marco_categoria = (
                                    f"{attribute['Marco']}/{attribute['Categoria']}"
                                )
                                # Check if Marco/Categoria exists; default to
                                # compliant ("CUMPLE") until a FAIL is seen
                                if marco_categoria not in marcos:
                                    marcos[marco_categoria] = {
                                        "Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}",
                                        "Pytec": 0,
                                        "Alto": 0,
                                        "Medio": 0,
                                        "Bajo": 0,
                                    }
                                if finding.status == "FAIL":
                                    fail_count += 1
                                    marcos[marco_categoria][
                                        "Estado"
                                    ] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"
                                elif finding.status == "PASS":
                                    pass_count += 1
                                # Count requirements per ENS level ("Nivel")
                                if attribute["Nivel"] == "pytec":
                                    marcos[marco_categoria]["Pytec"] += 1
                                elif attribute["Nivel"] == "alto":
                                    marcos[marco_categoria]["Alto"] += 1
                                elif attribute["Nivel"] == "medio":
                                    marcos[marco_categoria]["Medio"] += 1
                                elif attribute["Nivel"] == "bajo":
                                    marcos[marco_categoria]["Bajo"] += 1

            # Add results to table
            for marco in marcos:
                ens_compliance_table["Proveedor"].append("aws")
                ens_compliance_table["Marco/Categoria"].append(marco)
                ens_compliance_table["Estado"].append(marcos[marco]["Estado"])
                ens_compliance_table["PYTEC"].append(
                    f"{Fore.LIGHTRED_EX}{marcos[marco]['Pytec']}{Style.RESET_ALL}"
                )
                ens_compliance_table["Alto"].append(
                    f"{Fore.RED}{marcos[marco]['Alto']}{Style.RESET_ALL}"
                )
                ens_compliance_table["Medio"].append(
                    f"{Fore.YELLOW}{marcos[marco]['Medio']}{Style.RESET_ALL}"
                )
                ens_compliance_table["Bajo"].append(
                    f"{Fore.BLUE}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
                )

            print(
                f"\nEstado de Cumplimiento de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:"
            )
            # NOTE(review): divides by fail_count+pass_count — would raise
            # ZeroDivisionError (caught below, exits) if no ENS finding had
            # PASS/FAIL status; confirm that cannot happen upstream.
            overview_table = [
                [
                    f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}",
                    f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}",
                ]
            ]
            print(tabulate(overview_table, tablefmt="rounded_grid"))
            print(f"\nResultados de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:")
            print(
                tabulate(ens_compliance_table, headers="keys", tablefmt="rounded_grid")
            )
            print(
                f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}"
            )
            print("\nResultados detallados en:")
            print(
                f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
            )

    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
        )
        sys.exit()
|
|
|
|
|
|
def add_html_header(file_descriptor, audit_info):
    """Write the static HTML report header to *file_descriptor*: the page
    <head>, the report-information and assessment-summary cards, and the
    opening tags of the findings table.

    The table body is filled later by fill_html() and the document is
    closed by add_html_footer(). Errors are logged and swallowed.
    """
    try:
        # audited_regions may be a list (joined for display) or a plain value
        if isinstance(audit_info.audited_regions, list):
            audited_regions = " ".join(audit_info.audited_regions)
        else:
            audited_regions = audit_info.audited_regions
        file_descriptor.write(
            """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<!-- Required meta tags -->
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<style>
.read-more {color:#00f;}
.bg-success-custom {background-color: #70dc88 !important;}
</style>
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/css/bootstrap.min.css" integrity="sha384-9aIt2nRpC12Uk9gS9baDl411NQApFmC26EwAOH8WgZl5MYYxFfc+NcPb1dKGj7Sk" crossorigin="anonymous">
<!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->
<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.3.0/sl-1.3.3/datatables.min.css"/>
<link rel="stylesheet" href="https://pro.fontawesome.com/releases/v5.10.0/css/all.css" integrity="sha384-AYmEC3Yw5cVb3ZcuHtOA93w35dYTsvhLPVnYs9eStHfGJvOvKxVfELGroGkvsg+p" crossorigin="anonymous"/>
<style>
.show-read-more .more-text{
display: none;
}
</style>
<title>Prowler - AWS Security Assessments</title>
</head>
<body>
<nav class="navbar navbar-expand-xl sticky-top navbar-dark bg-dark">
<a class="navbar-brand" href="#">Prowler - Security Assessments in AWS</a>
</nav>
<div class="container-fluid">
<div class="row mt-3">
<div class="col-md-4">
<div class="card">
<div class="card-header">
Report Information:
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<div class="row">
<div class="col-md-auto">
<b>Version:</b> """
            + prowler_version
            + """
</div>
</div>
</li>
<li class="list-group-item">
<b>Parameters used:</b> """
            + " ".join(sys.argv[1:])
            + """
</li>
<li class="list-group-item">
<b>Date:</b> """
            + output_file_timestamp
            + """
</li>
<li class="list-group-item text-center">
<a href="""
            + html_logo_url
            + """><img src="""
            + html_logo_img
            + """
alt="prowler-logo"></a>
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
Assessment Summary:
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>AWS Account:</b> """
            + audit_info.audited_account
            + """
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> """
            + audit_info.profile
            + """
</li>
<li class="list-group-item">
<b>Audited Regions:</b> """
            + audited_regions
            + """
</li>
<li class="list-group-item">
<b>User Id:</b> """
            + audit_info.audited_user_id
            + """
</li>
<li class="list-group-item">
<b>Caller Identity ARN:</b> """
            + audit_info.audited_identity_arn
            + """
</li>
</ul>
</div>
</div>
<div class="row mt-3">
<div class="col-md-12">
<table class="table compact stripe row-border ordering" id="findingsTable" data-order='[[ 5, "asc" ]]' data-page-length='100'>
<thead class="thead-light">
<tr>
<th scope="col">Status</th>
<th scope="col">Severity</th>
<th scope="col">Account ID</th>
<th scope="col">Region</th>
<th scope="col">Service</th>
<th scope="col">Check ID</th>
<th style="width:20%" scope="col">Check Title</th>
<th style="width:20%" scope="col">Check Output</th>
<th scope="col">Risk</th>
<th scope="col">Remediation</th>
<th scope="col">Related URL</th>
<th scope="col">Resource ID</th>
</tr>
</thead>
<tbody>
"""
        )
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
|
|
|
|
|
|
def add_html_footer(output_filename, output_directory):
    """Append the closing markup and the DataTables/read-more JavaScript to
    the HTML report, then close the file.

    Args:
        output_filename: base name of the HTML report file.
        output_directory: directory containing the report file.

    On any error the problem is logged as critical and the program exits.
    """
    try:
        filename = f"{output_directory}/{output_filename}{html_file_suffix}"
        # The file was created by add_html_header(); open it to append
        file_descriptor = open_file(
            filename,
            "a",
        )
        file_descriptor.write(
            """
</tbody>
</table>
</div>
</div>
</div>
</div>
<!-- Table search and paginator -->
<!-- Optional JavaScript -->
<!-- jQuery first, then Popper.js, then Bootstrap JS -->
<script src="https://code.jquery.com/jquery-3.5.1.min.js" integrity="sha256-9/aliU8dGd2tb6OSsuzixeV4y/faTqgFtohetphbbj0=" crossorigin="anonymous"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/js/bootstrap.bundle.min.js" integrity="sha384-1CmrxMRARb6aLqgBO7yyAxTOQE2AKb9GfXnEo760AUcUmFx3ibVJJAzGytlQcNXd" crossorigin="anonymous"></script>
<!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->
<script type="text/javascript" src="https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.3.0/sl-1.3.3/datatables.min.js"></script>
<script>
$(document).ready(function(){
// Initialise the table with 50 rows, and some search/filtering panes
$('#findingsTable').DataTable( {
lengthMenu: [ [50, 100, -1], [50, 100, "All"] ],
searchPanes: {
cascadePanes: true,
viewTotal: true
},
dom: 'Plfrtip',
columnDefs: [
{
searchPanes: {
show: false
},
// Hide Compliance, Check ID (in favour of Check Title), CAF Epic, Risk, Remediation, Link
targets: [4, 6, 9, 10, 11, 12]
}
]
});
var maxLength = 30;
$(".show-read-more").each(function(){
var myStr = $(this).text();
if($.trim(myStr).length > maxLength){
var newStr = myStr.substring(0, maxLength);
var removedStr = myStr.substring(maxLength, $.trim(myStr).length);
$(this).empty().html(newStr);
$(this).append(' <a href="javascript:void(0);" class="read-more">read more...</a>');
$(this).append('<span class="more-text">' + removedStr + '</span>');
}
});
$(".read-more").click(function(){
$(this).siblings(".more-text").contents().unwrap();
$(this).remove();
});
});
</script>
</body>
</html>
"""
        )
        file_descriptor.close()
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit()
|