Mirror of https://github.com/ghndrx/prowler.git, synced 2026-02-10 14:55:00 +00:00
feat(html): add html output (#1525)
Co-authored-by: sergargar <sergio@verica.io>
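Review note: the HTML writer follows the same streaming pattern as the existing CSV/JSON writers in this diff: add_html_header() writes the static head and summary cards when the descriptor is first created, fill_html() appends one table row per finding as checks complete, and add_html_footer() closes the table and emits the DataTables script at the end of the run. A minimal, self-contained sketch of that three-phase lifecycle (the Finding class and write_html_report() below are illustrative stand-ins, not Prowler code):

    import io

    class Finding:
        # illustrative stand-in for Prowler's finding model
        def __init__(self, status, region, resource_id):
            self.status = status
            self.region = region
            self.resource_id = resource_id

    def write_html_report(findings, fd):
        fd.write("<html><body><table><tbody>\n")  # header phase
        for f in findings:  # one <tr> per finding, streamed as checks complete
            fd.write(
                f"<tr><td>{f.status}</td><td>{f.region}</td><td>{f.resource_id}</td></tr>\n"
            )
        fd.write("</tbody></table></body></html>\n")  # footer phase

    fd = io.StringIO()
    write_html_report([Finding("PASS", "eu-west-1", "my-bucket")], fd)
    print(fd.getvalue())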
@@ -8,6 +8,10 @@ from lib.logger import logger
 timestamp = datetime.today()
 timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
 prowler_version = "3.0-beta-21Nov2022"
+html_logo_url = "https://github.com/prowler-cloud/prowler/"
+html_logo_img = (
+    "https://github.com/prowler-cloud/prowler/raw/master/util/html/prowler-logo-new.png"
+)
 
 orange_color = "\033[38;5;208m"
 banner_color = "\033[1;92m"
@@ -25,6 +29,7 @@ timestamp_iso = timestamp.isoformat()
 csv_file_suffix = ".csv"
 json_file_suffix = ".json"
 json_asff_file_suffix = ".asff.json"
+html_file_suffix = ".html"
 config_yaml = "providers/aws/config.yaml"
@@ -10,14 +10,19 @@ from tabulate import tabulate
 
 from config.config import (
     csv_file_suffix,
+    html_file_suffix,
+    html_logo_img,
+    html_logo_url,
     json_asff_file_suffix,
     json_file_suffix,
     orange_color,
+    output_file_timestamp,
     prowler_version,
     timestamp,
     timestamp_iso,
     timestamp_utc,
 )
+from lib.check.models import Output_From_Options
 from lib.logger import logger
 from lib.outputs.models import (
     Check_Output_CSV,
@@ -31,223 +36,264 @@ from lib.outputs.models import (
 )
 from lib.utils.utils import file_exists, hash_sha512, open_file
 from providers.aws.lib.allowlist.allowlist import is_allowlisted
 from providers.aws.lib.audit_info.models import AWS_Audit_Info
 from providers.aws.lib.security_hub.security_hub import send_to_security_hub
 
 
 def report(check_findings, output_options, audit_info):
-    # Sort check findings
-    check_findings.sort(key=lambda x: x.region)
-
-    # Generate the required output files
-    # csv_fields = []
-    file_descriptors = {}
-    if output_options.output_modes:
-        # We have to create the required output files
-        file_descriptors = fill_file_descriptors(
-            output_options.output_modes,
-            output_options.output_directory,
-            output_options.output_filename,
-        )
+    try:
+        # Sort check findings
+        check_findings.sort(key=lambda x: x.region)
+
+        # Generate the required output files
+        # csv_fields = []
+        file_descriptors = {}
+        if output_options.output_modes:
+            # We have to create the required output files
+            file_descriptors = fill_file_descriptors(
+                output_options.output_modes,
+                output_options.output_directory,
+                output_options.output_filename,
+                audit_info,
+            )
-
-    if check_findings:
-        for finding in check_findings:
-            # Check if finding is allowlisted
-            if output_options.allowlist_file:
-                if is_allowlisted(
-                    output_options.allowlist_file,
-                    audit_info.audited_account,
-                    finding.check_metadata.CheckID,
-                    finding.region,
-                    finding.resource_id,
-                ):
-                    finding.status = "WARNING"
-            # Print findings by stdout
-            color = set_report_color(finding.status)
-            if output_options.is_quiet and "FAIL" in finding.status:
-                print(
-                    f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
-                )
-            elif not output_options.is_quiet and output_options.verbose:
-                print(
-                    f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
-                )
-            if file_descriptors:
-                if finding.check_metadata.Provider == "aws":
-                    if "ens_rd2022_aws" in output_options.output_modes:
-                        # We have to retrieve all the check's compliance requirements
-                        check_compliance = output_options.bulk_checks_metadata[
-                            finding.check_metadata.CheckID
-                        ].Compliance
-                        for compliance in check_compliance:
-                            if (
-                                compliance.Framework == "ENS"
-                                and compliance.Version == "RD2022"
-                            ):
-                                for requirement in compliance.Requirements:
-                                    requirement_description = requirement.Description
-                                    requirement_id = requirement.Id
-                                    for attribute in requirement.Attributes:
-                                        compliance_row = Check_Output_CSV_ENS_RD2022(
-                                            Provider=finding.check_metadata.Provider,
-                                            AccountId=audit_info.audited_account,
-                                            Region=finding.region,
-                                            AssessmentDate=timestamp.isoformat(),
-                                            Requirements_Id=requirement_id,
-                                            Requirements_Description=requirement_description,
-                                            Requirements_Attributes_IdGrupoControl=attribute.get(
-                                                "IdGrupoControl"
-                                            ),
-                                            Requirements_Attributes_Marco=attribute.get(
-                                                "Marco"
-                                            ),
-                                            Requirements_Attributes_Categoria=attribute.get(
-                                                "Categoria"
-                                            ),
-                                            Requirements_Attributes_DescripcionControl=attribute.get(
-                                                "DescripcionControl"
-                                            ),
-                                            Requirements_Attributes_Nivel=attribute.get(
-                                                "Nivel"
-                                            ),
-                                            Requirements_Attributes_Tipo=attribute.get(
-                                                "Tipo"
-                                            ),
-                                            Requirements_Attributes_Dimensiones=",".join(
-                                                attribute.get("Dimensiones")
-                                            ),
-                                            Status=finding.status,
-                                            StatusExtended=finding.status_extended,
-                                            ResourceId=finding.resource_id,
-                                            CheckId=finding.check_metadata.CheckID,
-                                        )
-
-                                        csv_header = generate_csv_fields(
-                                            Check_Output_CSV_ENS_RD2022
-                                        )
-                                        csv_writer = DictWriter(
-                                            file_descriptors["ens_rd2022_aws"],
-                                            fieldnames=csv_header,
-                                            delimiter=";",
-                                        )
-                                        csv_writer.writerow(compliance_row.__dict__)
-
-                if "csv" in file_descriptors:
-                    finding_output = Check_Output_CSV(
-                        audit_info.audited_account,
-                        audit_info.profile,
-                        finding,
-                        audit_info.organizations_metadata,
-                    )
-                    csv_writer = DictWriter(
-                        file_descriptors["csv"],
-                        fieldnames=generate_csv_fields(Check_Output_CSV),
-                        delimiter=";",
-                    )
-                    csv_writer.writerow(finding_output.__dict__)
-
-                if "json" in file_descriptors:
-                    finding_output = Check_Output_JSON(
-                        **finding.check_metadata.dict()
-                    )
-                    fill_json(finding_output, audit_info, finding)
-
-                    json.dump(
-                        finding_output.dict(), file_descriptors["json"], indent=4
-                    )
-                    file_descriptors["json"].write(",")
-
-                if "json-asff" in file_descriptors:
-                    finding_output = Check_Output_JSON_ASFF()
-                    fill_json_asff(finding_output, audit_info, finding)
-
-                    json.dump(
-                        finding_output.dict(),
-                        file_descriptors["json-asff"],
-                        indent=4,
-                    )
-                    file_descriptors["json-asff"].write(",")
-
-                # Check if it is needed to send findings to security hub
-                if output_options.security_hub_enabled:
-                    send_to_security_hub(
-                        finding.region, finding_output, audit_info.audit_session
-                    )
-    else: # No service resources in the whole account
-        color = set_report_color("INFO")
-        if not output_options.is_quiet and output_options.verbose:
-            print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
-    # Separator between findings and bar
-    if output_options.is_quiet or output_options.verbose:
-        print()
-    if file_descriptors:
-        # Close all file descriptors
-        for file_descriptor in file_descriptors:
-            file_descriptors.get(file_descriptor).close()
+
+        if check_findings:
+            for finding in check_findings:
+                # Check if finding is allowlisted
+                if output_options.allowlist_file:
+                    if is_allowlisted(
+                        output_options.allowlist_file,
+                        audit_info.audited_account,
+                        finding.check_metadata.CheckID,
+                        finding.region,
+                        finding.resource_id,
+                    ):
+                        finding.status = "WARNING"
+                # Print findings by stdout
+                color = set_report_color(finding.status)
+                if output_options.is_quiet and "FAIL" in finding.status:
+                    print(
+                        f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
+                    )
+                elif not output_options.is_quiet and output_options.verbose:
+                    print(
+                        f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
+                    )
+                if file_descriptors:
+                    if finding.check_metadata.Provider == "aws":
+                        if "ens_rd2022_aws" in output_options.output_modes:
+                            # We have to retrieve all the check's compliance requirements
+                            check_compliance = output_options.bulk_checks_metadata[
+                                finding.check_metadata.CheckID
+                            ].Compliance
+                            for compliance in check_compliance:
+                                if (
+                                    compliance.Framework == "ENS"
+                                    and compliance.Version == "RD2022"
+                                ):
+                                    for requirement in compliance.Requirements:
+                                        requirement_description = (
+                                            requirement.Description
+                                        )
+                                        requirement_id = requirement.Id
+                                        for attribute in requirement.Attributes:
+                                            compliance_row = Check_Output_CSV_ENS_RD2022(
+                                                Provider=finding.check_metadata.Provider,
+                                                AccountId=audit_info.audited_account,
+                                                Region=finding.region,
+                                                AssessmentDate=timestamp.isoformat(),
+                                                Requirements_Id=requirement_id,
+                                                Requirements_Description=requirement_description,
+                                                Requirements_Attributes_IdGrupoControl=attribute.get(
+                                                    "IdGrupoControl"
+                                                ),
+                                                Requirements_Attributes_Marco=attribute.get(
+                                                    "Marco"
+                                                ),
+                                                Requirements_Attributes_Categoria=attribute.get(
+                                                    "Categoria"
+                                                ),
+                                                Requirements_Attributes_DescripcionControl=attribute.get(
+                                                    "DescripcionControl"
+                                                ),
+                                                Requirements_Attributes_Nivel=attribute.get(
+                                                    "Nivel"
+                                                ),
+                                                Requirements_Attributes_Tipo=attribute.get(
+                                                    "Tipo"
+                                                ),
+                                                Requirements_Attributes_Dimensiones=",".join(
+                                                    attribute.get("Dimensiones")
+                                                ),
+                                                Status=finding.status,
+                                                StatusExtended=finding.status_extended,
+                                                ResourceId=finding.resource_id,
+                                                CheckId=finding.check_metadata.CheckID,
+                                            )
+
+                                            csv_header = generate_csv_fields(
+                                                Check_Output_CSV_ENS_RD2022
+                                            )
+                                            csv_writer = DictWriter(
+                                                file_descriptors["ens_rd2022_aws"],
+                                                fieldnames=csv_header,
+                                                delimiter=";",
+                                            )
+                                            csv_writer.writerow(compliance_row.__dict__)
+
+                    if "csv" in file_descriptors:
+                        finding_output = Check_Output_CSV(
+                            audit_info.audited_account,
+                            audit_info.profile,
+                            finding,
+                            audit_info.organizations_metadata,
+                        )
+                        csv_writer = DictWriter(
+                            file_descriptors["csv"],
+                            fieldnames=generate_csv_fields(Check_Output_CSV),
+                            delimiter=";",
+                        )
+                        csv_writer.writerow(finding_output.__dict__)
+
+                    if "json" in file_descriptors:
+                        finding_output = Check_Output_JSON(
+                            **finding.check_metadata.dict()
+                        )
+                        fill_json(finding_output, audit_info, finding)
+
+                        json.dump(
+                            finding_output.dict(),
+                            file_descriptors["json"],
+                            indent=4,
+                        )
+                        file_descriptors["json"].write(",")
+
+                    if "json-asff" in file_descriptors:
+                        finding_output = Check_Output_JSON_ASFF()
+                        fill_json_asff(finding_output, audit_info, finding)
+
+                        json.dump(
+                            finding_output.dict(),
+                            file_descriptors["json-asff"],
+                            indent=4,
+                        )
+                        file_descriptors["json-asff"].write(",")
+
+                    if "html" in file_descriptors:
+                        fill_html(file_descriptors["html"], audit_info, finding)
+
+                        file_descriptors["html"].write("")
+
+                    # Check if it is needed to send findings to security hub
+                    if output_options.security_hub_enabled:
+                        send_to_security_hub(
+                            finding.region, finding_output, audit_info.audit_session
+                        )
+        else: # No service resources in the whole account
+            color = set_report_color("INFO")
+            if not output_options.is_quiet and output_options.verbose:
+                print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
+        # Separator between findings and bar
+        if output_options.is_quiet or output_options.verbose:
+            print()
+        if file_descriptors:
+            # Close all file descriptors
+            for file_descriptor in file_descriptors:
+                file_descriptors.get(file_descriptor).close()
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
 
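Review note: report() fans each finding out to every open descriptor, so adding a format only costs one membership test per finding inside the loop. A toy illustration of that dict-of-writers pattern (the writer callables are illustrative, not Prowler code):

    findings = ["PASS", "FAIL"]
    writers = {
        "csv": lambda status: f"{status};eu-west-1",
        "html": lambda status: f"<tr><td>{status}</td></tr>",
    }
    for status in findings:
        for mode, render in writers.items():  # one write per open output mode
            print(mode, "->", render(status))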
 def initialize_file_descriptor(
-    filename: str, output_mode: str, format: Any = None
+    filename: str,
+    output_mode: str,
+    audit_info: AWS_Audit_Info,
+    format: Any = None,
 ) -> TextIOWrapper:
     """Open/Create the output file. If needed include headers or the required format"""
-    if file_exists(filename):
-        file_descriptor = open_file(
-            filename,
-            "a",
-        )
-    else:
-        file_descriptor = open_file(
-            filename,
-            "a",
-        )
-
-        if output_mode in ("csv", "ens_rd2022_aws"):
-            # Format is the class model of the CSV format to print the headers
-            csv_header = [x.upper() for x in generate_csv_fields(format)]
-            csv_writer = DictWriter(
-                file_descriptor, fieldnames=csv_header, delimiter=";"
-            )
-            csv_writer.writeheader()
-
-        if output_mode in ("json", "json-asff"):
-            file_descriptor.write("[")
+    try:
+        if file_exists(filename):
+            file_descriptor = open_file(
+                filename,
+                "a",
+            )
+        else:
+            file_descriptor = open_file(
+                filename,
+                "a",
+            )
+
+            if output_mode in ("csv", "ens_rd2022_aws"):
+                # Format is the class model of the CSV format to print the headers
+                csv_header = [x.upper() for x in generate_csv_fields(format)]
+                csv_writer = DictWriter(
+                    file_descriptor, fieldnames=csv_header, delimiter=";"
+                )
+                csv_writer.writeheader()
+
+            if output_mode in ("json", "json-asff"):
+                file_descriptor.write("[")
+
+            if "html" in output_mode:
+                add_html_header(file_descriptor, audit_info)
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
 
     return file_descriptor
 
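Review note: audit_info reaches initialize_file_descriptor() only because the HTML preamble embeds account and identity details; CSV still needs the model class to derive its header row, and the JSON formats only need an opening bracket. A condensed, self-contained sketch of that mode-dependent initialisation (open_new() and its preamble strings are hypothetical, not Prowler code):

    def open_new(path: str, mode: str) -> None:
        # sketch: each format writes a different preamble when the file is first created
        with open(path, "a") as fd:
            if mode in ("csv", "ens_rd2022_aws"):
                fd.write("PROVIDER;ACCOUNTID;REGION\n")  # CSV column headers
            elif mode in ("json", "json-asff"):
                fd.write("[")  # opening bracket of the findings array
            elif mode == "html":
                fd.write("<!DOCTYPE html>\n")  # static document head

    open_new("/tmp/example-report.csv", "csv")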
-def fill_file_descriptors(output_modes, output_directory, output_filename):
-    file_descriptors = {}
-    if output_modes:
-        for output_mode in output_modes:
-            if output_mode == "csv":
-                filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
-                file_descriptor = initialize_file_descriptor(
-                    filename, output_mode, Check_Output_CSV
-                )
-                file_descriptors.update({output_mode: file_descriptor})
-
-            if output_mode == "json":
-                filename = f"{output_directory}/{output_filename}{json_file_suffix}"
-                file_descriptor = initialize_file_descriptor(filename, output_mode)
-                file_descriptors.update({output_mode: file_descriptor})
-
-            if output_mode == "json-asff":
-                filename = (
-                    f"{output_directory}/{output_filename}{json_asff_file_suffix}"
-                )
-                file_descriptor = initialize_file_descriptor(filename, output_mode)
-                file_descriptors.update({output_mode: file_descriptor})
-
-            if output_mode == "ens_rd2022_aws":
-                filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
-                file_descriptor = initialize_file_descriptor(
-                    filename, output_mode, Check_Output_CSV_ENS_RD2022
-                )
-                file_descriptors.update({output_mode: file_descriptor})
+def fill_file_descriptors(output_modes, output_directory, output_filename, audit_info):
+    try:
+        file_descriptors = {}
+        if output_modes:
+            for output_mode in output_modes:
+                if output_mode == "csv":
+                    filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
+                    file_descriptor = initialize_file_descriptor(
+                        filename,
+                        output_mode,
+                        audit_info,
+                        Check_Output_CSV,
+                    )
+                    file_descriptors.update({output_mode: file_descriptor})
+
+                if output_mode == "json":
+                    filename = f"{output_directory}/{output_filename}{json_file_suffix}"
+                    file_descriptor = initialize_file_descriptor(
+                        filename, output_mode, audit_info
+                    )
+                    file_descriptors.update({output_mode: file_descriptor})
+
+                if output_mode == "json-asff":
+                    filename = (
+                        f"{output_directory}/{output_filename}{json_asff_file_suffix}"
+                    )
+                    file_descriptor = initialize_file_descriptor(
+                        filename, output_mode, audit_info
+                    )
+                    file_descriptors.update({output_mode: file_descriptor})
+
+                if output_mode == "html":
+                    filename = f"{output_directory}/{output_filename}{html_file_suffix}"
+                    file_descriptor = initialize_file_descriptor(
+                        filename, output_mode, audit_info
+                    )
+                    file_descriptors.update({output_mode: file_descriptor})
+
+                if output_mode == "ens_rd2022_aws":
+                    filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
+                    file_descriptor = initialize_file_descriptor(
+                        filename, output_mode, audit_info, Check_Output_CSV_ENS_RD2022
+                    )
+                    file_descriptors.update({output_mode: file_descriptor})
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
 
     return file_descriptors
 
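Review note: fill_file_descriptors() keys the returned dict by output mode, which is what lets report() test membership with "html" in file_descriptors instead of re-parsing output_modes. A self-contained analogue of that suffix-to-descriptor mapping (open_outputs() and its suffix table are illustrative, not Prowler code):

    from typing import Dict, TextIO

    def open_outputs(modes, directory, stem) -> Dict[str, TextIO]:
        suffixes = {"csv": ".csv", "json": ".json", "json-asff": ".asff.json", "html": ".html"}
        # one descriptor per requested mode, keyed by the mode name
        return {mode: open(f"{directory}/{stem}{suffixes[mode]}", "a") for mode in modes}

    fds = open_outputs(["csv", "html"], "/tmp", "prowler-output")
    assert set(fds) == {"csv", "html"}
    for fd in fds.values():
        fd.close()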
@@ -333,6 +379,34 @@ def fill_json_asff(finding_output, audit_info, finding):
     return finding_output
 
 
+def fill_html(file_descriptor, audit_info, finding):
+    row_class = "p-3 mb-2 bg-success-custom"
+    if finding.status == "INFO":
+        row_class = "table-info"
+    elif finding.status == "FAIL":
+        row_class = "table-danger"
+    elif finding.status == "WARNING":
+        row_class = "table-warning"
+    file_descriptor.write(
+        f"""
+            <tr class="{row_class}">
+                <td>{finding.status}</td>
+                <td>{finding.check_metadata.Severity}</td>
+                <td>{audit_info.audited_account}</td>
+                <td>{finding.region}</td>
+                <td>{finding.check_metadata.ServiceName}</td>
+                <td>{finding.check_metadata.CheckID}</td>
+                <td>{finding.check_metadata.CheckTitle}</td>
+                <td>{finding.status_extended}</td>
+                <td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
+                <td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p></td>
+                <td><a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
+                <td>{finding.resource_id}</td>
+            </tr>
+            """
+    )
+
+
 def close_json(output_filename, output_directory, mode):
     try:
         suffix = json_file_suffix
@@ -383,11 +457,12 @@ def send_to_s3_bucket(
 
 def display_summary_table(
     findings: list,
-    audit_info,
-    output_filename: str,
-    output_directory: str,
+    audit_info: AWS_Audit_Info,
+    output_options: Output_From_Options,
     provider: str,
 ):
+    output_directory = output_options.output_directory
+    output_filename = output_options.output_filename
     try:
         if provider == "aws":
             entity_type = "Account"
@@ -397,6 +472,7 @@ def display_summary_table(
         current = {
             "Service": "",
             "Provider": "",
+            "Total": 0,
             "Critical": 0,
             "High": 0,
             "Medium": 0,
@@ -421,13 +497,14 @@ def display_summary_table(
 
                     add_service_to_table(findings_table, current)
 
-                    current["Critical"] = current["High"] = current["Medium"] = current[
-                        "Low"
-                    ] = 0
+                    current["Total"] = current["Critical"] = current["High"] = current[
+                        "Medium"
+                    ] = current["Low"] = 0
 
                     current["Service"] = finding.check_metadata.ServiceName
                     current["Provider"] = finding.check_metadata.Provider
 
+                current["Total"] += 1
                 if finding.status == "PASS":
                     pass_count += 1
                 elif finding.status == "FAIL":
@@ -461,8 +538,17 @@ def display_summary_table(
                 f"{Style.BRIGHT}* You only see here those services that contains resources.{Style.RESET_ALL}"
             )
             print("\nDetailed results are in:")
+            if "html" in output_options.output_modes:
+                print(f" - HTML: {output_directory}/{output_filename}.html")
+            if "json-asff" in output_options.output_modes:
+                print(f" - JSON-ASFF: {output_directory}/{output_filename}.asff.json")
             print(f" - CSV: {output_directory}/{output_filename}.csv")
-            print(f" - JSON: {output_directory}/{output_filename}.json\n")
+            print(f" - JSON: {output_directory}/{output_filename}.json")
 
         else:
             print(
                 f"\n {Style.BRIGHT}There are no findings in {entity_type} {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL}\n"
             )
 
     except Exception as error:
         logger.critical(
@@ -483,7 +569,7 @@ def add_service_to_table(findings_table, current):
             )
             current["Status"] = f"{Fore.RED}FAIL ({total_fails}){Style.RESET_ALL}"
         else:
-            current["Status"] = f"{Fore.GREEN}PASS {Style.RESET_ALL}"
+            current["Status"] = f"{Fore.GREEN}PASS ({current['Total']}){Style.RESET_ALL}"
     findings_table["Provider"].append(current["Provider"])
     findings_table["Service"].append(current["Service"])
     findings_table["Status"].append(current["Status"])
@@ -601,3 +687,208 @@ def display_compliance_table(
         f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
     )
     sys.exit()
+
+
+def add_html_header(file_descriptor, audit_info):
+    try:
+        if isinstance(audit_info.audited_regions, list):
+            audited_regions = " ".join(audit_info.audited_regions)
+        else:
+            audited_regions = audit_info.audited_regions
+        file_descriptor.write(
+            """
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+    <!-- Required meta tags -->
+    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
+    <style>
+        .read-more {color:#00f;}
+        .bg-success-custom {background-color: #70dc88 !important;}
+    </style>
+    <!-- Bootstrap CSS -->
+    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/css/bootstrap.min.css" integrity="sha384-9aIt2nRpC12Uk9gS9baDl411NQApFmC26EwAOH8WgZl5MYYxFfc+NcPb1dKGj7Sk" crossorigin="anonymous">
+    <!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->
+    <link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.3.0/sl-1.3.3/datatables.min.css"/>
+    <link rel="stylesheet" href="https://pro.fontawesome.com/releases/v5.10.0/css/all.css" integrity="sha384-AYmEC3Yw5cVb3ZcuHtOA93w35dYTsvhLPVnYs9eStHfGJvOvKxVfELGroGkvsg+p" crossorigin="anonymous"/>
+    <style>
+        .show-read-more .more-text {
+            display: none;
+        }
+    </style>
+    <title>Prowler - AWS Security Assessments</title>
+</head>
+<body>
+    <nav class="navbar navbar-expand-xl sticky-top navbar-dark bg-dark">
+        <a class="navbar-brand" href="#">Prowler - Security Assessments in AWS</a>
+    </nav>
+    <div class="container-fluid">
+        <div class="row mt-3">
+            <div class="col-md-4">
+                <div class="card">
+                    <div class="card-header">
+                        Report Information:
+                    </div>
+                    <ul class="list-group list-group-flush">
+                        <li class="list-group-item">
+                            <div class="row">
+                                <div class="col-md-auto">
+                                    <b>Version:</b> """
+            + prowler_version
+            + """
+                                </div>
+                            </div>
+                        </li>
+                        <li class="list-group-item">
+                            <b>Parameters used:</b> """
+            + " ".join(sys.argv[1:])
+            + """
+                        </li>
+                        <li class="list-group-item">
+                            <b>Date:</b> """
+            + output_file_timestamp
+            + """
+                        </li>
+                        <li class="list-group-item text-center">
+                            <a href="""
+            + html_logo_url
+            + """><img src="""
+            + html_logo_img
+            + """
+                            alt="prowler-logo"></a>
+                        </li>
+                    </ul>
+                </div>
+            </div>
+            <div class="col-md-4">
+                <div class="card">
+                    <div class="card-header">
+                        Assessment Summary:
+                    </div>
+                    <ul class="list-group list-group-flush">
+                        <li class="list-group-item">
+                            <b>AWS Account:</b> """
+            + audit_info.audited_account
+            + """
+                        </li>
+                        <li class="list-group-item">
+                            <b>AWS-CLI Profile:</b> """
+            + audit_info.profile
+            + """
+                        </li>
+                        <li class="list-group-item">
+                            <b>Audited Regions:</b> """
+            + audited_regions
+            + """
+                        </li>
+                        <li class="list-group-item">
+                            <b>User Id:</b> """
+            + audit_info.audited_user_id
+            + """
+                        </li>
+                        <li class="list-group-item">
+                            <b>Caller Identity ARN:</b> """
+            + audit_info.audited_identity_arn
+            + """
+                        </li>
+                    </ul>
+                </div>
+            </div>
+            <div class="row mt-3">
+                <div class="col-md-12">
+                    <table class="table compact stripe row-border ordering" id="findingsTable" data-order='[[ 5, "asc" ]]' data-page-length='100'>
+                        <thead class="thead-light">
+                            <tr>
+                                <th scope="col">Status</th>
+                                <th scope="col">Severity</th>
+                                <th scope="col">Account ID</th>
+                                <th scope="col">Region</th>
+                                <th scope="col">Service</th>
+                                <th scope="col">Check ID</th>
+                                <th style="width:20%" scope="col">Check Title</th>
+                                <th style="width:20%" scope="col">Check Output</th>
+                                <th scope="col">Risk</th>
+                                <th scope="col">Remediation</th>
+                                <th scope="col">Related URL</th>
+                                <th scope="col">Resource ID</th>
+                            </tr>
+                        </thead>
+                        <tbody>
+    """
+        )
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
+
+
+def add_html_footer(output_filename, output_directory):
+    try:
+        filename = f"{output_directory}/{output_filename}{html_file_suffix}"
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+        file_descriptor.write(
+            """
+                        </tbody>
+                    </table>
+                </div>
+            </div>
+        </div>
+    </div>
+    <!-- Table search and paginator -->
+    <!-- Optional JavaScript -->
+    <!-- jQuery first, then Popper.js, then Bootstrap JS -->
+    <script src="https://code.jquery.com/jquery-3.5.1.min.js" integrity="sha256-9/aliU8dGd2tb6OSsuzixeV4y/faTqgFtohetphbbj0=" crossorigin="anonymous"></script>
+    <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/js/bootstrap.bundle.min.js" integrity="sha384-1CmrxMRARb6aLqgBO7yyAxTOQE2AKb9GfXnEo760AUcUmFx3ibVJJAzGytlQcNXd" crossorigin="anonymous"></script>
+    <!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->
+    <script type="text/javascript" src="https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.3.0/sl-1.3.3/datatables.min.js"></script>
+    <script>
+        $(document).ready(function(){
+            // Initialise the table with 50 rows, and some search/filtering panes
+            $('#findingsTable').DataTable( {
+                lengthMenu: [ [50, 100, -1], [50, 100, "All"] ],
+                searchPanes: {
+                    cascadePanes: true,
+                    viewTotal: true
+                },
+                dom: 'Plfrtip',
+                columnDefs: [
+                    {
+                        searchPanes: {
+                            show: false
+                        },
+                        // Hide Compliance, Check ID (in favour of Check Title), CAF Epic, Risk, Remediation, Link
+                        targets: [4, 6, 9, 10, 11, 12]
+                    }
+                ]
+            });
+            var maxLength = 30;
+            $(".show-read-more").each(function(){
+                var myStr = $(this).text();
+                if($.trim(myStr).length > maxLength){
+                    var newStr = myStr.substring(0, maxLength);
+                    var removedStr = myStr.substring(maxLength, $.trim(myStr).length);
+                    $(this).empty().html(newStr);
+                    $(this).append(' <a href="javascript:void(0);" class="read-more">read more...</a>');
+                    $(this).append('<span class="more-text">' + removedStr + '</span>');
+                }
+            });
+            $(".read-more").click(function(){
+                $(this).siblings(".more-text").contents().unwrap();
+                $(this).remove();
+            });
+        });
+    </script>
+</body>
+</html>
+"""
+        )
+        file_descriptor.close()
+    except Exception as error:
+        logger.critical(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+        )
+        sys.exit()
 
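Review note: the footer cannot reuse the descriptors from fill_file_descriptors() because report() closes them all once the scan loop finishes, so add_html_footer() reopens the file by name in append mode, the same pattern close_json() uses to append its closing bracket. A self-contained sketch of that reopen-and-finalise step (finalize_html() is an illustrative name, not Prowler code):

    import os
    import tempfile

    def finalize_html(path: str) -> None:
        # reopen by name in append mode and emit the closing tags
        with open(path, "a") as fd:
            fd.write("</tbody></table></body></html>\n")

    path = os.path.join(tempfile.gettempdir(), "example-report.html")
    with open(path, "w") as fd:
        fd.write("<html><body><table><tbody>")  # written earlier in the run
    finalize_html(path)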
@@ -42,7 +42,20 @@ class Test_Outputs:
     def test_fill_file_descriptors(self):
-        audited_account = "123456789012"
         output_directory = f"{os.path.dirname(os.path.realpath(__file__))}"
         generate_csv_fields(Check_Output_CSV)
+        audit_info = AWS_Audit_Info(
+            original_session=None,
+            audit_session=None,
+            audited_account="123456789012",
+            audited_identity_arn="test-arn",
+            audited_user_id="test",
+            audited_partition="aws",
+            profile="default",
+            profile_region="eu-west-1",
+            credentials=None,
+            assumed_role_info=None,
+            audited_regions=["eu-west-2", "eu-west-1"],
+            organizations_metadata=None,
+        )
         test_output_modes = [
             ["csv"],
             ["json"],
@@ -101,6 +114,7 @@ class Test_Outputs:
                 output_mode_list,
                 output_directory,
                 output_filename,
+                audit_info,
             )
             for output_mode in output_mode_list:
                 assert (
 
@@ -21,7 +21,8 @@ class cloudformation_outputs_find_secrets(Check):
             report.region = stack.region
             report.resource_id = stack.name
             report.resource_arn = stack.arn
-
+            report.status = "PASS"
+            report.status_extended = f"No secrets found in Stack {stack.name} Outputs."
             if stack.outputs:
                 temp_output_file = tempfile.NamedTemporaryFile(delete=False)
 
@@ -41,11 +42,6 @@ class cloudformation_outputs_find_secrets(Check):
                     report.status_extended = (
                         f"Potential secret found in Stack {stack.name} Outputs."
                     )
-                else:
-                    report.status = "PASS"
-                    report.status_extended = (
-                        f"No secrets found in Stack {stack.name} Outputs."
-                    )
 
                 os.remove(temp_output_file.name)
             else:
prowler
@@ -31,6 +31,7 @@ from lib.check.checks_loader import load_checks_to_execute
 from lib.check.compliance import update_checks_metadata_with_compliance
 from lib.logger import logger, set_logging_config
 from lib.outputs.outputs import (
+    add_html_footer,
     close_json,
     display_compliance_table,
     display_summary_table,
@@ -167,7 +168,7 @@ if __name__ == "__main__":
         nargs="+",
         help="Output mode, by default csv",
         default=["csv", "json"],
-        choices=["csv", "json", "json-asff"],
+        choices=["csv", "json", "json-asff", "html"],
     )
     parser.add_argument(
         "-F",
@@ -404,6 +405,8 @@ if __name__ == "__main__":
         # Close json file if exists
         if mode == "json" or mode == "json-asff":
             close_json(output_filename, output_directory, mode)
+        if mode == "html":
+            add_html_footer(output_filename, output_directory)
     # Send output to S3 if needed (-B / -D)
     if args.output_bucket or args.output_bucket_no_assume:
         output_bucket = args.output_bucket
@@ -424,18 +427,19 @@ if __name__ == "__main__":
     if args.security_hub:
         resolve_security_hub_previous_findings(output_directory, audit_info)
 
     if findings:
-        # Display summary table
-        display_summary_table(
-            findings, audit_info, output_filename, output_directory, provider
-        )
+        # Display summary table
+        display_summary_table(
+            findings,
+            audit_info,
+            audit_output_options,
+            provider,
+        )
 
-    if compliance_framework:
-        # Display compliance table
-        display_compliance_table(
-            findings,
-            bulk_checks_metadata,
-            compliance_framework,
-            output_filename,
-            output_directory,
-        )
+    if compliance_framework and findings:
+        # Display compliance table
+        display_compliance_table(
+            findings,
+            bulk_checks_metadata,
+            compliance_framework,
+            audit_output_options,
+        )
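Review note: because the output-mode argument uses nargs="+" with a choices list, several formats can be requested in one run and each token is validated individually, so passing html now succeeds. A self-contained sketch of that parser shape (the -M flag name is an assumption for illustration; the hunk above does not show it):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-M",  # flag name assumed; not visible in this hunk
        nargs="+",
        default=["csv", "json"],
        choices=["csv", "json", "json-asff", "html"],
    )
    print(parser.parse_args(["-M", "csv", "html"]))  # -> Namespace(M=['csv', 'html'])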