mirror of
https://github.com/ghndrx/prowler.git
synced 2026-02-11 07:15:15 +00:00
* feat(json): add json output * feat(pydantic): add pydantic model to json output * feat(json-asff): add json-asff ouput * Update config/config.py Co-authored-by: Pepe Fagoaga <pepe@verica.io> * Update models.py * fix(comments): Resolve comments. Co-authored-by: sergargar <sergio@verica.io> Co-authored-by: Pepe Fagoaga <pepe@verica.io>
235 lines
7.9 KiB
Python
import json
|
|
import os
|
|
from csv import DictWriter
|
|
|
|
from colorama import Fore, Style
|
|
|
|
from config.config import (
|
|
csv_file_suffix,
|
|
json_asff_file_suffix,
|
|
json_file_suffix,
|
|
prowler_version,
|
|
timestamp,
|
|
)
|
|
from lib.outputs.models import (
|
|
Check_Output_CSV,
|
|
Check_Output_JSON,
|
|
Check_Output_JSON_ASFF,
|
|
Compliance,
|
|
ProductFields,
|
|
Resource,
|
|
Severity,
|
|
)
|
|
from lib.utils.utils import file_exists, open_file
|
|
|
|
|
|
def report(check_findings, output_options, audit_info):
    """Print each finding to stdout and send it to the configured outputs.

    Args:
        check_findings: list of finding objects from a check; sorted in
            place by region before reporting.
        output_options: object exposing output_modes (e.g. ["csv", "json",
            "json-asff"]), output_directory and is_quiet.
        audit_info: object exposing audited_account, profile and
            organizations_metadata for the audited account.
    """
    check_findings.sort(key=lambda x: x.region)

    csv_fields = []
    # check output options
    file_descriptors = {}
    if output_options.output_modes:
        if "csv" in output_options.output_modes:
            csv_fields = generate_csv_fields()

        file_descriptors = fill_file_descriptors(
            output_options.output_modes,
            audit_info.audited_account,
            output_options.output_directory,
            csv_fields,
        )

    # Build the CSV writer once, instead of once per finding.
    csv_writer = None
    if "csv" in file_descriptors:
        csv_writer = DictWriter(
            file_descriptors["csv"], fieldnames=csv_fields, delimiter=";"
        )

    for finding in check_findings:
        # printing the finding ...
        color = set_report_color(finding.status)
        # Quiet mode only shows FAIL findings; otherwise every finding is shown.
        if not output_options.is_quiet or "FAIL" in finding.status:
            print(
                f"{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
            )

        if file_descriptors:
            # sending the finding to input options
            if csv_writer is not None:
                finding_output = Check_Output_CSV(
                    audit_info.audited_account,
                    audit_info.profile,
                    finding,
                    audit_info.organizations_metadata,
                )
                csv_writer.writerow(finding_output.__dict__)

            if "json" in file_descriptors:
                finding_output = Check_Output_JSON(**finding.check_metadata.dict())
                fill_json(finding_output, audit_info, finding)

                json.dump(finding_output.dict(), file_descriptors["json"], indent=4)
                # Trailing comma is replaced by "]" later by close_json().
                file_descriptors["json"].write(",")

            if "json-asff" in file_descriptors:
                finding_output = Check_Output_JSON_ASFF()
                fill_json_asff(finding_output, audit_info, finding)

                json.dump(
                    finding_output.dict(), file_descriptors["json-asff"], indent=4
                )
                file_descriptors["json-asff"].write(",")

    # Close all file descriptors
    for file_descriptor in file_descriptors.values():
        file_descriptor.close()
|
|
|
|
|
|
def fill_file_descriptors(output_modes, audited_account, output_directory, csv_fields):
    """Open one output file per requested mode, keyed by mode name.

    Brand-new files get their prologue written (an uppercased CSV header for
    "csv", an opening "[" for the JSON-array modes); pre-existing files are
    appended to untouched.

    Args:
        output_modes: iterable of output modes ("csv", "json", "json-asff").
        audited_account: account id, used in the output file name.
        output_directory: directory where output files live.
        csv_fields: CSV column names (only used for the "csv" mode).

    Returns:
        dict mapping each recognized output mode to its open file descriptor.
    """
    file_descriptors = {}
    suffixes = {
        "csv": csv_file_suffix,
        "json": json_file_suffix,
        "json-asff": json_asff_file_suffix,
    }
    for output_mode in output_modes:
        suffix = suffixes.get(output_mode)
        if suffix is None:
            # Unknown modes are silently skipped, as in the original per-mode ifs.
            continue
        filename = f"{output_directory}/prowler-output-{audited_account}-{suffix}"
        # Check existence BEFORE opening: append mode creates the file, and the
        # header/bracket prologue must only go into brand-new files.
        is_new_file = not file_exists(filename)
        file_descriptor = open_file(
            filename,
            "a",
        )
        if is_new_file:
            if output_mode == "csv":
                csv_header = [x.upper() for x in csv_fields]
                csv_writer = DictWriter(
                    file_descriptor, fieldnames=csv_header, delimiter=";"
                )
                csv_writer.writeheader()
            else:
                # json and json-asff files are one JSON array of findings.
                file_descriptor.write("[")

        file_descriptors.update({output_mode: file_descriptor})

    return file_descriptors
|
|
|
|
|
|
def set_report_color(status):
    """Return the terminal color for a finding status.

    Args:
        status: one of "PASS", "FAIL", "ERROR" or "WARNING".

    Raises:
        Exception: if status is not one of the four known values.
    """
    status_colors = {
        "PASS": Fore.GREEN,
        "FAIL": Fore.RED,
        "ERROR": Fore.BLACK,
        "WARNING": Fore.YELLOW,
    }
    if status not in status_colors:
        raise Exception("Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING")
    return status_colors[status]
|
|
|
|
|
|
def generate_csv_fields():
    """Return the CSV column names for a finding row.

    The columns are the annotated fields of Check_Output_CSV, in declaration
    order (Python preserves annotation order).
    """
    return list(Check_Output_CSV.__annotations__)
|
|
|
|
|
|
def fill_json(finding_output, audit_info, finding):
    """Fill a Check_Output_JSON object with audit context and finding data.

    The object is mutated in place and also returned for convenience.
    """
    # Audit-level context.
    finding_output.AssessmentStartTime = timestamp.isoformat()
    finding_output.FindingUniqueId = ""
    finding_output.Profile = audit_info.profile
    finding_output.AccountId = audit_info.audited_account
    if audit_info.organizations_metadata:
        finding_output.OrganizationsInfo = audit_info.organizations_metadata.__dict__
    # Per-finding fields, copied over in one sweep.
    for attribute, value in (
        ("Region", finding.region),
        ("Status", finding.status),
        ("StatusExtended", finding.status_extended),
        ("ResourceId", finding.resource_id),
        ("ResourceArn", finding.resource_arn),
        ("ResourceDetails", finding.resource_details),
    ):
        setattr(finding_output, attribute, value)

    return finding_output
|
|
|
|
|
|
def fill_json_asff(finding_output, audit_info, finding):
    """Fill a Check_Output_JSON_ASFF object for the AWS Security Finding Format.

    Builds Id, ProductArn, ProductFields, Severity, Resources, Compliance and
    the timestamps from the audit context and the finding. The object is
    mutated in place and also returned for convenience.
    """
    metadata = finding.check_metadata
    finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{str(hash(finding.resource_id))}"
    finding_output.ProductArn = f"arn:{audit_info.audited_partition}:securityhub:{finding.region}::product/prowler/prowler"
    finding_output.ProductFields = ProductFields(
        ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_id
    )
    finding_output.GeneratorId = "prowler-" + metadata.CheckID
    finding_output.AwsAccountId = audit_info.audited_account
    finding_output.Types = metadata.CheckType
    # All three timestamps carry the same single report timestamp.
    iso_timestamp = timestamp.isoformat()
    finding_output.FirstObservedAt = iso_timestamp
    finding_output.UpdatedAt = iso_timestamp
    finding_output.CreatedAt = iso_timestamp
    finding_output.Severity = Severity(Label=metadata.Severity)
    finding_output.Title = metadata.CheckTitle
    finding_output.Description = metadata.Description
    finding_output.Resources = [
        Resource(
            Id=finding.resource_id,
            Type=metadata.ResourceType,
            Partition=audit_info.audited_partition,
            Region=finding.region,
        )
    ]
    finding_output.Compliance = Compliance(
        Status=finding.status, RelatedRequirements=[metadata.CheckType]
    )

    return finding_output
|
|
|
|
|
|
def close_json(output_directory, audited_account, mode):
    """Terminate a JSON output file: drop the trailing comma and append "]".

    Args:
        output_directory: directory containing the output file.
        audited_account: account id used in the output file name.
        mode: "json" or "json-asff"; selects which file suffix to close.
    """
    suffix = json_file_suffix
    if mode == "json-asff":
        suffix = json_asff_file_suffix
    filename = f"{output_directory}/prowler-output-{audited_account}-{suffix}"
    file_descriptor = open_file(
        filename,
        "a",
    )
    # Replace last comma for square bracket
    # NOTE(review): in append mode the position starts at EOF, so tell() equals
    # the file size; truncating at size-1 removes the trailing comma written by
    # report(). This assumes at least one finding was written — with zero
    # findings the file holds only "[", which this would turn into "]"; confirm
    # callers only invoke close_json after findings were emitted.
    file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
    file_descriptor.truncate()
    file_descriptor.write("]")
    file_descriptor.close()
|