From 7b9fae56051915b3ed3698de86d66b447998b1c0 Mon Sep 17 00:00:00 2001
From: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Date: Wed, 6 Jul 2022 14:35:15 +0200
Subject: [PATCH] feat(json): add json output (#1251)

* feat(json): add json output

* feat(pydantic): add pydantic model to json output

Co-authored-by: sergargar
---
 config/config.py                              |  1 +
 lib/outputs/models.py                         | 47 +++++++++++++--
 lib/outputs/outputs.py                        | 59 ++++++++++++++++++-
 ..._disable_30_days_credentials.metadata.json |  2 +-
 ..._disable_90_days_credentials.metadata.json |  2 +-
 prowler                                       |  7 ++-
 6 files changed, 108 insertions(+), 10 deletions(-)

diff --git a/config/config.py b/config/config.py
index fdca6968..c3701897 100644
--- a/config/config.py
+++ b/config/config.py
@@ -13,3 +13,4 @@ aws_services_json_file = "providers/aws/aws_regions_by_service.json"
 
 default_output_directory = getcwd() + "/output"
 csv_file_suffix = timestamp.strftime("%Y%m%d%H%M%S") + ".csv"
+json_file_suffix = timestamp.strftime("%Y%m%d%H%M%S") + ".json"
diff --git a/lib/outputs/models.py b/lib/outputs/models.py
index 4a3f341a..42747285 100644
--- a/lib/outputs/models.py
+++ b/lib/outputs/models.py
@@ -1,4 +1,7 @@
 from dataclasses import asdict, dataclass
+from typing import Optional
+
+from pydantic import BaseModel
 
 from config.config import timestamp
 from lib.check.models import Check_Report
@@ -13,6 +16,39 @@ class Compliance_Framework:
     Control: list
 
 
+class Check_Output_JSON(BaseModel):
+    AssessmentStartTime: Optional[str]
+    FindingUniqueId: Optional[str]
+    Provider: str
+    Profile: Optional[str]
+    AccountId: Optional[str]
+    OrganizationsInfo: Optional[dict]
+    Region: Optional[str]
+    CheckID: str
+    CheckName: str
+    CheckTitle: str
+    CheckType: str
+    ServiceName: str
+    SubServiceName: str
+    Status: Optional[str]
+    StatusExtended: Optional[str]
+    Severity: str
+    ResourceId: Optional[str]
+    ResourceArn: Optional[str]
+    ResourceType: str
+    ResourceDetails: Optional[str]
+    Tags: dict
+    Description: str
+    Risk: str
+    RelatedUrl: str
+    Remediation: dict
+    Categories: list
+    DependsOn: list
+    RelatedTo: list
+    Notes: str
+    Compliance: list
+
+
 @dataclass
 class Check_Output_CSV:
     assessment_start_time: str
@@ -73,11 +109,12 @@ class Check_Output_CSV:
         self.provider = report.check_metadata.Provider
         self.profile = profile
         self.account_id = account
-        self.account_name = organizations.account_details_name
-        self.account_email = organizations.account_details_email
-        self.account_arn = organizations.account_details_arn
-        self.account_org = organizations.account_details_org
-        self.account_tags = organizations.account_details_tags
+        if organizations:
+            self.account_name = organizations.account_details_name
+            self.account_email = organizations.account_details_email
+            self.account_arn = organizations.account_details_arn
+            self.account_org = organizations.account_details_org
+            self.account_tags = organizations.account_details_tags
         self.region = report.region
         self.check_id = report.check_metadata.CheckID
         self.check_name = report.check_metadata.CheckName
diff --git a/lib/outputs/outputs.py b/lib/outputs/outputs.py
index 4ccc26ab..3cf7a784 100644
--- a/lib/outputs/outputs.py
+++ b/lib/outputs/outputs.py
@@ -1,9 +1,11 @@
+import json
+import os
 from csv import DictWriter
 
 from colorama import Fore, Style
 
-from config.config import csv_file_suffix
-from lib.outputs.models import Check_Output_CSV
+from config.config import csv_file_suffix, json_file_suffix, timestamp
+from lib.outputs.models import Check_Output_CSV, Check_Output_JSON
 from lib.utils.utils import file_exists, open_file
 
 
@@ -51,6 +53,13 @@ def report(check_findings, output_options, audit_info):
                 )
                 csv_writer.writerow(finding_output.__dict__)
 
+            if "json" in file_descriptors:
+                finding_output = Check_Output_JSON(**finding.check_metadata.dict())
+                fill_json(finding_output, audit_info, finding)
+
+                json.dump(finding_output.dict(), file_descriptors["json"], indent=4)
+                file_descriptors["json"].write(",")
+
         if file_descriptors:
             # Close all file descriptors
             for file_descriptor in file_descriptors:
@@ -81,6 +90,23 @@ def fill_file_descriptors(output_modes, audited_account, output_directory, csv_f
                 csv_writer.writeheader()
 
             file_descriptors.update({output_mode: file_descriptor})
+
+        if output_mode == "json":
+            filename = f"{output_directory}/prowler-output-{audited_account}-{json_file_suffix}"
+            if file_exists(filename):
+                file_descriptor = open_file(
+                    filename,
+                    "a",
+                )
+            else:
+                file_descriptor = open_file(
+                    filename,
+                    "a",
+                )
+                file_descriptor.write("[")
+
+            file_descriptors.update({output_mode: file_descriptor})
+
     return file_descriptors
 
 
@@ -104,3 +130,32 @@ def generate_csv_fields():
     for field in Check_Output_CSV.__dict__["__annotations__"].keys():
         csv_fields.append(field)
     return csv_fields
+
+
+def fill_json(finding_output, audit_info, finding):
+    finding_output.AssessmentStartTime = timestamp.isoformat()
+    finding_output.FindingUniqueId = ""
+    finding_output.Profile = audit_info.profile
+    finding_output.AccountId = audit_info.audited_account
+    if audit_info.organizations_metadata:
+        finding_output.OrganizationsInfo = audit_info.organizations_metadata.__dict__
+    finding_output.Region = finding.region
+    finding_output.Status = finding.status
+    finding_output.StatusExtended = finding.status_extended
+    finding_output.ResourceId = finding.resource_id
+    finding_output.ResourceArn = finding.resource_arn
+    finding_output.ResourceDetails = finding.resource_details
+
+    return finding_output
+
+
+def close_json(output_directory, audited_account):
+    filename = f"{output_directory}/prowler-output-{audited_account}-{json_file_suffix}"
+    file_descriptor = open_file(
+        filename,
+        "a",
+    )
+    file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
+    file_descriptor.truncate()
+    file_descriptor.write("]")
+    file_descriptor.close()
diff --git a/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.metadata.json b/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.metadata.json
index 5b821948..05e1d6a0 100644
--- a/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.metadata.json
+++ b/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.metadata.json
@@ -3,7 +3,7 @@
     "cat1",
     "cat2"
   ],
-  "CheckAlias": "extra764",
+  "CheckAlias": "extra774",
   "CheckID": "iam_disable_30_days_credentials",
   "CheckName": "iam_disable_30_days_credentials",
   "CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
diff --git a/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.metadata.json b/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.metadata.json
index fbe39cd7..3fdaddd2 100644
--- a/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.metadata.json
+++ b/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.metadata.json
@@ -3,7 +3,7 @@
     "cat1",
     "cat2"
   ],
-  "CheckAlias": "extra764",
+  "CheckAlias": "check13",
   "CheckID": "iam_disable_90_days_credentials",
   "CheckName": "iam_disable_90_days_credentials",
   "CheckTitle": "Ensure credentials unused for 90 days or greater are disabled",
diff --git a/prowler b/prowler
index 9ea3f5a5..60af7564 100755
--- a/prowler
+++ b/prowler
@@ -23,6 +23,7 @@ from lib.check.check import (
 )
 from lib.check.checks_loader import load_checks_to_execute
 from lib.logger import logger, set_logging_config
+from lib.outputs.outputs import close_json
 from providers.aws.aws_provider import provider_set_session
 
 if __name__ == "__main__":
@@ -124,7 +125,7 @@ if __name__ == "__main__":
         "--output-modes",
         nargs="+",
         help="Output mode, by default csv",
-        choices=["csv"],
+        choices=["csv", "json"],
     )
     parser.add_argument(
         "-o",
@@ -269,3 +270,7 @@ if __name__ == "__main__":
         logger.error(
             "There are no checks to execute. Please, check your input arguments"
         )
+
+    # Close json file if exists
+    if "json" in output_modes:
+        close_json(output_directory, audit_info.audited_account)