Mirror of https://github.com/ghndrx/prowler.git (synced 2026-02-10 14:55:00 +00:00)
feat(custom_filename): custom output filename (#1345)
* feat(s3_output): send outputs to S3 bucket
* feat(custom_filename): custom output filename

Co-authored-by: sergargar <sergio@verica.io>
@@ -15,6 +15,6 @@ default_output_directory = getcwd() + "/output"
 
 output_file_timestamp = timestamp.strftime("%Y%m%d%H%M%S")
 timestamp_iso = timestamp.isoformat()
-csv_file_suffix = f"{output_file_timestamp}.csv"
-json_file_suffix = f"{output_file_timestamp}.json"
-json_asff_file_suffix = f"{output_file_timestamp}.asff.json"
+csv_file_suffix = ".csv"
+json_file_suffix = ".json"
+json_asff_file_suffix = ".asff.json"
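
Note: after this hunk the suffix constants carry only the extension; the timestamp travels with the report name instead of being baked into each suffix. A minimal sketch of the resulting composition, with an illustrative account id (not taken from the diff):

    from datetime import datetime

    output_file_timestamp = datetime.today().strftime("%Y%m%d%H%M%S")
    csv_file_suffix = ".csv"

    # The timestamp now lives in the filename; the suffix is a bare extension.
    output_filename = f"prowler-output-123456789012-{output_file_timestamp}"
    print(f"{output_filename}{csv_file_suffix}")
    # e.g. prowler-output-123456789012-20220815143000.csv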
@@ -178,13 +178,15 @@ def set_output_options(
     output_modes: list,
     input_output_directory: str,
     security_hub_enabled: bool,
+    output_filename: str,
 ):
     global output_options
     output_options = Output_From_Options(
         is_quiet=quiet,
         output_modes=output_modes,
         output_directory=input_output_directory,
-        security_hub_enabled=security_hub_enabled
+        security_hub_enabled=security_hub_enabled,
+        output_filename=output_filename,
         # set input options here
     )
     return output_options
@@ -14,6 +14,7 @@ class Output_From_Options:
     output_modes: list
     output_directory: str
     security_hub_enabled: bool
+    output_filename: str
 
 
 # Testing Pending
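
Note: the options object and its factory now carry the report name chosen (or defaulted) in the entrypoint, so the writers no longer need the audited account to build paths. A rough, self-contained sketch of the shape implied by the diff; the @dataclass decorator and the is_quiet field are assumptions based on the surrounding code, not shown in this hunk:

    from dataclasses import dataclass

    @dataclass
    class Output_From_Options:
        is_quiet: bool
        output_modes: list
        output_directory: str
        security_hub_enabled: bool
        output_filename: str

    options = Output_From_Options(
        is_quiet=False,
        output_modes=["csv", "json"],
        output_directory="/tmp/prowler-output",
        security_hub_enabled=False,
        output_filename="prowler-output-123456789012-20220815143000",
    )
    print(options.output_filename)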
@@ -1,5 +1,6 @@
 import json
 import os
+import sys
 from csv import DictWriter
 
 from colorama import Fore, Style
@@ -12,6 +13,7 @@ from config.config import (
     timestamp_iso,
     timestamp_utc,
 )
+from lib.logger import logger
 from lib.outputs.models import (
     Check_Output_CSV,
     Check_Output_JSON,
@@ -37,9 +39,9 @@ def report(check_findings, output_options, audit_info):
 
     file_descriptors = fill_file_descriptors(
         output_options.output_modes,
-        audit_info.audited_account,
         output_options.output_directory,
         csv_fields,
+        output_options.output_filename,
     )
 
     if check_findings:
@@ -101,13 +103,11 @@ def report(check_findings, output_options, audit_info):
             file_descriptors.get(file_descriptor).close()
 
 
-def fill_file_descriptors(output_modes, audited_account, output_directory, csv_fields):
+def fill_file_descriptors(output_modes, output_directory, csv_fields, output_filename):
     file_descriptors = {}
     for output_mode in output_modes:
         if output_mode == "csv":
-            filename = (
-                f"{output_directory}/prowler-output-{audited_account}-{csv_file_suffix}"
-            )
+            filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
             if file_exists(filename):
                 file_descriptor = open_file(
                     filename,
@@ -127,7 +127,7 @@ def fill_file_descriptors(output_modes, audited_account, output_directory, csv_f
             file_descriptors.update({output_mode: file_descriptor})
 
         if output_mode == "json":
-            filename = f"{output_directory}/prowler-output-{audited_account}-{json_file_suffix}"
+            filename = f"{output_directory}/{output_filename}{json_file_suffix}"
             if file_exists(filename):
                 file_descriptor = open_file(
                     filename,
@@ -143,7 +143,7 @@ def fill_file_descriptors(output_modes, audited_account, output_directory, csv_f
             file_descriptors.update({output_mode: file_descriptor})
 
         if output_mode == "json-asff":
-            filename = f"{output_directory}/prowler-output-{audited_account}-{json_asff_file_suffix}"
+            filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
             if file_exists(filename):
                 file_descriptor = open_file(
                     filename,
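
Note: the three writer branches now share one path pattern, {output_directory}/{output_filename}{suffix}, and the function keeps returning a dict of append-mode handles keyed by mode. A condensed, runnable sketch of that effect, using the plain open() builtin and a temporary directory because open_file() and file_exists() are project helpers not shown here:

    import tempfile

    suffixes = {"csv": ".csv", "json": ".json", "json-asff": ".asff.json"}
    output_directory = tempfile.mkdtemp()
    output_filename = "prowler-output-123456789012-20220815143000"

    file_descriptors = {}
    for output_mode in ["csv", "json"]:
        filename = f"{output_directory}/{output_filename}{suffixes[output_mode]}"
        file_descriptors[output_mode] = open(filename, "a")  # open_file(filename, "a") in the codebase

    print({mode: fd.name for mode, fd in file_descriptors.items()})
    for fd in file_descriptors.values():
        fd.close()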
@@ -238,17 +238,47 @@ def fill_json_asff(finding_output, audit_info, finding):
     return finding_output
 
 
-def close_json(output_directory, audited_account, mode):
-    suffix = json_file_suffix
-    if mode == "json-asff":
-        suffix = json_asff_file_suffix
-    filename = f"{output_directory}/prowler-output-{audited_account}-{suffix}"
-    file_descriptor = open_file(
-        filename,
-        "a",
-    )
-    # Replace last comma for square bracket
-    file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
-    file_descriptor.truncate()
-    file_descriptor.write("]")
-    file_descriptor.close()
+def close_json(output_filename, output_directory, mode):
+    try:
+        suffix = json_file_suffix
+        if mode == "json-asff":
+            suffix = json_asff_file_suffix
+        filename = f"{output_directory}/{output_filename}{suffix}"
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+        # Replace last comma for square bracket
+        file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
+        file_descriptor.truncate()
+        file_descriptor.write("]")
+        file_descriptor.close()
+    except Exception as error:
+        logger.critical(f"{error.__class__.__name__} -- {error}")
+        sys.exit()
+
+
+def send_to_s3_bucket(
+    output_filename, output_directory, output_mode, output_bucket, audit_session
+):
+    try:
+        # Get only last part of the path
+        output_directory = output_directory.split("/")[-1]
+        if output_mode == "csv":
+            filename = f"{output_filename}{csv_file_suffix}"
+        elif output_mode == "json":
+            filename = f"{output_filename}{json_file_suffix}"
+        elif output_mode == "json-asff":
+            filename = f"{output_filename}{json_asff_file_suffix}"
+        logger.info(f"Sending outputs to S3 bucket {output_bucket}")
+        # Check if security hub is enabled in current region
+        s3_client = audit_session.client("s3")
+        s3_client.upload_file(
+            output_directory + "/" + filename,
+            output_bucket,
+            output_directory + "/" + output_mode + "/" + filename,
+        )
+
+    except Exception as error:
+        logger.critical(f"{error.__class__.__name__} -- {error}")
+        sys.exit()
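
Note: send_to_s3_bucket keeps only the last component of the output directory and groups objects by mode, so uploads land under <directory>/<mode>/<filename> in the bucket. A small sketch of the local path and object key it produces; the directory, account id and timestamp are illustrative:

    output_directory = "/home/user/prowler/output"
    output_mode = "json"
    filename = "prowler-output-123456789012-20220815143000.json"

    prefix = output_directory.split("/")[-1]          # "output"
    local_path = f"{prefix}/{filename}"               # read relative to the working directory
    s3_key = f"{prefix}/{output_mode}/{filename}"     # output/json/prowler-output-...json
    print(local_path, s3_key)
    # the real code then calls s3_client.upload_file(local_path, output_bucket, s3_key)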
@@ -1,3 +1,4 @@
+import os
 from os import path, remove
 
 from colorama import Fore
@@ -6,6 +7,7 @@ from config.config import (
     csv_file_suffix,
     json_asff_file_suffix,
     json_file_suffix,
+    output_file_timestamp,
     prowler_version,
     timestamp_iso,
     timestamp_utc,
@@ -33,7 +35,7 @@ from providers.aws.models import AWS_Audit_Info
 class Test_Outputs:
     def test_fill_file_descriptors(self):
         audited_account = "123456789012"
-        output_directory = "."
+        output_directory = f"{os.path.dirname(os.path.realpath(__file__))}"
         csv_fields = generate_csv_fields()
         test_output_modes = [
             ["csv"],
@@ -42,47 +44,47 @@ class Test_Outputs:
             ["csv", "json"],
             ["csv", "json", "json-asff"],
         ]
 
+        output_filename = f"prowler-output-{audited_account}-{output_file_timestamp}"
         expected = [
             {
                 "csv": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{csv_file_suffix}",
+                    f"{output_directory}/{output_filename}{csv_file_suffix}",
                     "a",
                 )
             },
             {
                 "json": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{json_file_suffix}",
+                    f"{output_directory}/{output_filename}{json_file_suffix}",
                     "a",
                 )
             },
             {
                 "json-asff": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{json_asff_file_suffix}",
+                    f"{output_directory}/{output_filename}{json_asff_file_suffix}",
                     "a",
                 )
             },
             {
                 "csv": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{csv_file_suffix}",
+                    f"{output_directory}/{output_filename}{csv_file_suffix}",
                     "a",
                 ),
                 "json": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{json_file_suffix}",
+                    f"{output_directory}/{output_filename}{json_file_suffix}",
                     "a",
                 ),
             },
             {
                 "csv": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{csv_file_suffix}",
+                    f"{output_directory}/{output_filename}{csv_file_suffix}",
                     "a",
                 ),
                 "json": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{json_file_suffix}",
+                    f"{output_directory}/{output_filename}{json_file_suffix}",
                     "a",
                 ),
                 "json-asff": open_file(
-                    f"{output_directory}/prowler-output-{audited_account}-{json_asff_file_suffix}",
+                    f"{output_directory}/{output_filename}{json_asff_file_suffix}",
                     "a",
                 ),
             },
@@ -90,7 +92,10 @@ class Test_Outputs:
 
         for index, output_mode_list in enumerate(test_output_modes):
            test_output_file_descriptors = fill_file_descriptors(
-                output_mode_list, audited_account, output_directory, csv_fields
+                output_mode_list,
+                output_directory,
+                csv_fields,
+                output_filename,
             )
             for output_mode in output_mode_list:
                 assert (
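
Note: the test now anchors output_directory at the test module itself instead of ".", so the expected file handles do not depend on the directory pytest is launched from. A quick illustration of the difference (the printed paths are whatever applies on the machine running it):

    import os

    print(os.path.realpath("."))                        # varies with the working directory
    print(os.path.dirname(os.path.realpath(__file__)))  # always the directory containing this file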
prowler
@@ -6,7 +6,7 @@ import sys
 from os import mkdir
 from os.path import isdir
 
-from config.config import default_output_directory
+from config.config import default_output_directory, output_file_timestamp
 from lib.banner import print_banner, print_version
 from lib.check.check import (
     bulk_load_checks_metadata,
@@ -23,7 +23,7 @@ from lib.check.check import (
 )
 from lib.check.checks_loader import load_checks_to_execute
 from lib.logger import logger, set_logging_config
-from lib.outputs.outputs import close_json
+from lib.outputs.outputs import close_json, send_to_s3_bucket
 from providers.aws.aws_provider import provider_set_session
 from providers.aws.lib.security_hub import resolve_security_hub_previous_findings
 
@@ -132,9 +132,16 @@ if __name__ == "__main__":
         help="Output mode, by default csv",
         choices=["csv", "json", "json-asff"],
     )
+    parser.add_argument(
+        "-F",
+        "--output-filename",
+        nargs="?",
+        default=None,
+        help="Custom output report name, if not specified will use default output/prowler-output-ACCOUNT_NUM-OUTPUT_DATE.format.",
+    )
     parser.add_argument(
         "-o",
-        "--custom-output-directory",
+        "--output-directory",
         nargs="?",
         help="Custom output directory, by default the folder where Prowler is stored",
         default=default_output_directory,
@@ -151,6 +158,13 @@ if __name__ == "__main__":
         action="store_true",
         help="Send check output to AWS Security Hub",
     )
+    parser.add_argument(
+        "-B",
+        "--output-bucket",
+        nargs="?",
+        default=None,
+        help="Custom output bucket, requires -M <mode> and it can work also with -o flag.",
+    )
     # Parse Arguments
     args = parser.parse_args()
 
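
Note: the two new flags surface in args under the usual argparse attribute names, args.output_filename and args.output_bucket, both defaulting to None when the flags are omitted. A minimal sketch with a throwaway parser (only the new flags, not Prowler's full argument set):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-F", "--output-filename", nargs="?", default=None)
    parser.add_argument("-B", "--output-bucket", nargs="?", default=None)

    args = parser.parse_args(["-F", "my-assessment", "-B", "my-prowler-bucket"])
    print(args.output_filename, args.output_bucket)   # my-assessment my-prowler-bucket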
@@ -162,7 +176,8 @@ if __name__ == "__main__":
     services = args.services
     groups = args.groups
     checks_file = args.checks_file
-    output_directory = args.custom_output_directory
+    output_directory = args.output_directory
+    output_filename = args.output_filename
     severities = args.severity
     output_modes = args.output_modes
 
@@ -238,21 +253,11 @@ if __name__ == "__main__":
     else:
         output_modes.append("json-asff")
 
-    # Setting output options
-    audit_output_options = set_output_options(
-        args.quiet, output_modes, output_directory, args.security_hub
-    )
-
-    # Check output directory, if it is default and not created -> create it
-    # If is custom and not created -> error
+    # Check output directory, if it is not created -> create it
     if output_directory:
         if not isdir(output_directory):
-            if output_directory == default_output_directory:
-                if output_modes:
-                    mkdir(default_output_directory)
-            else:
-                logger.critical("Output directory does not exist")
-                sys.exit()
+            if output_modes:
+                mkdir(output_directory)
 
     # Set global session
     audit_info = provider_set_session(
@@ -264,6 +269,17 @@ if __name__ == "__main__":
         args.organizations_role,
     )
 
+    # Check if custom output filename was input, if not, set the default
+    if not output_filename:
+        output_filename = (
+            f"prowler-output-{audit_info.audited_account}-{output_file_timestamp}"
+        )
+
+    # Setting output options
+    audit_output_options = set_output_options(
+        args.quiet, output_modes, output_directory, args.security_hub, output_filename
+    )
+
     # Execute checks
     if len(checks_to_execute):
         for check_name in checks_to_execute:
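
Note: a name passed with -F is used as-is; only when the flag is absent does the run fall back to the historical prowler-output-ACCOUNT-TIMESTAMP pattern, built here once the audited account is known. A tiny sketch of that precedence with a hypothetical helper (not part of the codebase; values are illustrative):

    def resolve_output_filename(cli_value, audited_account, output_file_timestamp):
        # mirrors the fallback above; cli_value is args.output_filename
        if cli_value:
            return cli_value
        return f"prowler-output-{audited_account}-{output_file_timestamp}"

    print(resolve_output_filename(None, "123456789012", "20220815143000"))
    print(resolve_output_filename("my-assessment", "123456789012", "20220815143000"))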
@@ -291,11 +307,20 @@ if __name__ == "__main__":
             "There are no checks to execute. Please, check your input arguments"
         )
 
-    # Close json file if exists
     if output_modes:
         for mode in output_modes:
+            # Close json file if exists
             if mode == "json" or mode == "json-asff":
-                close_json(output_directory, audit_info.audited_account, mode)
+                close_json(output_filename, output_directory, mode)
+            # Send output to S3 if needed
+            if args.output_bucket:
+                send_to_s3_bucket(
+                    output_filename,
+                    output_directory,
+                    mode,
+                    args.output_bucket,
+                    audit_info.audit_session,
+                )
 
     # Resolve previous fails of Security Hub
     if args.security_hub:
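
Note: the JSON writers leave a trailing comma after the last finding, so the close step above has to seek back one byte, truncate it, and write the closing bracket; with the custom name, the call now only needs the filename and directory. A self-contained reproduction of that trick; the opening bracket and the findings written here are illustrative stand-ins for what Prowler writes during a run:

    import json
    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), "prowler-output-example.json")
    with open(path, "a") as fd:
        fd.write("[")
        for finding in ({"status": "PASS"}, {"status": "FAIL"}):
            fd.write(json.dumps(finding) + ",")

    # equivalent of close_json: drop the trailing comma and terminate the array
    with open(path, "a") as fd:
        fd.seek(fd.tell() - 1, os.SEEK_SET)
        fd.truncate()
        fd.write("]")

    with open(path) as fd:
        print(fd.read())   # [{"status": "PASS"},{"status": "FAIL"}]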