From 7d0a95e98fd4a73587eee830b8e3684b3d2c5d67 Mon Sep 17 00:00:00 2001
From: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Date: Thu, 21 Jul 2022 12:22:56 +0200
Subject: [PATCH] feat(shub): add Security Hub integration (#1255)

---
 config/config.py                              |   3 +-
 lib/check/check.py                            |  10 +-
 lib/check/models.py                           |   1 +
 lib/outputs/models.py                         |   1 +
 lib/outputs/outputs.py                        |  28 ++++-
 lib/utils/utils.py                            |   6 +
 providers/aws/aws_provider.py                 | 106 ++++++++++++++++++
 .../iam_disable_30_days_credentials.py        |   2 +-
 .../iam_disable_90_days_credentials.py        |   1 +
 prowler                                       |  29 ++++-
 10 files changed, 171 insertions(+), 16 deletions(-)

diff --git a/config/config.py b/config/config.py
index d8aa2988..bd616f64 100644
--- a/config/config.py
+++ b/config/config.py
@@ -1,7 +1,8 @@
-from datetime import datetime
+from datetime import datetime, timezone
 from os import getcwd
 
 timestamp = datetime.today()
+timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
 prowler_version = "3.0-alfa"
 
 # Groups
diff --git a/lib/check/check.py b/lib/check/check.py
index 9b4cc1a2..97a9e861 100644
--- a/lib/check/check.py
+++ b/lib/check/check.py
@@ -173,12 +173,18 @@ def import_check(check_path: str) -> ModuleType:
     return lib
 
 
-def set_output_options(quiet: bool, output_modes: list, input_output_directory: str):
+def set_output_options(
+    quiet: bool,
+    output_modes: list,
+    input_output_directory: str,
+    security_hub_enabled: bool,
+):
     global output_options
     output_options = Output_From_Options(
         is_quiet=quiet,
         output_modes=output_modes,
-        output_directory=input_output_directory
+        output_directory=input_output_directory,
+        security_hub_enabled=security_hub_enabled
         # set input options here
     )
     return output_options
diff --git a/lib/check/models.py b/lib/check/models.py
index 82a67756..834502c9 100644
--- a/lib/check/models.py
+++ b/lib/check/models.py
@@ -13,6 +13,7 @@ class Output_From_Options:
     is_quiet: bool
     output_modes: list
     output_directory: str
+    security_hub_enabled: bool
 
 
 # Testing Pending
diff --git a/lib/outputs/models.py b/lib/outputs/models.py
index 6a31c53a..51a6b5f2 100644
--- a/lib/outputs/models.py
+++ b/lib/outputs/models.py
@@ -89,6 +89,7 @@ class Check_Output_JSON_ASFF(BaseModel):
     Description: str = ""
     Resources: List[Resource] = None
     Compliance: Compliance = None
+    Remediation: dict = None
 
 
 @dataclass
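With security_hub_enabled now part of Output_From_Options, every caller of set_output_options passes the flag explicitly. A minimal sketch of the new call shape (all values are illustrative; the real call site appears in the prowler entrypoint at the end of this patch):

    from lib.check.check import set_output_options

    # security_hub_enabled mirrors the new -S/--security-hub CLI flag
    options = set_output_options(
        quiet=False,
        output_modes=["json-asff"],
        input_output_directory="output",
        security_hub_enabled=True,
    )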
f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{str(hash(finding.resource_id))}" + # Check if there are no resources in the finding + if finding.resource_id == "": + finding.resource_id = "NONE_PROVIDED" + finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{hash_sha512(finding.resource_id)}" finding_output.ProductArn = f"arn:{audit_info.audited_partition}:securityhub:{finding.region}::product/prowler/prowler" finding_output.ProductFields = ProductFields( ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_id ) finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID finding_output.AwsAccountId = audit_info.audited_account - finding_output.Types = finding.check_metadata.CheckType + finding_output.Types = [finding.check_metadata.CheckType] finding_output.FirstObservedAt = ( finding_output.UpdatedAt - ) = finding_output.CreatedAt = timestamp.isoformat() - finding_output.Severity = Severity(Label=finding.check_metadata.Severity) + ) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ") + finding_output.Severity = Severity(Label=finding.check_metadata.Severity.upper()) finding_output.Title = finding.check_metadata.CheckTitle finding_output.Description = finding.check_metadata.Description finding_output.Resources = [ @@ -211,9 +222,14 @@ def fill_json_asff(finding_output, audit_info, finding): Region=finding.region, ) ] + # Add ED to PASS or FAIL (PASSED/FAILED) finding_output.Compliance = Compliance( - Status=finding.status, RelatedRequirements=[finding.check_metadata.CheckType] + Status=finding.status + "ED", + RelatedRequirements=[finding.check_metadata.CheckType], ) + finding_output.Remediation = { + "Recommendation": finding.check_metadata.Remediation.Recommendation + } return finding_output diff --git a/lib/utils/utils.py b/lib/utils/utils.py index 4519bbe1..9f0f485a 100644 --- a/lib/utils/utils.py +++ b/lib/utils/utils.py @@ -1,5 +1,6 @@ import json import sys +from hashlib import sha512 from io import TextIOWrapper from os.path import exists from typing import Any @@ -40,3 +41,8 @@ def file_exists(filename: str): return True else: return False + + +# create sha512 hash for string +def hash_sha512(string: str) -> str: + return sha512(string.encode("utf-8")).hexdigest()[0:9] diff --git a/providers/aws/aws_provider.py b/providers/aws/aws_provider.py index ec8897c5..55769f5f 100644 --- a/providers/aws/aws_provider.py +++ b/providers/aws/aws_provider.py @@ -1,12 +1,17 @@ +import json import sys +from itertools import groupby +from operator import itemgetter from arnparse import arnparse from boto3 import client, session from botocore.credentials import RefreshableCredentials from botocore.session import get_session +from config.config import json_asff_file_suffix, timestamp_utc from lib.arn.arn import arn_parsing from lib.logger import logger +from lib.outputs.models import Check_Output_JSON_ASFF from providers.aws.models import ( AWS_Assume_Role, AWS_Audit_Info, @@ -271,3 +276,104 @@ def get_organizations_metadata( account_details_tags=account_details_tags, ) return organizations_info + + +def send_to_security_hub( + region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session +): + try: + logger.info("Sending findings to Security Hub.") + # Check if security hub is enabled in current region + security_hub_client = session.client("securityhub", region_name=region) + security_hub_client.describe_hub() + + # Check if Prowler 
diff --git a/providers/aws/aws_provider.py b/providers/aws/aws_provider.py
index ec8897c5..55769f5f 100644
--- a/providers/aws/aws_provider.py
+++ b/providers/aws/aws_provider.py
@@ -1,12 +1,17 @@
+import json
 import sys
+from itertools import groupby
+from operator import itemgetter
 
 from arnparse import arnparse
 from boto3 import client, session
 from botocore.credentials import RefreshableCredentials
 from botocore.session import get_session
 
+from config.config import json_asff_file_suffix, timestamp_utc
 from lib.arn.arn import arn_parsing
 from lib.logger import logger
+from lib.outputs.models import Check_Output_JSON_ASFF
 from providers.aws.models import (
     AWS_Assume_Role,
     AWS_Audit_Info,
@@ -271,3 +276,104 @@ def get_organizations_metadata(
         account_details_tags=account_details_tags,
     )
     return organizations_info
+
+
+def send_to_security_hub(
+    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
+):
+    try:
+        logger.info("Sending findings to Security Hub.")
+        # Check if Security Hub is enabled in the current region
+        security_hub_client = session.client("securityhub", region_name=region)
+        security_hub_client.describe_hub()
+
+        # Check if the Prowler integration is enabled in Security Hub
+        if "prowler/prowler" not in str(
+            security_hub_client.list_enabled_products_for_import()
+        ):
+            logger.error(
+                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
+            )
+
+        # Send the finding to Security Hub
+        batch_import = security_hub_client.batch_import_findings(
+            Findings=[finding_output.dict()]
+        )
+        if batch_import["FailedCount"] > 0:
+            failed_import = batch_import["FailedFindings"][0]
+            logger.error(
+                f"Failed to send findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
+            )
+
+    except Exception as error:
+        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
+
+
+# Move previous Security Hub findings to ARCHIVED (Prowler did not re-detect them)
+def resolve_security_hub_previous_findings(
+    output_directory: str, audit_info: AWS_Audit_Info
+) -> list:
+    logger.info("Checking previous findings in Security Hub to archive them.")
+    # Read the current findings from the json-asff output file
+    with open(
+        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
+    ) as f:
+        json_asff_file = json.load(f)
+
+    # Sort the findings by ProductArn, which embeds the region
+    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
+    # Group the findings by region
+    for product_arn, current_findings in groupby(
+        json_asff_file, key=itemgetter("ProductArn")
+    ):
+        region = product_arn.split(":")[3]
+        try:
+            # Check if Security Hub is enabled in the current region
+            security_hub_client = audit_info.audit_session.client(
+                "securityhub", region_name=region
+            )
+            security_hub_client.describe_hub()
+            # Collect the IDs of the findings from this execution
+            current_findings_ids = []
+            for finding in current_findings:
+                current_findings_ids.append(finding["Id"])
+            # Get the active Prowler findings of that region
+            security_hub_client = audit_info.audit_session.client(
+                "securityhub", region_name=region
+            )
+            findings_filter = {
+                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
+                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
+                "AwsAccountId": [
+                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
+                ],
+                "Region": [{"Value": region, "Comparison": "EQUALS"}],
+            }
+            get_findings_paginator = security_hub_client.get_paginator("get_findings")
+            findings_to_archive = []
+            for page in get_findings_paginator.paginate(Filters=findings_filter):
+                # Archive findings that have not appeared in this execution
+                for finding in page["Findings"]:
+                    if finding["Id"] not in current_findings_ids:
+                        finding["RecordState"] = "ARCHIVED"
+                        finding["UpdatedAt"] = timestamp_utc.strftime(
+                            "%Y-%m-%dT%H:%M:%SZ"
+                        )
+                        findings_to_archive.append(finding)
+            logger.info(f"Archiving {len(findings_to_archive)} findings.")
+            # Send the archived findings to Security Hub in chunks of 100 (API limit)
+            list_chunked = [
+                findings_to_archive[i : i + 100]
+                for i in range(0, len(findings_to_archive), 100)
+            ]
+            for findings in list_chunked:
+                batch_import = security_hub_client.batch_import_findings(
+                    Findings=findings
+                )
+                if batch_import["FailedCount"] > 0:
+                    failed_import = batch_import["FailedFindings"][0]
+                    logger.error(
+                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
+                    )
+        except Exception as error:
+            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
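Security Hub's BatchImportFindings API accepts at most 100 findings per request, which is why resolve_security_hub_previous_findings chunks the archived findings before sending them. The same pattern in isolation (a sketch; batch_import_chunked is an illustrative helper, not part of this patch):

    import boto3

    def batch_import_chunked(findings: list, region: str) -> int:
        # BatchImportFindings caps each request at 100 findings
        client = boto3.client("securityhub", region_name=region)
        failed = 0
        for i in range(0, len(findings), 100):
            response = client.batch_import_findings(Findings=findings[i : i + 100])
            failed += response["FailedCount"]
        return failed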
diff --git a/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.py b/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.py
index 661c1e2f..820bc095 100644
--- a/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.py
+++ b/providers/aws/services/iam/iam_disable_30_days_credentials/iam_disable_30_days_credentials.py
@@ -41,7 +41,7 @@ class iam_disable_30_days_credentials(Check):
         else:
             report = Check_Report(self.metadata)
             report.status = "PASS"
-            report.result_extended = "There is no IAM users"
+            report.status_extended = "There is no IAM users"
             report.region = iam_client.region
 
         findings.append(report)
diff --git a/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.py b/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.py
index 0ed41797..8d96d3fb 100644
--- a/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.py
+++ b/providers/aws/services/iam/iam_disable_90_days_credentials/iam_disable_90_days_credentials.py
@@ -43,5 +43,6 @@ class iam_disable_90_days_credentials(Check):
             report.status = "PASS"
             report.status_extended = "There is no IAM users"
             report.region = "us-east-1"
+            findings.append(report)
 
         return findings
diff --git a/prowler b/prowler
index a7731c20..7a9d6da4 100755
--- a/prowler
+++ b/prowler
@@ -24,7 +24,10 @@ from lib.check.check import (
 from lib.check.checks_loader import load_checks_to_execute
 from lib.logger import logger, set_logging_config
 from lib.outputs.outputs import close_json
-from providers.aws.aws_provider import provider_set_session
+from providers.aws.aws_provider import (
+    provider_set_session,
+    resolve_security_hub_previous_findings,
+)
 
 if __name__ == "__main__":
     # CLI Arguments
@@ -47,9 +50,7 @@ if __name__ == "__main__":
     # Exclude checks options
     parser.add_argument("-e", "--excluded-checks", nargs="+", help="Checks to exclude")
     parser.add_argument("-E", "--excluded-groups", nargs="+", help="Groups to exclude")
-    parser.add_argument(
-        "-S", "--excluded-services", nargs="+", help="Services to exclude"
-    )
+    parser.add_argument("--excluded-services", nargs="+", help="Services to exclude")
     # List checks options
     list_group = parser.add_mutually_exclusive_group()
     list_group.add_argument(
@@ -140,7 +141,12 @@ if __name__ == "__main__":
         nargs="?",
         help="Specify AWS Organizations management role ARN to be assumed, to get Organization metadata",
     )
-
+    parser.add_argument(
+        "-S",
+        "--security-hub",
+        action="store_true",
+        help="Send check output to AWS Security Hub",
+    )
     # Parse Arguments
     args = parser.parse_args()
 
@@ -229,9 +235,16 @@ if __name__ == "__main__":
         print_checks(provider, checks_to_execute, bulk_checks_metadata)
         sys.exit()
 
+    # If Security Hub sending is enabled, the json-asff output mode is required
+    if args.security_hub:
+        if not output_modes:
+            output_modes = ["json-asff"]
+        else:
+            output_modes.append("json-asff")
+
     # Setting output options
     audit_output_options = set_output_options(
-        args.quiet, output_modes, output_directory
+        args.quiet, output_modes, output_directory, args.security_hub
     )
 
     # Set global session
@@ -276,3 +289,7 @@ if __name__ == "__main__":
     for mode in output_modes:
         if mode == "json" or mode == "json-asff":
             close_json(output_directory, audit_info.audited_account, mode)
+
+    # Archive previous Security Hub findings not seen in this execution
+    if args.security_hub:
+        resolve_security_hub_previous_findings(output_directory, audit_info)
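End to end, enabling the integration only requires the new flag (./prowler -S); the json-asff output mode is switched on automatically when missing. For reference, a finding built by fill_json_asff and shipped by send_to_security_hub is shaped roughly like this (every value below is illustrative, including the Remediation payload, whose exact nesting depends on the check metadata model):

    finding = {
        "Id": "prowler-iam_disable_30_days_credentials-123456789012-us-east-1-a1b2c3d4e",
        "ProductArn": "arn:aws:securityhub:us-east-1::product/prowler/prowler",
        "GeneratorId": "prowler-iam_disable_30_days_credentials",
        "AwsAccountId": "123456789012",
        "Types": ["Software and Configuration Checks"],
        "FirstObservedAt": "2022-07-21T10:22:56Z",
        "UpdatedAt": "2022-07-21T10:22:56Z",
        "CreatedAt": "2022-07-21T10:22:56Z",
        "Severity": {"Label": "LOW"},
        "Title": "Ensure credentials unused for 30 days or more are disabled",
        "Description": "Ensure credentials unused for 30 days or more are disabled",
        "Resources": [
            {
                "Type": "AwsIamUser",
                "Id": "example-user",
                "Partition": "aws",
                "Region": "us-east-1",
            }
        ],
        "Compliance": {
            "Status": "PASSED",
            "RelatedRequirements": ["Software and Configuration Checks"],
        },
        "Remediation": {"Recommendation": {"Text": "Disable unused credentials."}},
    }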