refactor(security_hub): Send findings in batches (#2868)

Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
This commit is contained in:
Pepe Fagoaga
2023-09-26 14:10:25 +02:00
committed by GitHub
parent 35e5bbdaf1
commit 6687f76736
7 changed files with 466 additions and 177 deletions

View File

@@ -4,6 +4,8 @@
import os
import sys
from colorama import Fore, Style
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
bulk_load_checks_metadata,
@@ -32,9 +34,13 @@ from prowler.lib.outputs.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics
from prowler.lib.outputs.slack import send_slack_message
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.aws_provider import get_available_aws_service_regions
from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket
from prowler.providers.aws.lib.security_hub.security_hub import (
batch_send_to_security_hub,
prepare_security_hub_findings,
resolve_security_hub_previous_findings,
verify_security_hub_integration_enabled_per_region,
)
from prowler.providers.common.allowlist import set_provider_allowlist
from prowler.providers.common.audit_info import (
@@ -225,13 +231,51 @@ def prowler():
bucket_session,
)
# Resolve previous fails of Security Hub
if provider == "aws" and args.security_hub and not args.skip_sh_update:
resolve_security_hub_previous_findings(
audit_output_options.output_directory,
audit_output_options.output_filename,
audit_info,
# AWS Security Hub Integration
if provider == "aws" and args.security_hub:
print(
f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
)
# Verify where AWS Security Hub is enabled
aws_security_enabled_regions = []
security_hub_regions = (
get_available_aws_service_regions("securityhub", audit_info)
if not audit_info.audited_regions
else audit_info.audited_regions
)
for region in security_hub_regions:
# Save the regions where AWS Security Hub is enabled
if verify_security_hub_integration_enabled_per_region(
region, audit_info.audit_session
):
aws_security_enabled_regions.append(region)
# Prepare the findings to be sent to Security Hub
security_hub_findings_per_region = prepare_security_hub_findings(
findings, audit_info, audit_output_options, aws_security_enabled_regions
)
# Send the findings to Security Hub
findings_sent_to_security_hub = batch_send_to_security_hub(
security_hub_findings_per_region, audit_info.audit_session
)
print(
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
)
# Resolve previous fails of Security Hub
if not args.skip_sh_update:
print(
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
)
findings_archived_in_security_hub = resolve_security_hub_previous_findings(
security_hub_findings_per_region,
audit_info,
)
print(
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
)
# Display summary table
if not args.only_logs:

View File

@@ -693,7 +693,7 @@ class Compliance(BaseModel):
class Check_Output_JSON_ASFF(BaseModel):
"""
Check_Output_JSON_ASFF generates a finding's output in JSON ASFF format.
Check_Output_JSON_ASFF generates a finding's output in JSON ASFF format: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
"""
SchemaVersion: str = "2018-10-08"

View File

@@ -16,7 +16,6 @@ from prowler.lib.outputs.models import (
)
from prowler.providers.aws.lib.allowlist.allowlist import is_allowlisted
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import send_to_security_hub
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
@@ -93,6 +92,7 @@ def report(check_findings, output_options, audit_info):
audit_info,
file_descriptors,
)
# AWS specific outputs
if finding.check_metadata.Provider == "aws":
if "json-asff" in file_descriptors:
@@ -108,19 +108,6 @@ def report(check_findings, output_options, audit_info):
)
file_descriptors["json-asff"].write(",")
# Check if it is needed to send findings to security hub
if (
output_options.security_hub_enabled
and finding.status != "INFO"
):
send_to_security_hub(
output_options.is_quiet,
finding.status,
finding.region,
finding_output,
audit_info.audit_session,
)
# Common outputs
if "html" in file_descriptors:
fill_html(file_descriptors["html"], finding, output_options)

View File

@@ -1,52 +1,105 @@
import json
from itertools import groupby
from operator import itemgetter
from boto3 import session
from prowler.config.config import json_asff_file_suffix, timestamp_utc
from prowler.config.config import timestamp_utc
from prowler.lib.logger import logger
from prowler.lib.outputs.json import fill_json_asff
from prowler.lib.outputs.models import Check_Output_JSON_ASFF
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
SECURITY_HUB_INTEGRATION_NAME = "prowler/prowler"
SECURITY_HUB_MAX_BATCH = 100
def prepare_security_hub_findings(
    findings: list, audit_info, output_options, enabled_regions: list
) -> dict:
    """
    Group the findings to be sent to AWS Security Hub by region.

    A finding is skipped when:
      - its status is INFO (INFO findings are never sent to Security Hub),
      - its region is not in enabled_regions (no Security Hub/Prowler
        integration enabled there),
      - quiet mode is set and the finding is not a FAIL.

    Args:
        findings: list of Check_Report findings from the current execution.
        audit_info: AWS audit info used to fill the ASFF fields.
        output_options: output options (only is_quiet is read here).
        enabled_regions: regions where the Security Hub integration is enabled.

    Returns:
        dict mapping each region to a list of findings already formatted as
        ASFF dicts, ready for BatchImportFindings.
    """
    security_hub_findings_per_region = {}
    for finding in findings:
        # We don't send the INFO findings to AWS Security Hub
        if finding.status == "INFO":
            continue
        # We don't send findings to regions where the integration is not enabled
        if finding.region not in enabled_regions:
            continue
        # Quiet mode only sends FAIL findings
        if output_options.is_quiet and finding.status != "FAIL":
            continue
        # Format the finding in the JSON ASFF format and store it under its region
        finding_json_asff = fill_json_asff(
            Check_Output_JSON_ASFF(), audit_info, finding, output_options
        )
        security_hub_findings_per_region.setdefault(finding.region, []).append(
            finding_json_asff.dict()
        )
    return security_hub_findings_per_region
def verify_security_hub_integration_enabled_per_region(
    region: str,
    session: "session.Session",
) -> bool:
    """
    Return True if Security Hub is enabled in the given region AND the
    prowler/prowler integration accepts findings there, False otherwise.

    Note: a plain string docstring is used here on purpose — the previous
    f-string literal was NOT a docstring (f-strings are not stored in
    __doc__).
    """
    prowler_integration_enabled = False
    try:
        logger.info(
            f"Checking if the {SECURITY_HUB_INTEGRATION_NAME} is enabled in the {region} region."
        )
        # describe_hub raises if Security Hub is not enabled in this region
        security_hub_client = session.client("securityhub", region_name=region)
        security_hub_client.describe_hub()

        # Check if the Prowler integration is subscribed in Security Hub
        # (use the module constant instead of a duplicated literal)
        if SECURITY_HUB_INTEGRATION_NAME not in str(
            security_hub_client.list_enabled_products_for_import()
        ):
            logger.error(
                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://docs.prowler.cloud/en/latest/tutorials/aws/securityhub/"
            )
        else:
            prowler_integration_enabled = True
    except Exception as error:
        # Any API/permission error means we cannot send findings to this region
        logger.error(
            f"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}"
        )
    # Return outside finally: a `return` in `finally` would silently swallow
    # BaseExceptions (e.g. KeyboardInterrupt) not caught above.
    return prowler_integration_enabled
def batch_send_to_security_hub(
    security_hub_findings_per_region: dict,
    session: "session.Session",
) -> int:
    """
    Send the given findings to AWS Security Hub, region by region, in batches.

    Args:
        security_hub_findings_per_region: dict mapping region -> list of ASFF
            finding dicts (as produced by prepare_security_hub_findings).
        session: boto3 session used to create a Security Hub client per region.

    Returns:
        Total number of findings successfully imported across all regions.
    """
    success_count = 0
    # Pre-bind so the except message below cannot hit an unbound `region`
    # if the failure happens before/outside the loop body.
    region = ""
    try:
        # Iterate findings by region
        for region, findings in security_hub_findings_per_region.items():
            logger.info(f"Sending findings to Security Hub in the region {region}")
            security_hub_client = session.client("securityhub", region_name=region)
            # BUGFIX: accumulate across regions — the previous assignment
            # overwrote the counter, so only the last region was reported.
            success_count += __send_findings_to_security_hub__(
                findings, region, security_hub_client
            )
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}"
        )
    return success_count
@@ -56,29 +109,15 @@ def send_to_security_hub(
# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
def resolve_security_hub_previous_findings(
output_directory: str, output_filename: str, audit_info: AWS_Audit_Info
security_hub_findings_per_region: dict, audit_info: AWS_Audit_Info
) -> list:
"""
resolve_security_hub_previous_findings archives all the findings that does not appear in the current execution
"""
logger.info("Checking previous findings in Security Hub to archive them.")
# Read current findings from json-asff file
with open(f"{output_directory}/{output_filename}{json_asff_file_suffix}") as f:
json_asff_file = json.load(f)
# Sort by region
json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
# Group by region
for product_arn, current_findings in groupby(
json_asff_file, key=itemgetter("ProductArn")
):
region = product_arn.split(":")[3]
for region, current_findings in security_hub_findings_per_region.items():
try:
# Check if security hub is enabled in current region
security_hub_client = audit_info.audit_session.client(
"securityhub", region_name=region
)
security_hub_client.describe_hub()
# Get current findings IDs
current_findings_ids = []
for finding in current_findings:
@@ -108,21 +147,41 @@ def resolve_security_hub_previous_findings(
findings_to_archive.append(finding)
logger.info(f"Archiving {len(findings_to_archive)} findings.")
# Send archive findings to SHub
list_chunked = [
findings_to_archive[i : i + 100]
for i in range(0, len(findings_to_archive), 100)
]
for findings in list_chunked:
batch_import = security_hub_client.batch_import_findings(
Findings=findings
)
if batch_import["FailedCount"] > 0:
failed_import = batch_import["FailedFindings"][0]
logger.error(
f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
)
success_count = __send_findings_to_security_hub__(
findings_to_archive, region, security_hub_client
)
return success_count
except Exception as error:
logger.error(
f"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}"
)
def __send_findings_to_security_hub__(
    findings: list, region: str, security_hub_client
) -> int:
    """
    Private helper: send the given ASFF findings to AWS Security Hub in chunks
    of SECURITY_HUB_MAX_BATCH (the BatchImportFindings API limit of 100) and
    return the number of findings successfully imported.
    """
    success_count = 0
    try:
        # BatchImportFindings accepts at most SECURITY_HUB_MAX_BATCH findings per call
        chunks = [
            findings[i : i + SECURITY_HUB_MAX_BATCH]
            for i in range(0, len(findings), SECURITY_HUB_MAX_BATCH)
        ]
        # Use a distinct loop variable — the original shadowed `findings`
        for chunk in chunks:
            batch_import = security_hub_client.batch_import_findings(Findings=chunk)
            if batch_import["FailedCount"] > 0:
                # Log only the first failed finding to avoid flooding the logs
                failed_import = batch_import["FailedFindings"][0]
                logger.error(
                    f"Failed to send findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
                )
            success_count += batch_import["SuccessCount"]
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}"
        )
    # Return outside finally: a `return` in `finally` would swallow
    # BaseExceptions not caught by the except clause above.
    return success_count

View File

@@ -3,8 +3,6 @@ from os import path, remove
from time import mktime
from unittest import mock
import boto3
import botocore
import pytest
from colorama import Fore
from mock import patch
@@ -65,33 +63,10 @@ from prowler.lib.outputs.models import (
from prowler.lib.outputs.outputs import extract_findings_statistics, set_report_color
from prowler.lib.utils.utils import hash_sha512, open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import send_to_security_hub
from prowler.providers.common.models import Audit_Metadata
AWS_ACCOUNT_ID = "123456789012"
# Mocking Security Hub Get Findings
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
    """Intercept the Security Hub API calls used by these tests and return canned responses; delegate everything else to the real botocore call."""
    canned_responses = {
        "BatchImportFindings": {
            "FailedCount": 0,
            "SuccessCount": 1,
        },
        "DescribeHub": {
            "HubArn": "test-hub",
        },
        "ListEnabledProductsForImport": {
            "ProductSubscriptions": [
                "prowler/prowler",
            ],
        },
    }
    if operation_name in canned_responses:
        return canned_responses[operation_name]
    # Fall through to the real botocore implementation for any other operation
    return make_api_call(self, operation_name, kwarg)
class Test_Outputs:
def test_fill_file_descriptors(self):
@@ -1284,74 +1259,6 @@ class Test_Outputs:
assert stats["resources_count"] == 0
assert stats["findings_count"] == 0
@mock.patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
def test_send_to_security_hub(self):
    # NOTE(review): this test exercises the pre-refactor send_to_security_hub
    # API (one finding per call); the batched API is tested elsewhere.
    # Create mock session
    session = boto3.session.Session(
        region_name="eu-west-1",
    )
    # Minimal AWS_Audit_Info: only audit_session is actually used by the call
    # under test; the rest are placeholder values.
    input_audit_info = AWS_Audit_Info(
        session_config=None,
        original_session=None,
        audit_session=session,
        audited_account=AWS_ACCOUNT_ID,
        audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
        audited_identity_arn="test-arn",
        audited_user_id="test",
        audited_partition="aws",
        profile="default",
        profile_region="eu-west-1",
        credentials=None,
        assumed_role_info=None,
        audited_regions=["eu-west-2", "eu-west-1"],
        organizations_metadata=None,
        audit_resources=None,
        mfa_enabled=False,
        audit_metadata=Audit_Metadata(
            services_scanned=0,
            expected_checks=[],
            completed_checks=0,
            audit_progress=0,
        ),
    )
    # Build a PASS finding from the metadata fixture
    finding = Check_Report(
        load_check_metadata(
            f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
        ).json()
    )
    finding.resource_details = "Test resource details"
    finding.resource_id = "test-resource"
    finding.resource_arn = "test-arn"
    finding.region = "eu-west-1"
    finding.status = "PASS"
    finding.status_extended = "This is a test"
    # Fill the ASFF output for the finding before sending it
    finding_output = Check_Output_JSON_ASFF()
    output_options = mock.MagicMock()
    fill_json_asff(finding_output, input_audit_info, finding, output_options)
    # Not quiet: the PASS finding is sent (mocked API reports SuccessCount=1)
    assert (
        send_to_security_hub(
            False,
            finding.status,
            finding.region,
            finding_output,
            input_audit_info.audit_session,
        )
        == 1
    )
    # Setting is_quiet to True
    # Quiet mode: a PASS finding must NOT be sent, so 0 is returned
    assert (
        send_to_security_hub(
            True,
            finding.status,
            finding.region,
            finding_output,
            input_audit_info.audit_session,
        )
        == 0
    )
def test_get_check_compliance(self):
bulk_check_metadata = [
Compliance_Base_Model(

View File

@@ -0,0 +1,58 @@
{
"Categories": [
"cat1",
"cat2"
],
"CheckID": "iam_disable_30_days_credentials",
"CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
"CheckType": [
"Software and Configuration Checks"
],
"Compliance": [
{
"Control": [
"4.4"
],
"Framework": "CIS-AWS",
"Group": [
"level1",
"level2"
],
"Version": "1.4"
}
],
"DependsOn": [
"othercheck1",
"othercheck2"
],
"Description": "Ensure credentials unused for 30 days or greater are disabled",
"Notes": "additional information",
"Provider": "aws",
"RelatedTo": [
"othercheck3",
"othercheck4"
],
"RelatedUrl": "https://serviceofficialsiteorpageforthissubject",
"Remediation": {
"Code": {
"CLI": "cli command or URL to the cli command location.",
"NativeIaC": "code or URL to the code location.",
"Other": "cli command or URL to the cli command location.",
"Terraform": "code or URL to the code location."
},
"Recommendation": {
"Text": "Run sudo yum update and cross your fingers and toes.",
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html"
}
},
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceType": "AwsIamAccessAnalyzer",
"Risk": "Risk associated.",
"ServiceName": "iam",
"Severity": "low",
"SubServiceName": "accessanalyzer",
"Tags": {
"Tag1Key": "value",
"Tag2Key": "value"
}
}

View File

@@ -0,0 +1,234 @@
from os import path
import botocore
from boto3 import session
from mock import MagicMock, patch
from prowler.config.config import timestamp_utc
from prowler.lib.check.models import Check_Report, load_check_metadata
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import (
batch_send_to_security_hub,
prepare_security_hub_findings,
verify_security_hub_integration_enabled_per_region,
)
from prowler.providers.common.models import Audit_Metadata
AWS_ACCOUNT_ID = "123456789012"
AWS_REGION_1 = "eu-west-1"
AWS_REGION_2 = "eu-west-2"
# Mocking Security Hub Get Findings
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
    """Return canned responses for the Security Hub operations exercised by these tests; delegate anything else to the real botocore call."""
    if operation_name == "BatchImportFindings":
        # Pretend every batch import succeeds with exactly one finding
        return {"FailedCount": 0, "SuccessCount": 1}
    elif operation_name == "DescribeHub":
        # Security Hub is enabled in AWS_REGION_1
        return {
            "HubArn": f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:hub/default",
            "SubscribedAt": "2023-02-07T09:45:43.742Z",
            "AutoEnableControls": True,
            "ControlFindingGenerator": "STANDARD_CONTROL",
        }
    elif operation_name == "ListEnabledProductsForImport":
        # The Prowler integration is subscribed in AWS_REGION_1
        return {
            "ProductSubscriptions": [
                f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:product-subscription/prowler/prowler",
            ]
        }
    # Fall through to the real botocore implementation for any other operation
    return make_api_call(self, operation_name, kwarg)
class Test_SecurityHub:
    """Tests for the Security Hub integration helpers (prepare/verify/batch-send)."""

    def set_mocked_audit_info(self):
        # Mocked AWS audit info covering two audited regions; no real
        # credentials or sessions are attached.
        return AWS_Audit_Info(
            session_config=None,
            original_session=None,
            audit_session=None,
            audited_account=AWS_ACCOUNT_ID,
            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
            audited_identity_arn="test-arn",
            audited_user_id="test",
            audited_partition="aws",
            profile="default",
            profile_region="eu-west-1",
            credentials=None,
            assumed_role_info=None,
            audited_regions=["eu-west-2", "eu-west-1"],
            organizations_metadata=None,
            audit_resources=None,
            mfa_enabled=False,
            audit_metadata=Audit_Metadata(
                services_scanned=0,
                expected_checks=[],
                completed_checks=0,
                audit_progress=0,
            ),
        )

    def generate_finding(self, status, region):
        # Build a Check_Report from the metadata fixture and override only the
        # fields relevant for Security Hub routing (status and region).
        finding = Check_Report(
            load_check_metadata(
                f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
            ).json()
        )
        finding.status = status
        finding.status_extended = "test"
        finding.resource_id = "test"
        finding.resource_arn = "test"
        finding.region = region
        return finding

    def set_mocked_output_options(self, is_quiet):
        # NOTE(review): MagicMock here is the class, not an instance
        # (missing parentheses) — attributes are set at class level. It works
        # for these tests but consider MagicMock(); confirm intent.
        output_options = MagicMock
        output_options.bulk_checks_metadata = {}
        output_options.is_quiet = is_quiet
        return output_options

    def set_mocked_session(self, region):
        # Create mock session
        return session.Session(
            region_name=region,
        )

    @patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
    def test_verify_security_hub_integration_enabled_per_region(self):
        # The mocked API reports an enabled hub and a prowler/prowler
        # subscription, so the verification must succeed.
        session = self.set_mocked_session(AWS_REGION_1)
        assert verify_security_hub_integration_enabled_per_region(AWS_REGION_1, session)

    def test_prepare_security_hub_findings_enabled_region_not_quiet(self):
        # A PASS finding in an enabled region (not quiet) is formatted as a
        # full ASFF dict under its region key.
        enabled_regions = [AWS_REGION_1]
        output_options = self.set_mocked_output_options(is_quiet=False)
        findings = [self.generate_finding("PASS", AWS_REGION_1)]
        audit_info = self.set_mocked_audit_info()
        assert prepare_security_hub_findings(
            findings,
            audit_info,
            output_options,
            enabled_regions,
        ) == {
            AWS_REGION_1: [
                {
                    "SchemaVersion": "2018-10-08",
                    "Id": f"prowler-iam_disable_30_days_credentials-{AWS_ACCOUNT_ID}-{AWS_REGION_1}-ee26b0dd4",
                    "ProductArn": f"arn:aws:securityhub:{AWS_REGION_1}::product/prowler/prowler",
                    "RecordState": "ACTIVE",
                    "ProductFields": {
                        "ProviderName": "Prowler",
                        "ProviderVersion": "3.9.0",
                        "ProwlerResourceName": "test",
                    },
                    "GeneratorId": "prowler-iam_disable_30_days_credentials",
                    "AwsAccountId": f"{AWS_ACCOUNT_ID}",
                    "Types": ["Software and Configuration Checks"],
                    "FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
                    "UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
                    "CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
                    "Severity": {"Label": "LOW"},
                    "Title": "Ensure credentials unused for 30 days or greater are disabled",
                    "Description": "test",
                    "Resources": [
                        {
                            "Type": "AwsIamAccessAnalyzer",
                            "Id": "test",
                            "Partition": "aws",
                            "Region": f"{AWS_REGION_1}",
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED",
                        "RelatedRequirements": [],
                        "AssociatedStandards": [],
                    },
                    "Remediation": {
                        "Recommendation": {
                            "Text": "Run sudo yum update and cross your fingers and toes.",
                            "Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
                        }
                    },
                }
            ]
        }

    def test_prepare_security_hub_findings_quiet_INFO_finding(self):
        # NOTE(review): despite the "quiet" in the name, this test runs with
        # is_quiet=False — it checks that INFO findings are always dropped.
        enabled_regions = [AWS_REGION_1]
        output_options = self.set_mocked_output_options(is_quiet=False)
        findings = [self.generate_finding("INFO", AWS_REGION_1)]
        audit_info = self.set_mocked_audit_info()
        assert (
            prepare_security_hub_findings(
                findings,
                audit_info,
                output_options,
                enabled_regions,
            )
            == {}
        )

    def test_prepare_security_hub_findings_disabled_region(self):
        # Findings in a region without the Security Hub integration are dropped.
        enabled_regions = [AWS_REGION_1]
        output_options = self.set_mocked_output_options(is_quiet=False)
        findings = [self.generate_finding("PASS", AWS_REGION_2)]
        audit_info = self.set_mocked_audit_info()
        assert (
            prepare_security_hub_findings(
                findings,
                audit_info,
                output_options,
                enabled_regions,
            )
            == {}
        )

    def test_prepare_security_hub_findings_quiet(self):
        # Quiet mode drops non-FAIL findings (a PASS finding here).
        enabled_regions = [AWS_REGION_1]
        output_options = self.set_mocked_output_options(is_quiet=True)
        findings = [self.generate_finding("PASS", AWS_REGION_1)]
        audit_info = self.set_mocked_audit_info()
        assert (
            prepare_security_hub_findings(
                findings,
                audit_info,
                output_options,
                enabled_regions,
            )
            == {}
        )

    @patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
    def test_batch_send_to_security_hub_one_finding(self):
        # End-to-end over the mocked API: one prepared finding is imported and
        # the mocked BatchImportFindings reports SuccessCount == 1.
        enabled_regions = [AWS_REGION_1]
        output_options = self.set_mocked_output_options(is_quiet=False)
        findings = [self.generate_finding("PASS", AWS_REGION_1)]
        audit_info = self.set_mocked_audit_info()
        session = self.set_mocked_session(AWS_REGION_1)
        security_hub_findings = prepare_security_hub_findings(
            findings,
            audit_info,
            output_options,
            enabled_regions,
        )
        assert (
            batch_send_to_security_hub(
                security_hub_findings,
                session,
            )
            == 1
        )