Mirror of https://github.com/ghndrx/prowler.git (synced 2026-02-10 14:55:00 +00:00)
feat(shub): add Security Hub integration (#1255)
@@ -1,7 +1,8 @@
-from datetime import datetime
+from datetime import datetime, timezone
 from os import getcwd

 timestamp = datetime.today()
+timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
 prowler_version = "3.0-alfa"

 # Groups
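A note on the new `timestamp_utc` above: `datetime.now(timezone.utc)` already returns a timezone-aware datetime, so the trailing `.replace(tzinfo=timezone.utc)` is effectively a no-op kept for explicitness. A minimal sketch of the value it produces (the strftime pattern is the one used later in this commit):

    from datetime import datetime, timezone

    # Already timezone-aware; .replace(tzinfo=timezone.utc) would not change it
    timestamp_utc = datetime.now(timezone.utc)
    print(timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"))  # e.g. 2022-11-04T10:15:30Z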
@@ -173,12 +173,18 @@ def import_check(check_path: str) -> ModuleType:
     return lib


-def set_output_options(quiet: bool, output_modes: list, input_output_directory: str):
+def set_output_options(
+    quiet: bool,
+    output_modes: list,
+    input_output_directory: str,
+    security_hub_enabled: bool,
+):
     global output_options
     output_options = Output_From_Options(
         is_quiet=quiet,
         output_modes=output_modes,
-        output_directory=input_output_directory
+        output_directory=input_output_directory,
+        security_hub_enabled=security_hub_enabled
         # set input options here
     )
     return output_options
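For context, a hedged sketch of how the reworked signature is called (the real call site appears in the `prowler` entrypoint hunk further down; the values here are examples, not from the diff):

    # Hypothetical invocation of the new multi-line signature
    audit_output_options = set_output_options(
        quiet=False,
        output_modes=["csv", "json-asff"],
        input_output_directory="/tmp/prowler-output",  # example path
        security_hub_enabled=True,
    )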
@@ -13,6 +13,7 @@ class Output_From_Options:
     is_quiet: bool
     output_modes: list
     output_directory: str
+    security_hub_enabled: bool


 # Testing Pending
@@ -89,6 +89,7 @@ class Check_Output_JSON_ASFF(BaseModel):
     Description: str = ""
     Resources: List[Resource] = None
     Compliance: Compliance = None
+    Remediation: dict = None


 @dataclass
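One caveat on the new `Remediation: dict = None` field: in the ASFF specification, `Remediation` is an object whose `Recommendation` member is itself an object with `Text` and an optional `Url`. Whether `finding.check_metadata.Remediation.Recommendation` (used further down) already has that shape is not visible in the diff; a spec-shaped payload looks roughly like this (illustrative values):

    # Spec-shaped ASFF Remediation (illustrative values)
    remediation = {
        "Recommendation": {
            "Text": "Rotate credentials older than 90 days.",
            "Url": "https://docs.aws.amazon.com/securityhub/",  # optional
        }
    }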
@@ -10,6 +10,7 @@ from config.config import (
     json_file_suffix,
     prowler_version,
     timestamp,
+    timestamp_utc,
 )
 from lib.outputs.models import (
     Check_Output_CSV,
@@ -20,7 +21,8 @@ from lib.outputs.models import (
     Resource,
     Severity,
 )
-from lib.utils.utils import file_exists, open_file
+from lib.utils.utils import file_exists, hash_sha512, open_file
+from providers.aws.aws_provider import send_to_security_hub


 def report(check_findings, output_options, audit_info):
@@ -83,6 +85,12 @@ def report(check_findings, output_options, audit_info):
                     )
                     file_descriptors["json-asff"].write(",")

+                    # Check if it is needed to send findings to security hub
+                    if output_options.security_hub_enabled:
+                        send_to_security_hub(
+                            finding.region, finding_output, audit_info.audit_session
+                        )
+
     if file_descriptors:
         # Close all file descriptors
         for file_descriptor in file_descriptors:
@@ -189,18 +197,21 @@ def fill_json(finding_output, audit_info, finding):


 def fill_json_asff(finding_output, audit_info, finding):
-    finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{str(hash(finding.resource_id))}"
+    # Check if there are no resources in the finding
+    if finding.resource_id == "":
+        finding.resource_id = "NONE_PROVIDED"
+    finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{hash_sha512(finding.resource_id)}"
     finding_output.ProductArn = f"arn:{audit_info.audited_partition}:securityhub:{finding.region}::product/prowler/prowler"
     finding_output.ProductFields = ProductFields(
         ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_id
     )
     finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
     finding_output.AwsAccountId = audit_info.audited_account
-    finding_output.Types = finding.check_metadata.CheckType
+    finding_output.Types = [finding.check_metadata.CheckType]
     finding_output.FirstObservedAt = (
         finding_output.UpdatedAt
-    ) = finding_output.CreatedAt = timestamp.isoformat()
-    finding_output.Severity = Severity(Label=finding.check_metadata.Severity)
+    ) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+    finding_output.Severity = Severity(Label=finding.check_metadata.Severity.upper())
     finding_output.Title = finding.check_metadata.CheckTitle
     finding_output.Description = finding.check_metadata.Description
     finding_output.Resources = [
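Several of the edits above align the payload with what Security Hub's `BatchImportFindings` validates: `Types` must be a list of strings, `Severity.Label` must be one of the uppercase values INFORMATIONAL, LOW, MEDIUM, HIGH or CRITICAL (hence the `.upper()`), and the timestamps must be ISO 8601 strings in UTC. A minimal sketch of the normalized fields (values are examples, not from the diff):

    finding = {
        "Id": "prowler-check_id-123456789012-eu-west-1-1a2b3c4d5",  # example Id
        "Types": ["Software and Configuration Checks"],  # a list, not a string
        "Severity": {"Label": "MEDIUM"},                 # uppercase enum value
        "CreatedAt": "2022-11-04T10:15:30Z",             # ISO 8601, UTC
        "UpdatedAt": "2022-11-04T10:15:30Z",
    }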
@@ -211,9 +222,14 @@ def fill_json_asff(finding_output, audit_info, finding):
             Region=finding.region,
         )
     ]
+    # Add ED to PASS or FAIL (PASSED/FAILED)
     finding_output.Compliance = Compliance(
-        Status=finding.status, RelatedRequirements=[finding.check_metadata.CheckType]
+        Status=finding.status + "ED",
+        RelatedRequirements=[finding.check_metadata.CheckType],
     )
+    finding_output.Remediation = {
+        "Recommendation": finding.check_metadata.Remediation.Recommendation
+    }

     return finding_output

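The `+ "ED"` trick works because Prowler's statuses are exactly `PASS` and `FAIL`, while ASFF's `Compliance.Status` enum uses `PASSED`, `WARNING`, `FAILED` and `NOT_AVAILABLE`. A sketch of the mapping and its limits:

    # "PASS" -> "PASSED", "FAIL" -> "FAILED"; WARNING and NOT_AVAILABLE
    # can never be produced by this concatenation.
    def to_asff_status(prowler_status: str) -> str:
        return prowler_status + "ED"

    assert to_asff_status("PASS") == "PASSED"
    assert to_asff_status("FAIL") == "FAILED"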
@@ -1,5 +1,6 @@
 import json
 import sys
+from hashlib import sha512
 from io import TextIOWrapper
 from os.path import exists
 from typing import Any
@@ -40,3 +41,8 @@ def file_exists(filename: str):
         return True
     else:
         return False
+
+
+# create sha512 hash for string
+def hash_sha512(string: str) -> str:
+    return sha512(string.encode("utf-8")).hexdigest()[0:9]
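The move away from the built-in `hash()` in the finding Id (see the `fill_json_asff` hunk above) matters: since Python 3.3, `str` hashing is salted per interpreter process (PYTHONHASHSEED), so Ids derived from `hash()` change on every run and previous findings could never be matched for archiving. The SHA-512 prefix is deterministic, at the cost of truncating to 9 hex characters (36 bits). A quick demonstration:

    from hashlib import sha512

    def hash_sha512(string: str) -> str:
        return sha512(string.encode("utf-8")).hexdigest()[0:9]

    # Stable across runs and machines, unlike hash("my-bucket"),
    # which is salted per interpreter process for str inputs
    assert hash_sha512("my-bucket") == hash_sha512("my-bucket")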
@@ -1,12 +1,17 @@
+import json
 import sys
+from itertools import groupby
+from operator import itemgetter

 from arnparse import arnparse
 from boto3 import client, session
 from botocore.credentials import RefreshableCredentials
 from botocore.session import get_session

+from config.config import json_asff_file_suffix, timestamp_utc
 from lib.arn.arn import arn_parsing
 from lib.logger import logger
+from lib.outputs.models import Check_Output_JSON_ASFF
 from providers.aws.models import (
     AWS_Assume_Role,
     AWS_Audit_Info,
@@ -271,3 +276,104 @@ def get_organizations_metadata(
         account_details_tags=account_details_tags,
     )
     return organizations_info
+
+
+def send_to_security_hub(
+    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
+):
+    try:
+        logger.info("Sending findings to Security Hub.")
+        # Check if security hub is enabled in current region
+        security_hub_client = session.client("securityhub", region_name=region)
+        security_hub_client.describe_hub()
+
+        # Check if Prowler integration is enabled in Security Hub
+        if "prowler/prowler" not in str(
+            security_hub_client.list_enabled_products_for_import()
+        ):
+            logger.error(
+                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
+            )
+
+        # Send finding to Security Hub
+        batch_import = security_hub_client.batch_import_findings(
+            Findings=[finding_output.dict()]
+        )
+        if batch_import["FailedCount"] > 0:
+            failed_import = batch_import["FailedFindings"][0]
+            logger.error(
+                f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
+            )
+
+    except Exception as error:
+        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
+
+
+# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
+def resolve_security_hub_previous_findings(
+    output_directory: str, audit_info: AWS_Audit_Info
+) -> list:
+    logger.info("Checking previous findings in Security Hub to archive them.")
+    # Read current findings from json-asff file
+    with open(
+        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
+    ) as f:
+        json_asff_file = json.load(f)
+
+    # Sort by region
+    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
+    # Group by region
+    for product_arn, current_findings in groupby(
+        json_asff_file, key=itemgetter("ProductArn")
+    ):
+        region = product_arn.split(":")[3]
+        try:
+            # Check if security hub is enabled in current region
+            security_hub_client = audit_info.audit_session.client(
+                "securityhub", region_name=region
+            )
+            security_hub_client.describe_hub()
+            # Get current findings IDs
+            current_findings_ids = []
+            for finding in current_findings:
+                current_findings_ids.append(finding["Id"])
+            # Get findings of that region
+            security_hub_client = audit_info.audit_session.client(
+                "securityhub", region_name=region
+            )
+            findings_filter = {
+                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
+                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
+                "AwsAccountId": [
+                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
+                ],
+                "Region": [{"Value": region, "Comparison": "EQUALS"}],
+            }
+            get_findings_paginator = security_hub_client.get_paginator("get_findings")
+            findings_to_archive = []
+            for page in get_findings_paginator.paginate(Filters=findings_filter):
+                # Archive findings that have not appear in this execution
+                for finding in page["Findings"]:
+                    if finding["Id"] not in current_findings_ids:
+                        finding["RecordState"] = "ARCHIVED"
+                        finding["UpdatedAt"] = timestamp_utc.strftime(
+                            "%Y-%m-%dT%H:%M:%SZ"
+                        )
+                        findings_to_archive.append(finding)
+            logger.info(f"Archiving {len(findings_to_archive)} findings.")
+            # Send archive findings to SHub
+            list_chunked = [
+                findings_to_archive[i : i + 100]
+                for i in range(0, len(findings_to_archive), 100)
+            ]
+            for findings in list_chunked:
+                batch_import = security_hub_client.batch_import_findings(
+                    Findings=findings
+                )
+                if batch_import["FailedCount"] > 0:
+                    failed_import = batch_import["FailedFindings"][0]
+                    logger.error(
+                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
+                    )
+        except Exception as error:
+            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
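Two implementation details in the functions above are worth spelling out. `describe_hub()` is used purely as a probe: it raises when Security Hub is not enabled in the region, which the `except` clause turns into a log line. And `batch_import_findings` accepts at most 100 findings per request, which is what the slicing into `list_chunked` implements. A sketch of that chunking in isolation:

    # BatchImportFindings caps each request at 100 findings, hence the slicing
    def chunks_of_100(findings: list) -> list:
        return [findings[i : i + 100] for i in range(0, len(findings), 100)]

    assert len(chunks_of_100(list(range(250)))) == 3  # 100 + 100 + 50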
@@ -41,7 +41,7 @@ class iam_disable_30_days_credentials(Check):
         else:
             report = Check_Report(self.metadata)
             report.status = "PASS"
-            report.result_extended = "There is no IAM users"
+            report.status_extended = "There is no IAM users"
             report.region = iam_client.region
             findings.append(report)

@@ -43,5 +43,6 @@ class iam_disable_90_days_credentials(Check):
             report.status = "PASS"
             report.status_extended = "There is no IAM users"
             report.region = "us-east-1"
+            findings.append(report)

         return findings
prowler (29 lines changed)
@@ -24,7 +24,10 @@ from lib.check.check import (
 from lib.check.checks_loader import load_checks_to_execute
 from lib.logger import logger, set_logging_config
 from lib.outputs.outputs import close_json
-from providers.aws.aws_provider import provider_set_session
+from providers.aws.aws_provider import (
+    provider_set_session,
+    resolve_security_hub_previous_findings,
+)

 if __name__ == "__main__":
     # CLI Arguments
@@ -47,9 +50,7 @@ if __name__ == "__main__":
     # Exclude checks options
     parser.add_argument("-e", "--excluded-checks", nargs="+", help="Checks to exclude")
     parser.add_argument("-E", "--excluded-groups", nargs="+", help="Groups to exclude")
-    parser.add_argument(
-        "-S", "--excluded-services", nargs="+", help="Services to exclude"
-    )
+    parser.add_argument("--excluded-services", nargs="+", help="Services to exclude")
     # List checks options
     list_group = parser.add_mutually_exclusive_group()
     list_group.add_argument(
@@ -140,7 +141,12 @@ if __name__ == "__main__":
         nargs="?",
         help="Specify AWS Organizations management role ARN to be assumed, to get Organization metadata",
     )
+    parser.add_argument(
+        "-S",
+        "--security-hub",
+        action="store_true",
+        help="Send check output to AWS Security Hub",
+    )
     # Parse Arguments
     args = parser.parse_args()

@@ -229,9 +235,16 @@ if __name__ == "__main__":
         print_checks(provider, checks_to_execute, bulk_checks_metadata)
         sys.exit()

+    # If security hub sending enabled, it is need to create json-asff output
+    if args.security_hub:
+        if not output_modes:
+            output_modes = ["json-asff"]
+        else:
+            output_modes.append("json-asff")
+
     # Setting output options
     audit_output_options = set_output_options(
-        args.quiet, output_modes, output_directory
+        args.quiet, output_modes, output_directory, args.security_hub
     )

     # Set global session
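Because the ASFF JSON file doubles as both the Security Hub payload and the input for the later archiving pass, `--security-hub` force-enables the `json-asff` writer. The branch above can also be collapsed; a hedged equivalent, assuming `output_modes` is either `None` or a list:

    # Equivalent one-liner for the branch above
    if args.security_hub:
        output_modes = (output_modes or []) + ["json-asff"]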
@@ -276,3 +289,7 @@ if __name__ == "__main__":
     for mode in output_modes:
         if mode == "json" or mode == "json-asff":
             close_json(output_directory, audit_info.audited_account, mode)
+
+    # Resolve previous fails of Security Hub
+    if args.security_hub:
+        resolve_security_hub_previous_findings(output_directory, audit_info)
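A possible refinement to `resolve_security_hub_previous_findings` shown earlier: `finding["Id"] not in current_findings_ids` is an O(n) list scan per finding, so with many findings a set keeps the whole archiving pass linear. A sketch, assuming `current_findings` as in the diff:

    # O(1) membership tests instead of a list scan
    current_findings_ids = {finding["Id"] for finding in current_findings}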