mirror of https://github.com/ghndrx/prowler.git
synced 2026-02-10 14:55:00 +00:00

feat(s3_checks): add several checks for s3 (#1266)

* feat(checks): add extra718
* feat(checks): add extra763

Co-authored-by: sergargar <sergio@verica.io>
@@ -8,10 +8,10 @@ from boto3 import client, session
 from botocore.credentials import RefreshableCredentials
 from botocore.session import get_session

-from config.config import json_asff_file_suffix, timestamp_utc
+from config.config import aws_services_json_file
 from lib.arn.arn import arn_parsing
 from lib.logger import logger
-from lib.outputs.models import Check_Output_JSON_ASFF
+from lib.utils.utils import open_file, parse_json_file
 from providers.aws.models import (
     AWS_Assume_Role,
     AWS_Audit_Info,
@@ -278,102 +278,21 @@ def get_organizations_metadata(
     return organizations_info


-def send_to_security_hub(
-    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
-):
-    try:
-        logger.info("Sending findings to Security Hub.")
-        # Check if security hub is enabled in current region
-        security_hub_client = session.client("securityhub", region_name=region)
-        security_hub_client.describe_hub()
-
-        # Check if Prowler integration is enabled in Security Hub
-        if "prowler/prowler" not in str(
-            security_hub_client.list_enabled_products_for_import()
-        ):
-            logger.error(
-                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
-            )
-
-        # Send finding to Security Hub
-        batch_import = security_hub_client.batch_import_findings(
-            Findings=[finding_output.dict()]
-        )
-        if batch_import["FailedCount"] > 0:
-            failed_import = batch_import["FailedFindings"][0]
-            logger.error(
-                f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
-            )
-
-    except Exception as error:
-        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
-
-
-# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
-def resolve_security_hub_previous_findings(
-    output_directory: str, audit_info: AWS_Audit_Info
-) -> list:
-    logger.info("Checking previous findings in Security Hub to archive them.")
-    # Read current findings from json-asff file
-    with open(
-        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
-    ) as f:
-        json_asff_file = json.load(f)
-
-    # Sort by region
-    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
-    # Group by region
-    for product_arn, current_findings in groupby(
-        json_asff_file, key=itemgetter("ProductArn")
-    ):
-        region = product_arn.split(":")[3]
-        try:
-            # Check if security hub is enabled in current region
-            security_hub_client = audit_info.audit_session.client(
-                "securityhub", region_name=region
-            )
-            security_hub_client.describe_hub()
-            # Get current findings IDs
-            current_findings_ids = []
-            for finding in current_findings:
-                current_findings_ids.append(finding["Id"])
-            # Get findings of that region
-            security_hub_client = audit_info.audit_session.client(
-                "securityhub", region_name=region
-            )
-            findings_filter = {
-                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
-                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
-                "AwsAccountId": [
-                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
-                ],
-                "Region": [{"Value": region, "Comparison": "EQUALS"}],
-            }
-            get_findings_paginator = security_hub_client.get_paginator("get_findings")
-            findings_to_archive = []
-            for page in get_findings_paginator.paginate(Filters=findings_filter):
-                # Archive findings that have not appear in this execution
-                for finding in page["Findings"]:
-                    if finding["Id"] not in current_findings_ids:
-                        finding["RecordState"] = "ARCHIVED"
-                        finding["UpdatedAt"] = timestamp_utc.strftime(
-                            "%Y-%m-%dT%H:%M:%SZ"
-                        )
-                        findings_to_archive.append(finding)
-            logger.info(f"Archiving {len(findings_to_archive)} findings.")
-            # Send archive findings to SHub
-            list_chunked = [
-                findings_to_archive[i : i + 100]
-                for i in range(0, len(findings_to_archive), 100)
-            ]
-            for findings in list_chunked:
-                batch_import = security_hub_client.batch_import_findings(
-                    Findings=findings
-                )
-                if batch_import["FailedCount"] > 0:
-                    failed_import = batch_import["FailedFindings"][0]
-                    logger.error(
-                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
-                    )
-        except Exception as error:
-            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
+def generate_regional_clients(service, audit_info):
+    regional_clients = []
+    # Get json locally
+    f = open_file(aws_services_json_file)
+    data = parse_json_file(f)
+    json_regions = data["services"][service]["regions"][audit_info.audited_partition]
+    if audit_info.audited_regions:  # Check for input aws audit_info.audited_regions
+        regions = list(
+            set(json_regions).intersection(audit_info.audited_regions)
+        )  # Get common regions between input and json
+    else:  # Get all regions from json of the service and partition
+        regions = json_regions
+    for region in regions:
+        regional_client = audit_info.audit_session.client(service, region_name=region)
+        regional_client.region = region
+        regional_clients.append(regional_client)
+
+    return regional_clients
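Both hunks above edit the shared provider module (evidently providers/aws/aws_provider.py, since the service files below import generate_regional_clients from providers.aws.aws_provider): the Security Hub helpers move out, and the new generate_regional_clients helper centralizes per-region boto3 client creation. A minimal sketch of the same region-resolution logic, using a SimpleNamespace stand-in for Prowler's audit_info object and an inline dict in place of the aws_services_json_file contents (both stand-ins are assumptions for illustration):

from types import SimpleNamespace

import boto3

# Hypothetical stand-in for Prowler's AWS_Audit_Info
audit_info = SimpleNamespace(
    audit_session=boto3.session.Session(),
    audited_partition="aws",
    audited_regions=["eu-west-1", "us-east-1"],  # empty/None would mean "all regions"
)

# Same shape as the data parsed from aws_services_json_file
data = {"services": {"ec2": {"regions": {"aws": ["us-east-1", "eu-west-1", "ap-south-1"]}}}}

json_regions = data["services"]["ec2"]["regions"][audit_info.audited_partition]
if audit_info.audited_regions:
    regions = list(set(json_regions).intersection(audit_info.audited_regions))
else:
    regions = json_regions

regional_clients = []
for region in regions:
    regional_client = audit_info.audit_session.client("ec2", region_name=region)
    regional_client.region = region  # tag each client with its region, as the helper does
    regional_clients.append(regional_client)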
@@ -1,9 +1,7 @@
 import threading

-from config.config import aws_services_json_file
 from lib.logger import logger
-from lib.utils.utils import open_file, parse_json_file
-from providers.aws.aws_provider import current_audit_info
+from providers.aws.aws_provider import current_audit_info, generate_regional_clients


 ################## EC2
@@ -12,37 +10,12 @@ class EC2:
         self.service = "ec2"
         self.session = audit_info.audit_session
         self.audited_account = audit_info.audited_account
-        self.regional_clients = self.__generate_regional_clients__(
-            self.service, audit_info
-        )
+        self.regional_clients = generate_regional_clients(self.service, audit_info)
         self.__threading_call__(self.__describe_snapshots__)

     def __get_session__(self):
         return self.session

-    def __generate_regional_clients__(self, service, audit_info):
-        regional_clients = []
-        # Get json locally
-        f = open_file(aws_services_json_file)
-        data = parse_json_file(f)
-        json_regions = data["services"][service]["regions"][
-            audit_info.audited_partition
-        ]
-        if audit_info.audited_regions:  # Check for input aws audit_info.audited_regions
-            regions = list(
-                set(json_regions).intersection(audit_info.audited_regions)
-            )  # Get common regions between input and json
-        else:  # Get all regions from json of the service and partition
-            regions = json_regions
-        for region in regions:
-            regional_client = audit_info.audit_session.client(
-                service, region_name=region
-            )
-            regional_client.region = region
-            regional_clients.append(regional_client)
-
-        return regional_clients
-
     def __threading_call__(self, call):
         threads = []
         for regional_client in self.regional_clients:
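The EC2 hunk removes the service-local __generate_regional_clients__ method in favor of the shared helper, so every service class builds its regional clients identically. A sketch of what a new service adopting the pattern would look like (SNS here is a hypothetical example, not part of this commit):

from providers.aws.aws_provider import current_audit_info, generate_regional_clients


class SNS:  # hypothetical service, shown only to illustrate the shared pattern
    def __init__(self, audit_info):
        self.service = "sns"
        self.session = audit_info.audit_session
        self.regional_clients = generate_regional_clients(self.service, audit_info)


sns_client = SNS(current_audit_info)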
providers/aws/services/s3/__init__.py | 0 (new file)
@@ -0,0 +1,37 @@
+{
+  "Categories": [],
+  "CheckAlias": "extra763",
+  "CheckID": "s3_bucket_object_versioning",
+  "CheckName": "s3_bucket_object_versioning",
+  "CheckTitle": "Check if S3 buckets have object versioning enabled",
+  "CheckType": "Data Protection",
+  "Compliance": [],
+  "DependsOn": [],
+  "Description": "Check if S3 buckets have object versioning enabled",
+  "Notes": "",
+  "Provider": "aws",
+  "RelatedTo": [],
+  "RelatedUrl": "",
+  "Remediation": {
+    "Code": {
+      "CLI": "",
+      "NativeIaC": "",
+      "Other": "",
+      "Terraform": ""
+    },
+    "Recommendation": {
+      "Text": "Configure versioning using the Amazon console or API for buckets with sensitive information that changes frequently, where backups may not be enough to capture all the changes.",
+      "Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev-retired/Versioning.html"
+    }
+  },
+  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
+  "ResourceType": "AwsS3Bucket",
+  "Risk": "With versioning, you can easily recover from both unintended user actions and application failures.",
+  "ServiceName": "s3",
+  "Severity": "medium",
+  "SubServiceName": "",
+  "Tags": {
+    "Tag1Key": "value",
+    "Tag2Key": "value"
+  }
+}
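The Remediation.Code fields above are left empty. For reference, versioning can be turned on with the standard AWS S3 API; a minimal boto3 sketch (the bucket name is a placeholder):

import boto3

s3 = boto3.client("s3")
s3.put_bucket_versioning(
    Bucket="my-example-bucket",  # placeholder bucket name
    VersioningConfiguration={"Status": "Enabled"},
)

The equivalent CLI is: aws s3api put-bucket-versioning --bucket my-example-bucket --versioning-configuration Status=Enabled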
@@ -0,0 +1,33 @@
+from lib.check.models import Check, Check_Report
+from providers.aws.services.s3.s3_service import s3_client
+
+
+class s3_bucket_object_versioning(Check):
+    def execute(self):
+        findings = []
+        for regional_client in s3_client.regional_clients:
+            region = regional_client.region
+            if regional_client.buckets:
+                for bucket in regional_client.buckets:
+                    report = Check_Report(self.metadata)
+                    report.region = region
+                    report.resource_id = bucket.name
+                    if bucket.versioning:
+                        report.status = "PASS"
+                        report.status_extended = (
+                            f"S3 Bucket {bucket.name} has versioning enabled."
+                        )
+                    else:
+                        report.status = "FAIL"
+                        report.status_extended = (
+                            f"S3 Bucket {bucket.name} has versioning disabled."
+                        )
+                    findings.append(report)
+            else:
+                report = Check_Report(self.metadata)
+                report.status = "PASS"
+                report.status_extended = "There are no S3 buckets."
+                report.region = region
+                findings.append(report)
+
+        return findings
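The check reads the versioning flag that s3_service.py (added later in this commit) derives from get_bucket_versioning. A standalone probe of that underlying API, for reference (the bucket name is a placeholder):

import boto3

s3 = boto3.client("s3")
response = s3.get_bucket_versioning(Bucket="my-example-bucket")  # placeholder
# "Status" is absent until versioning has ever been configured, and can also be
# "Suspended"; only an explicit "Enabled" counts as PASS for this check.
versioning_enabled = response.get("Status") == "Enabled"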
@@ -0,0 +1,37 @@
+{
+  "Categories": [],
+  "CheckAlias": "extra718",
+  "CheckID": "s3_bucket_server_access_logging_enabled",
+  "CheckName": "s3_bucket_server_access_logging_enabled",
+  "CheckTitle": "Check if S3 buckets have server access logging enabled",
+  "CheckType": "Logging and Monitoring",
+  "Compliance": [],
+  "DependsOn": [],
+  "Description": "Check if S3 buckets have server access logging enabled",
+  "Notes": "",
+  "Provider": "aws",
+  "RelatedTo": [],
+  "RelatedUrl": "",
+  "Remediation": {
+    "Code": {
+      "CLI": "",
+      "NativeIaC": "",
+      "Other": "",
+      "Terraform": ""
+    },
+    "Recommendation": {
+      "Text": "Ensure that S3 buckets have logging enabled. CloudTrail data events can be used in place of S3 bucket logging; if that is the case, this finding can be considered a false positive.",
+      "Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html"
+    }
+  },
+  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
+  "ResourceType": "AwsS3Bucket",
+  "Risk": "Server access logs can assist you in security and access audits, help you learn about your customer base, and help you understand your Amazon S3 bill.",
+  "ServiceName": "s3",
+  "Severity": "medium",
+  "SubServiceName": "",
+  "Tags": {
+    "Tag1Key": "value",
+    "Tag2Key": "value"
+  }
+}
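As with the versioning metadata, the Remediation.Code fields are empty. A minimal boto3 sketch for enabling server access logging (bucket and target names are placeholders; the target bucket must grant the S3 log delivery service permission to write):

import boto3

s3 = boto3.client("s3")
s3.put_bucket_logging(
    Bucket="my-example-bucket",  # placeholder
    BucketLoggingStatus={
        "LoggingEnabled": {
            "TargetBucket": "my-log-bucket",  # placeholder; needs log-delivery write access
            "TargetPrefix": "access-logs/",
        }
    },
)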
@@ -0,0 +1,29 @@
+from lib.check.models import Check, Check_Report
+from providers.aws.services.s3.s3_service import s3_client
+
+
+class s3_bucket_server_access_logging_enabled(Check):
+    def execute(self):
+        findings = []
+        for regional_client in s3_client.regional_clients:
+            region = regional_client.region
+            if regional_client.buckets:
+                for bucket in regional_client.buckets:
+                    report = Check_Report(self.metadata)
+                    report.region = region
+                    report.resource_id = bucket.name
+                    if bucket.logging:
+                        report.status = "PASS"
+                        report.status_extended = f"S3 Bucket {bucket.name} has server access logging enabled."
+                    else:
+                        report.status = "FAIL"
+                        report.status_extended = f"S3 Bucket {bucket.name} has server access logging disabled."
+                    findings.append(report)
+            else:
+                report = Check_Report(self.metadata)
+                report.status = "PASS"
+                report.status_extended = "There are no S3 buckets."
+                report.region = region
+                findings.append(report)
+
+        return findings
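This check mirrors the versioning one, keyed on the logging flag populated from get_bucket_logging. That underlying API signals configuration by the mere presence of a key, which is what the service code below tests; a standalone probe (bucket name is a placeholder):

import boto3

s3 = boto3.client("s3")
response = s3.get_bucket_logging(Bucket="my-example-bucket")  # placeholder
# "LoggingEnabled" appears in the response only when a target bucket is configured.
logging_enabled = "LoggingEnabled" in response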
providers/aws/services/s3/s3_service.py | 100 (new file)
@@ -0,0 +1,100 @@
+import threading
+from dataclasses import dataclass
+
+from lib.logger import logger
+from providers.aws.aws_provider import current_audit_info, generate_regional_clients
+
+
+################## S3
+class S3:
+    def __init__(self, audit_info):
+        self.service = "s3"
+        self.session = audit_info.audit_session
+        self.audited_account = audit_info.audited_account
+        self.regional_clients = generate_regional_clients(self.service, audit_info)
+        self.__threading_call__(self.__list_buckets__)
+        self.__threading_call__(self.__get_bucket_versioning__)
+        self.__threading_call__(self.__get_bucket_logging__)
+
+    def __get_session__(self):
+        return self.session
+
+    def __threading_call__(self, call):
+        threads = []
+        for regional_client in self.regional_clients:
+            threads.append(threading.Thread(target=call, args=(regional_client,)))
+        for t in threads:
+            t.start()
+        for t in threads:
+            t.join()
+
+    def __list_buckets__(self, regional_client):
+        logger.info("S3 - Listing buckets...")
+        try:
+            list_buckets = regional_client.list_buckets()
+            buckets = []
+            for bucket in list_buckets["Buckets"]:
+                try:
+                    bucket_region = regional_client.get_bucket_location(
+                        Bucket=bucket["Name"]
+                    )["LocationConstraint"]
+                    if regional_client.region == bucket_region or (
+                        regional_client.region == "us-east-1" and not bucket_region
+                    ):  # If us-east-1, bucket_region is None
+                        buckets.append(Bucket(bucket["Name"]))
+                except Exception as error:
+                    if error.__class__.__name__ != "NoSuchBucket":
+                        logger.error(
+                            f"{regional_client.region} -- {error.__class__.__name__}: {error}"
+                        )
+            regional_client.buckets = buckets
+        except Exception as error:
+            logger.error(
+                f"{regional_client.region} -- {error.__class__.__name__}: {error}"
+            )
+
+    def __get_bucket_versioning__(self, regional_client):
+        logger.info("S3 - Get buckets versioning...")
+        try:
+            if hasattr(regional_client, "buckets"):
+                for bucket in regional_client.buckets:
+                    bucket_versioning = regional_client.get_bucket_versioning(
+                        Bucket=bucket.name
+                    )
+                    if "Status" in bucket_versioning:
+                        if "Enabled" == bucket_versioning["Status"]:
+                            bucket.versioning = True
+        except Exception as error:
+            logger.error(
+                f"{regional_client.region} -- {error.__class__.__name__}: {error}"
+            )
+
+    def __get_bucket_logging__(self, regional_client):
+        logger.info("S3 - Get buckets logging...")
+        try:
+            if hasattr(regional_client, "buckets"):
+                for bucket in regional_client.buckets:
+                    bucket_logging = regional_client.get_bucket_logging(
+                        Bucket=bucket.name
+                    )
+                    if "LoggingEnabled" in bucket_logging:
+                        bucket.logging = True
+        except Exception as error:
+            logger.error(
+                f"{regional_client.region} -- {error.__class__.__name__}: {error}"
+            )
+
+
+@dataclass
+class Bucket:
+    name: str
+    versioning: bool
+    logging: bool
+
+    def __init__(self, name):
+        self.name = name
+        self.versioning = False
+        self.logging = False
+
+
+s3_client = S3(current_audit_info)
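One subtlety in __list_buckets__ above: list_buckets is a global call that returns every bucket in the account, so each regional client filters by get_bucket_location, and S3 reports a LocationConstraint of None for buckets in us-east-1. A standalone sketch of that normalization (no Prowler types involved):

import boto3

s3 = boto3.client("s3", region_name="us-east-1")
for bucket in s3.list_buckets()["Buckets"]:
    location = s3.get_bucket_location(Bucket=bucket["Name"])["LocationConstraint"]
    region = location or "us-east-1"  # None means the us-east-1 legacy default
    print(bucket["Name"], region)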