feat(scanner): Tag-based scan (#1751)

Co-authored-by: Toni de la Fuente <toni@blyx.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
Sergio Garcia
2023-01-31 12:19:29 +01:00
committed by GitHub
parent 0d1a5318ec
commit 3ac4dc8392
110 changed files with 1224 additions and 635 deletions

View File

@@ -0,0 +1,9 @@
# Tags-based Scan
Prowler allows you to scan only the resources that contain specific tags. Use the flag `-t`/`--scan-tags` followed by one or more `Key=Value` pairs separated by spaces:
```
prowler aws --scan-tags Environment=dev Project=prowler
```
This example will scan only the resources that contain both tags.
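
Before any check runs, the given `Key=Value` pairs have to be resolved into the set of matching resource ARNs that the scanner keeps. Below is a minimal, illustrative sketch of that resolution step using the AWS Resource Groups Tagging API; it is an assumption for illustration, not Prowler's exact implementation:
```
import boto3


def get_tagged_resource_arns(scan_tags):
    """Resolve 'Key=Value' pairs to the ARNs of resources carrying all of those tags."""
    tag_filters = []
    for tag in scan_tags:  # e.g. "Environment=dev"
        key, value = tag.split("=", 1)
        tag_filters.append({"Key": key, "Values": [value]})

    client = boto3.client("resourcegroupstaggingapi")
    arns = []
    # get_resources applies multiple TagFilters with AND semantics,
    # so only resources holding every requested tag are returned
    for page in client.get_paginator("get_resources").paginate(TagFilters=tag_filters):
        arns.extend(mapping["ResourceARN"] for mapping in page["ResourceTagMappingList"])
    return arns


print(get_tagged_resource_arns(["Environment=dev", "Project=prowler"]))
```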

View File

@@ -44,6 +44,7 @@ nav:
     - Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
     - AWS CloudShell: tutorials/aws/cloudshell.md
     - Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
+    - Tag-based Scan: tutorials/aws/tag-based-scan.md
   - Azure:
     - Authentication: tutorials/azure/authentication.md
     - Subscriptions: tutorials/azure/subscriptions.md

View File

@@ -343,6 +343,15 @@ Detailed documentation at https://docs.prowler.cloud
             default=None,
             help="Path for allowlist yaml file. See example prowler/config/allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
         )
+        # Allowlist
+        audit_tags_subparser = aws_parser.add_argument_group("Tags-based Scan")
+        audit_tags_subparser.add_argument(
+            "-t",
+            "--scan-tags",
+            nargs="+",
+            default=None,
+            help="Scan only resources with specific tags (Key=Value), e.g., Environment=dev Project=prowler",
+        )

     def __init_azure_parser__(self):
         """Init the Azure Provider CLI parser"""

View File

View File

@@ -0,0 +1,17 @@
from prowler.lib.logger import logger


def is_resource_filtered(resource: str, audit_resources: list) -> bool:
    """
    Check if the given resource is present in audit_resources.
    Returns True if the resource matches the input filters, False otherwise.
    """
    try:
        if resource in str(audit_resources):
            return True
        return False
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error} ({resource})"
        )
        # treat comparison failures as "not filtered"
        return False

View File

@@ -23,7 +23,7 @@ def parse_allowlist_file(audit_info, allowlist_file):
                 s3_client.get_object(Bucket=bucket, Key=key)["Body"]
             )["Allowlist"]
         # Check if file is a Lambda Function ARN
-        elif re.search("^arn:(\w+):lambda:", allowlist_file):
+        elif re.search(r"^arn:(\w+):lambda:", allowlist_file):
             lambda_region = allowlist_file.split(":")[3]
             lambda_client = audit_info.audit_session.client(
                 "lambda", region_name=lambda_region

View File

@@ -21,6 +21,7 @@ current_audit_info = AWS_Audit_Info(
         session_duration=None,
         external_id=None,
     ),
+    audit_resources=None,
     audited_regions=None,
     organizations_metadata=None,
     audit_metadata=None,

View File

@@ -42,5 +42,6 @@ class AWS_Audit_Info:
     credentials: AWS_Credentials
     assumed_role_info: AWS_Assume_Role
     audited_regions: list
+    audit_resources: list
     organizations_metadata: AWS_Organizations_Info
     audit_metadata: Optional[Any] = None

View File

@@ -3,6 +3,7 @@ import threading
 from pydantic import BaseModel
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class AccessAnalyzer:
self.service = "accessanalyzer" self.service = "accessanalyzer"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.analyzers = [] self.analyzers = []
self.__threading_call__(self.__list_analyzers__) self.__threading_call__(self.__list_analyzers__)
@@ -36,18 +38,21 @@ class AccessAnalyzer:
             list_analyzers_paginator = regional_client.get_paginator("list_analyzers")
             analyzer_count = 0
             for page in list_analyzers_paginator.paginate():
-                analyzer_count += len(page["analyzers"])
                 for analyzer in page["analyzers"]:
-                    self.analyzers.append(
-                        Analyzer(
-                            arn=analyzer["arn"],
-                            name=analyzer["name"],
-                            status=analyzer["status"],
-                            tags=str(analyzer["tags"]),
-                            type=analyzer["type"],
-                            region=regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(analyzer["arn"], self.audit_resources)
+                    ):
+                        analyzer_count += 1
+                        self.analyzers.append(
+                            Analyzer(
+                                arn=analyzer["arn"],
+                                name=analyzer["name"],
+                                status=analyzer["status"],
+                                tags=str(analyzer["tags"]),
+                                type=analyzer["type"],
+                                region=regional_client.region,
+                            )
+                        )
             # No analyzers in region
             if analyzer_count == 0:
                 self.analyzers.append(
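
Every service wraps its resource collection in the same gate: when no `--scan-tags` filter was given, `audit_resources` is `None` and everything is collected; otherwise only resources whose identifier matches the filter are kept. A tiny illustrative sketch of that expression (`should_collect` is a hypothetical helper, not part of the commit):
```
from prowler.lib.scan_filters.scan_filters import is_resource_filtered


def should_collect(resource_identifier, audit_resources):
    # same shape as the condition used in each service's listing loop
    return not audit_resources or is_resource_filtered(resource_identifier, audit_resources)


print(should_collect("arn:aws:accessanalyzer:eu-west-1:123456789012:analyzer/a1", None))  # True
print(should_collect(
    "arn:aws:accessanalyzer:eu-west-1:123456789012:analyzer/a1",
    ["arn:aws:accessanalyzer:eu-west-1:123456789012:analyzer/a1"],
))  # True
print(should_collect(
    "arn:aws:accessanalyzer:eu-west-1:123456789012:analyzer/a2",
    ["arn:aws:accessanalyzer:eu-west-1:123456789012:analyzer/a1"],
))  # False
```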

View File

@@ -3,6 +3,7 @@ from dataclasses import dataclass
 from prowler.config.config import timestamp_utc
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class ACM:
self.service = "acm" self.service = "acm"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.certificates = [] self.certificates = []
self.__threading_call__(self.__list_certificates__) self.__threading_call__(self.__list_certificates__)
@@ -36,15 +38,20 @@ class ACM:
"list_certificates" "list_certificates"
) )
for page in list_certificates_paginator.paginate(): for page in list_certificates_paginator.paginate():
for analyzer in page["CertificateSummaryList"]: for certificate in page["CertificateSummaryList"]:
self.certificates.append( if not self.audit_resources or (
Certificate( is_resource_filtered(
analyzer["CertificateArn"], certificate["CertificateArn"], self.audit_resources
analyzer["DomainName"], )
False, ):
regional_client.region, self.certificates.append(
Certificate(
certificate["CertificateArn"],
certificate["DomainName"],
False,
regional_client.region,
)
) )
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class APIGateway:
self.service = "apigateway" self.service = "apigateway"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition self.audited_partition = audit_info.audited_partition
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.rest_apis = [] self.rest_apis = []
@@ -38,14 +40,17 @@ class APIGateway:
             for page in get_rest_apis_paginator.paginate():
                 for apigw in page["items"]:
                     arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::/apis/{apigw['id']}"
-                    self.rest_apis.append(
-                        RestAPI(
-                            apigw["id"],
-                            arn,
-                            regional_client.region,
-                            apigw["name"],
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(arn, self.audit_resources)
+                    ):
+                        self.rest_apis.append(
+                            RestAPI(
+                                apigw["id"],
+                                arn,
+                                regional_client.region,
+                                apigw["name"],
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class ApiGatewayV2:
self.service = "apigatewayv2" self.service = "apigatewayv2"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.apis = [] self.apis = []
self.__threading_call__(self.__get_apis__) self.__threading_call__(self.__get_apis__)
@@ -35,13 +37,16 @@ class ApiGatewayV2:
             get_rest_apis_paginator = regional_client.get_paginator("get_apis")
             for page in get_rest_apis_paginator.paginate():
                 for apigw in page["Items"]:
-                    self.apis.append(
-                        API(
-                            apigw["ApiId"],
-                            regional_client.region,
-                            apigw["Name"],
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(apigw["ApiId"], self.audit_resources)
+                    ):
+                        self.apis.append(
+                            API(
+                                apigw["ApiId"],
+                                regional_client.region,
+                                apigw["Name"],
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class AppStream:
self.service = "appstream" self.service = "appstream"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.fleets = [] self.fleets = []
self.__threading_call__(self.__describe_fleets__) self.__threading_call__(self.__describe_fleets__)
@@ -33,25 +35,28 @@ class AppStream:
             describe_fleets_paginator = regional_client.get_paginator("describe_fleets")
             for page in describe_fleets_paginator.paginate():
                 for fleet in page["Fleets"]:
-                    self.fleets.append(
-                        Fleet(
-                            arn=fleet["Arn"],
-                            name=fleet["Name"],
-                            max_user_duration_in_seconds=fleet[
-                                "MaxUserDurationInSeconds"
-                            ],
-                            disconnect_timeout_in_seconds=fleet[
-                                "DisconnectTimeoutInSeconds"
-                            ],
-                            idle_disconnect_timeout_in_seconds=fleet[
-                                "IdleDisconnectTimeoutInSeconds"
-                            ],
-                            enable_default_internet_access=fleet[
-                                "EnableDefaultInternetAccess"
-                            ],
-                            region=regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(fleet["Arn"], self.audit_resources)
+                    ):
+                        self.fleets.append(
+                            Fleet(
+                                arn=fleet["Arn"],
+                                name=fleet["Name"],
+                                max_user_duration_in_seconds=fleet[
+                                    "MaxUserDurationInSeconds"
+                                ],
+                                disconnect_timeout_in_seconds=fleet[
+                                    "DisconnectTimeoutInSeconds"
+                                ],
+                                idle_disconnect_timeout_in_seconds=fleet[
+                                    "IdleDisconnectTimeoutInSeconds"
+                                ],
+                                enable_default_internet_access=fleet[
+                                    "EnableDefaultInternetAccess"
+                                ],
+                                region=regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class AutoScaling:
self.service = "autoscaling" self.service = "autoscaling"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.launch_configurations = [] self.launch_configurations = []
self.__threading_call__(self.__describe_launch_configurations__) self.__threading_call__(self.__describe_launch_configurations__)
@@ -35,15 +37,21 @@ class AutoScaling:
             )
             for page in describe_launch_configurations_paginator.paginate():
                 for configuration in page["LaunchConfigurations"]:
-                    self.launch_configurations.append(
-                        LaunchConfiguration(
-                            configuration["LaunchConfigurationARN"],
-                            configuration["LaunchConfigurationName"],
-                            configuration["UserData"],
-                            configuration["ImageId"],
-                            regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(
+                            configuration["LaunchConfigurationARN"],
+                            self.audit_resources,
+                        )
+                    ):
+                        self.launch_configurations.append(
+                            LaunchConfiguration(
+                                configuration["LaunchConfigurationARN"],
+                                configuration["LaunchConfigurationName"],
+                                configuration["UserData"],
+                                configuration["ImageId"],
+                                regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(

View File

@@ -10,6 +10,7 @@ from botocore.client import ClientError
 from pydantic import BaseModel
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -19,6 +20,7 @@ class Lambda:
self.service = "lambda" self.service = "lambda"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.functions = {} self.functions = {}
self.__threading_call__(self.__list_functions__) self.__threading_call__(self.__list_functions__)
@@ -44,18 +46,23 @@ class Lambda:
             list_functions_paginator = regional_client.get_paginator("list_functions")
             for page in list_functions_paginator.paginate():
                 for function in page["Functions"]:
-                    lambda_name = function["FunctionName"]
-                    lambda_arn = function["FunctionArn"]
-                    lambda_runtime = function["Runtime"]
-                    self.functions[lambda_name] = Function(
-                        name=lambda_name,
-                        arn=lambda_arn,
-                        runtime=lambda_runtime,
-                        region=regional_client.region,
-                    )
-                    if "Environment" in function:
-                        lambda_environment = function["Environment"]["Variables"]
-                        self.functions[lambda_name].environment = lambda_environment
+                    if not self.audit_resources or (
+                        is_resource_filtered(
+                            function["FunctionArn"], self.audit_resources
+                        )
+                    ):
+                        lambda_name = function["FunctionName"]
+                        lambda_arn = function["FunctionArn"]
+                        lambda_runtime = function["Runtime"]
+                        self.functions[lambda_name] = Function(
+                            name=lambda_name,
+                            arn=lambda_arn,
+                            runtime=lambda_runtime,
+                            region=regional_client.region,
+                        )
+                        if "Environment" in function:
+                            lambda_environment = function["Environment"]["Variables"]
+                            self.functions[lambda_name].environment = lambda_environment
         except Exception as error:
             logger.error(

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class CloudFormation:
self.service = "cloudformation" self.service = "cloudformation"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.stacks = [] self.stacks = []
self.__threading_call__(self.__describe_stacks__) self.__threading_call__(self.__describe_stacks__)
@@ -35,20 +37,23 @@ class CloudFormation:
             describe_stacks_paginator = regional_client.get_paginator("describe_stacks")
             for page in describe_stacks_paginator.paginate():
                 for stack in page["Stacks"]:
-                    outputs = []
-                    if "Outputs" in stack:
-                        for output in stack["Outputs"]:
-                            outputs.append(
-                                f"{output['OutputKey']}:{output['OutputValue']}"
-                            )
-                    self.stacks.append(
-                        Stack(
-                            arn=stack["StackId"],
-                            name=stack["StackName"],
-                            outputs=outputs,
-                            region=regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(stack["StackId"], self.audit_resources)
+                    ):
+                        outputs = []
+                        if "Outputs" in stack:
+                            for output in stack["Outputs"]:
+                                outputs.append(
+                                    f"{output['OutputKey']}:{output['OutputValue']}"
+                                )
+                        self.stacks.append(
+                            Stack(
+                                arn=stack["StackId"],
+                                name=stack["StackName"],
+                                outputs=outputs,
+                                region=regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ from dataclasses import dataclass
 from enum import Enum
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class CloudFront:
self.service = "cloudfront" self.service = "cloudfront"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
global_client = generate_regional_clients( global_client = generate_regional_clients(
self.service, audit_info, global_service=True self.service, audit_info, global_service=True
) )
@@ -34,16 +36,19 @@ class CloudFront:
         for page in list_ditributions_paginator.paginate():
             if "Items" in page["DistributionList"]:
                 for item in page["DistributionList"]["Items"]:
-                    distribution_id = item["Id"]
-                    distribution_arn = item["ARN"]
-                    origins = item["Origins"]["Items"]
-                    distribution = Distribution(
-                        arn=distribution_arn,
-                        id=distribution_id,
-                        origins=origins,
-                        region=region,
-                    )
-                    distributions[distribution_id] = distribution
+                    if not self.audit_resources or (
+                        is_resource_filtered(item["ARN"], self.audit_resources)
+                    ):
+                        distribution_id = item["Id"]
+                        distribution_arn = item["ARN"]
+                        origins = item["Origins"]["Items"]
+                        distribution = Distribution(
+                            arn=distribution_arn,
+                            id=distribution_id,
+                            origins=origins,
+                            region=region,
+                        )
+                        distributions[distribution_id] = distribution
         return distributions

View File

@@ -3,6 +3,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class Cloudtrail:
self.service = "cloudtrail" self.service = "cloudtrail"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition self.audited_partition = audit_info.audited_partition
self.region = audit_info.profile_region self.region = audit_info.profile_region
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
@@ -36,8 +38,12 @@ class Cloudtrail:
logger.info("Cloudtrail - Getting trails...") logger.info("Cloudtrail - Getting trails...")
try: try:
describe_trails = regional_client.describe_trails()["trailList"] describe_trails = regional_client.describe_trails()["trailList"]
if describe_trails: trails_count = 0
for trail in describe_trails: for trail in describe_trails:
if not self.audit_resources or (
is_resource_filtered(trail["TrailARN"], self.audit_resources)
):
trails_count += 1
kms_key_id = None kms_key_id = None
log_group_arn = None log_group_arn = None
if "KmsKeyId" in trail: if "KmsKeyId" in trail:
@@ -62,7 +68,7 @@ class Cloudtrail:
                             data_events=[],
                         )
                     )
-            else:
+            if trails_count == 0:
                 self.trails.append(
                     Trail(
                         name=None,
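
Unlike most services, CloudTrail (and Config below) counts how many resources survive the filter and falls back to a placeholder entry when none matched, so region-level checks can still produce a finding. A rough sketch of that pattern (hypothetical variables, not Prowler code):
```
matched_trails = [
    trail
    for trail in describe_trails
    if not audit_resources or is_resource_filtered(trail["TrailARN"], audit_resources)
]
if not matched_trails:
    # placeholder so checks can still report "no trails" for the region
    matched_trails.append({"Name": None, "TrailARN": None})
```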

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class CloudWatch:
self.service = "cloudwatch" self.service = "cloudwatch"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.region = list( self.region = list(
generate_regional_clients( generate_regional_clients(
self.service, audit_info, global_service=True self.service, audit_info, global_service=True
@@ -38,15 +40,18 @@ class CloudWatch:
             describe_alarms_paginator = regional_client.get_paginator("describe_alarms")
             for page in describe_alarms_paginator.paginate():
                 for alarm in page["MetricAlarms"]:
-                    self.metric_alarms.append(
-                        MetricAlarm(
-                            alarm["AlarmArn"],
-                            alarm["AlarmName"],
-                            alarm["MetricName"],
-                            alarm["Namespace"],
-                            regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(alarm["AlarmArn"], self.audit_resources)
+                    ):
+                        self.metric_alarms.append(
+                            MetricAlarm(
+                                alarm["AlarmArn"],
+                                alarm["AlarmName"],
+                                alarm["MetricName"],
+                                alarm["Namespace"],
+                                regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -59,6 +64,7 @@ class Logs:
self.service = "logs" self.service = "logs"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.metric_filters = [] self.metric_filters = []
self.log_groups = [] self.log_groups = []
@@ -85,15 +91,18 @@ class Logs:
             )
             for page in describe_metric_filters_paginator.paginate():
                 for filter in page["metricFilters"]:
-                    self.metric_filters.append(
-                        MetricFilter(
-                            filter["filterName"],
-                            filter["metricTransformations"][0]["metricName"],
-                            filter["filterPattern"],
-                            filter["logGroupName"],
-                            regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(filter["filterName"], self.audit_resources)
+                    ):
+                        self.metric_filters.append(
+                            MetricFilter(
+                                filter["filterName"],
+                                filter["metricTransformations"][0]["metricName"],
+                                filter["filterPattern"],
+                                filter["logGroupName"],
+                                regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -106,22 +115,25 @@ class Logs:
"describe_log_groups" "describe_log_groups"
) )
for page in describe_log_groups_paginator.paginate(): for page in describe_log_groups_paginator.paginate():
for filter in page["logGroups"]: for log_group in page["logGroups"]:
kms = None if not self.audit_resources or (
retention_days = 0 is_resource_filtered(log_group["arn"], self.audit_resources)
if "kmsKeyId" in filter: ):
kms = filter["kmsKeyId"] kms = None
if "retentionInDays" in filter: retention_days = 0
retention_days = filter["retentionInDays"] if "kmsKeyId" in log_group:
self.log_groups.append( kms = log_group["kmsKeyId"]
LogGroup( if "retentionInDays" in log_group:
filter["arn"], retention_days = log_group["retentionInDays"]
filter["logGroupName"], self.log_groups.append(
retention_days, LogGroup(
kms, log_group["arn"],
regional_client.region, log_group["logGroupName"],
retention_days,
kms,
regional_client.region,
)
) )
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -5,6 +5,7 @@ from typing import Optional
 from pydantic import BaseModel
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class CodeArtifact:
self.service = "codeartifact" self.service = "codeartifact"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
# repositories is a dictionary containing all the codeartifact service information # repositories is a dictionary containing all the codeartifact service information
self.repositories = {} self.repositories = {}
@@ -40,18 +42,21 @@ class CodeArtifact:
             )
             for page in list_repositories_paginator.paginate():
                 for repository in page["repositories"]:
-                    package_name = repository["name"]
-                    package_domain_name = repository["domainName"]
-                    package_domain_owner = repository["domainOwner"]
-                    package_arn = repository["arn"]
-                    # Save Repository
-                    self.repositories[package_name] = Repository(
-                        name=package_name,
-                        arn=package_arn,
-                        domain_name=package_domain_name,
-                        domain_owner=package_domain_owner,
-                        region=regional_client.region,
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(repository["arn"], self.audit_resources)
+                    ):
+                        package_name = repository["name"]
+                        package_domain_name = repository["domainName"]
+                        package_domain_owner = repository["domainOwner"]
+                        package_arn = repository["arn"]
+                        # Save Repository
+                        self.repositories[package_name] = Repository(
+                            name=package_name,
+                            arn=package_arn,
+                            domain_name=package_domain_name,
+                            domain_owner=package_domain_owner,
+                            region=regional_client.region,
+                        )
         except Exception as error:
             logger.error(

View File

@@ -4,6 +4,7 @@ from dataclasses import dataclass
 from typing import Optional
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class Codebuild:
self.service = "codebuild" self.service = "codebuild"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.projects = [] self.projects = []
self.__threading_call__(self.__list_projects__) self.__threading_call__(self.__list_projects__)
@@ -36,14 +38,17 @@ class Codebuild:
             list_projects_paginator = regional_client.get_paginator("list_projects")
             for page in list_projects_paginator.paginate():
                 for project in page["projects"]:
-                    self.projects.append(
-                        CodebuildProject(
-                            name=project,
-                            region=regional_client.region,
-                            last_invoked_time=None,
-                            buildspec=None,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(project, self.audit_resources)
+                    ):
+                        self.projects.append(
+                            CodebuildProject(
+                                name=project,
+                                region=regional_client.region,
+                                last_invoked_time=None,
+                                buildspec=None,
+                            )
+                        )
         except Exception as error:
             logger.error(

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class Config:
self.service = "config" self.service = "config"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.recorders = [] self.recorders = []
self.__threading_call__(self.__describe_configuration_recorder_status__) self.__threading_call__(self.__describe_configuration_recorder_status__)
@@ -33,8 +35,12 @@ class Config:
             recorders = regional_client.describe_configuration_recorder_status()[
                 "ConfigurationRecordersStatus"
             ]
-            if recorders:
-                for recorder in recorders:
+            recorders_count = 0
+            for recorder in recorders:
+                if not self.audit_resources or (
+                    is_resource_filtered(recorder["name"], self.audit_resources)
+                ):
+                    recorders_count += 1
                     if "lastStatus" in recorder:
                         self.recorders.append(
                             Recorder(
@@ -54,7 +60,7 @@ class Config:
                             )
                         )
             # No config recorders in region
-            else:
+            if recorders_count == 0:
                 self.recorders.append(
                     Recorder(
                         self.audited_account,

View File

@@ -6,6 +6,7 @@ from typing import Union
 from pydantic import BaseModel
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -15,6 +16,7 @@ class DirectoryService:
self.service = "ds" self.service = "ds"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.directories = {} self.directories = {}
self.__threading_call__(self.__describe_directories__) self.__threading_call__(self.__describe_directories__)
@@ -43,31 +45,36 @@ class DirectoryService:
             )
             for page in describe_fleets_paginator.paginate():
                 for directory in page["DirectoryDescriptions"]:
-                    directory_id = directory["DirectoryId"]
-                    directory_name = directory["Name"]
-                    directory_type = directory["Type"]
-                    # Radius Configuration
-                    radius_authentication_protocol = (
-                        directory["RadiusSettings"]["AuthenticationProtocol"]
-                        if "RadiusSettings" in directory
-                        else None
-                    )
-                    radius_status = (
-                        directory["RadiusStatus"]
-                        if "RadiusStatus" in directory
-                        else None
-                    )
-                    self.directories[directory_id] = Directory(
-                        name=directory_name,
-                        id=directory_id,
-                        type=directory_type,
-                        region=regional_client.region,
-                        radius_settings=RadiusSettings(
-                            authentication_protocol=radius_authentication_protocol,
-                            status=radius_status,
-                        ),
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(
+                            directory["DirectoryId"], self.audit_resources
+                        )
+                    ):
+                        directory_id = directory["DirectoryId"]
+                        directory_name = directory["Name"]
+                        directory_type = directory["Type"]
+                        # Radius Configuration
+                        radius_authentication_protocol = (
+                            directory["RadiusSettings"]["AuthenticationProtocol"]
+                            if "RadiusSettings" in directory
+                            else None
+                        )
+                        radius_status = (
+                            directory["RadiusStatus"]
+                            if "RadiusStatus" in directory
+                            else None
+                        )
+                        self.directories[directory_id] = Directory(
+                            name=directory_name,
+                            id=directory_id,
+                            type=directory_type,
+                            region=regional_client.region,
+                            radius_settings=RadiusSettings(
+                                authentication_protocol=radius_authentication_protocol,
+                                status=radius_status,
+                            ),
+                        )
         except Exception as error:
             logger.error(

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class DynamoDB:
self.service = "dynamodb" self.service = "dynamodb"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.tables = [] self.tables = []
self.__threading_call__(self.__list_tables__) self.__threading_call__(self.__list_tables__)
@@ -35,15 +37,18 @@ class DynamoDB:
             list_tables_paginator = regional_client.get_paginator("list_tables")
             for page in list_tables_paginator.paginate():
                 for table in page["TableNames"]:
-                    self.tables.append(
-                        Table(
-                            arn="",
-                            name=table,
-                            encryption_type=None,
-                            kms_arn=None,
-                            region=regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(table, self.audit_resources)
+                    ):
+                        self.tables.append(
+                            Table(
+                                arn="",
+                                name=table,
+                                encryption_type=None,
+                                kms_arn=None,
+                                region=regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -96,6 +101,7 @@ class DAX:
self.service = "dax" self.service = "dax"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = [] self.clusters = []
self.__threading_call__(self.__describe_clusters__) self.__threading_call__(self.__describe_clusters__)
@@ -120,18 +126,23 @@ class DAX:
             )
             for page in describe_clusters_paginator.paginate():
                 for cluster in page["Clusters"]:
-                    encryption = False
-                    if "SSEDescription" in cluster:
-                        if cluster["SSEDescription"]["Status"] == "ENABLED":
-                            encryption = True
-                    self.clusters.append(
-                        Cluster(
-                            cluster["ClusterArn"],
-                            cluster["ClusterName"],
-                            encryption,
-                            regional_client.region,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(
+                            cluster["ClusterArn"], self.audit_resources
+                        )
+                    ):
+                        encryption = False
+                        if "SSEDescription" in cluster:
+                            if cluster["SSEDescription"]["Status"] == "ENABLED":
+                                encryption = True
+                        self.clusters.append(
+                            Cluster(
+                                cluster["ClusterArn"],
+                                cluster["ClusterName"],
+                                encryption,
+                                regional_client.region,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
 from dataclasses import dataclass
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class EC2:
         self.session = audit_info.audit_session
         self.audited_partition = audit_info.audited_partition
         self.audited_account = audit_info.audited_account
+        self.audit_resources = audit_info.audit_resources
         self.regional_clients = generate_regional_clients(self.service, audit_info)
         self.instances = []
         self.__threading_call__(self.__describe_instances__)
@@ -55,41 +57,46 @@ class EC2:
                 for reservation in page["Reservations"]:
                     for instance in reservation["Instances"]:
                         arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:instance/{instance['InstanceId']}"
-                        http_tokens = None
-                        http_endpoint = None
-                        public_dns = None
-                        public_ip = None
-                        instance_profile = None
-                        if "MetadataOptions" in instance:
-                            http_tokens = instance["MetadataOptions"]["HttpTokens"]
-                            http_endpoint = instance["MetadataOptions"]["HttpEndpoint"]
-                        if (
-                            "PublicDnsName" in instance
-                            and "PublicIpAddress" in instance
-                        ):
-                            public_dns = instance["PublicDnsName"]
-                            public_ip = instance["PublicIpAddress"]
-                        if "IamInstanceProfile" in instance:
-                            instance_profile = instance["IamInstanceProfile"]
-                        self.instances.append(
-                            Instance(
-                                instance["InstanceId"],
-                                arn,
-                                instance["State"]["Name"],
-                                regional_client.region,
-                                instance["InstanceType"],
-                                instance["ImageId"],
-                                instance["LaunchTime"],
-                                instance["PrivateDnsName"],
-                                instance["PrivateIpAddress"],
-                                public_dns,
-                                public_ip,
-                                http_tokens,
-                                http_endpoint,
-                                instance_profile,
-                            )
-                        )
+                        if not self.audit_resources or (
+                            is_resource_filtered(arn, self.audit_resources)
+                        ):
+                            http_tokens = None
+                            http_endpoint = None
+                            public_dns = None
+                            public_ip = None
+                            instance_profile = None
+                            if "MetadataOptions" in instance:
+                                http_tokens = instance["MetadataOptions"]["HttpTokens"]
+                                http_endpoint = instance["MetadataOptions"][
+                                    "HttpEndpoint"
+                                ]
+                            if (
+                                "PublicDnsName" in instance
+                                and "PublicIpAddress" in instance
+                            ):
+                                public_dns = instance["PublicDnsName"]
+                                public_ip = instance["PublicIpAddress"]
+                            if "IamInstanceProfile" in instance:
+                                instance_profile = instance["IamInstanceProfile"]
+                            self.instances.append(
+                                Instance(
+                                    instance["InstanceId"],
+                                    arn,
+                                    instance["State"]["Name"],
+                                    regional_client.region,
+                                    instance["InstanceType"],
+                                    instance["ImageId"],
+                                    instance["LaunchTime"],
+                                    instance["PrivateDnsName"],
+                                    instance["PrivateIpAddress"],
+                                    public_dns,
+                                    public_ip,
+                                    http_tokens,
+                                    http_endpoint,
+                                    instance_profile,
+                                )
+                            )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -104,16 +111,19 @@ class EC2:
             for page in describe_security_groups_paginator.paginate():
                 for sg in page["SecurityGroups"]:
                     arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:security-group/{sg['GroupId']}"
-                    self.security_groups.append(
-                        SecurityGroup(
-                            sg["GroupName"],
-                            arn,
-                            regional_client.region,
-                            sg["GroupId"],
-                            sg["IpPermissions"],
-                            sg["IpPermissionsEgress"],
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(arn, self.audit_resources)
+                    ):
+                        self.security_groups.append(
+                            SecurityGroup(
+                                sg["GroupName"],
+                                arn,
+                                regional_client.region,
+                                sg["GroupId"],
+                                sg["IpPermissions"],
+                                sg["IpPermissionsEgress"],
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -128,14 +138,17 @@ class EC2:
             for page in describe_network_acls_paginator.paginate():
                 for nacl in page["NetworkAcls"]:
                     arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:network-acl/{nacl['NetworkAclId']}"
-                    self.network_acls.append(
-                        NetworkACL(
-                            nacl["NetworkAclId"],
-                            arn,
-                            regional_client.region,
-                            nacl["Entries"],
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(arn, self.audit_resources)
+                    ):
+                        self.network_acls.append(
+                            NetworkACL(
+                                nacl["NetworkAclId"],
+                                arn,
+                                regional_client.region,
+                                nacl["Entries"],
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -151,16 +164,19 @@ class EC2:
             for page in describe_snapshots_paginator.paginate(OwnerIds=["self"]):
                 for snapshot in page["Snapshots"]:
                     arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:snapshot/{snapshot['SnapshotId']}"
-                    if snapshot["Encrypted"]:
-                        encrypted = True
-                    self.snapshots.append(
-                        Snapshot(
-                            snapshot["SnapshotId"],
-                            arn,
-                            regional_client.region,
-                            encrypted,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(arn, self.audit_resources)
+                    ):
+                        if snapshot["Encrypted"]:
+                            encrypted = True
+                        self.snapshots.append(
+                            Snapshot(
+                                snapshot["SnapshotId"],
+                                arn,
+                                regional_client.region,
+                                encrypted,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -231,17 +247,20 @@ class EC2:
             public = False
             for image in regional_client.describe_images(Owners=["self"])["Images"]:
                 arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:image/{image['ImageId']}"
-                if image["Public"]:
-                    public = True
-                self.images.append(
-                    Image(
-                        image["ImageId"],
-                        arn,
-                        image["Name"],
-                        public,
-                        regional_client.region,
-                    )
-                )
+                if not self.audit_resources or (
+                    is_resource_filtered(arn, self.audit_resources)
+                ):
+                    if image["Public"]:
+                        public = True
+                    self.images.append(
+                        Image(
+                            image["ImageId"],
+                            arn,
+                            image["Name"],
+                            public,
+                            regional_client.region,
+                        )
+                    )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -256,14 +275,17 @@ class EC2:
             for page in describe_volumes_paginator.paginate():
                 for volume in page["Volumes"]:
                     arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:volume/{volume['VolumeId']}"
-                    self.volumes.append(
-                        Volume(
-                            volume["VolumeId"],
-                            arn,
-                            regional_client.region,
-                            volume["Encrypted"],
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(arn, self.audit_resources)
+                    ):
+                        self.volumes.append(
+                            Volume(
+                                volume["VolumeId"],
+                                arn,
+                                regional_client.region,
+                                volume["Encrypted"],
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -283,16 +305,18 @@ class EC2:
if "AllocationId" in address: if "AllocationId" in address:
allocation_id = address["AllocationId"] allocation_id = address["AllocationId"]
elastic_ip_arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:eip-allocation/{allocation_id}" elastic_ip_arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:eip-allocation/{allocation_id}"
if not self.audit_resources or (
self.elastic_ips.append( is_resource_filtered(elastic_ip_arn, self.audit_resources)
ElasticIP( ):
public_ip, self.elastic_ips.append(
association_id, ElasticIP(
allocation_id, public_ip,
elastic_ip_arn, association_id,
regional_client.region, allocation_id,
elastic_ip_arn,
regional_client.region,
)
) )
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ from dataclasses import dataclass
 from json import loads
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class ECR:
     def __init__(self, audit_info):
         self.service = "ecr"
         self.session = audit_info.audit_session
+        self.audit_resources = audit_info.audit_resources
         self.regional_clients = generate_regional_clients(self.service, audit_info)
         self.repositories = []
         self.__threading_call__(self.__describe_repositories__)
@@ -38,19 +40,24 @@ class ECR:
             )
             for page in describe_ecr_paginator.paginate():
                 for repository in page["repositories"]:
-                    self.repositories.append(
-                        Repository(
-                            name=repository["repositoryName"],
-                            arn=repository["repositoryArn"],
-                            region=regional_client.region,
-                            scan_on_push=repository["imageScanningConfiguration"][
-                                "scanOnPush"
-                            ],
-                            policy=None,
-                            images_details=[],
-                            lyfecicle_policy=None,
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(
+                            repository["repositoryArn"], self.audit_resources
+                        )
+                    ):
+                        self.repositories.append(
+                            Repository(
+                                name=repository["repositoryName"],
+                                arn=repository["repositoryArn"],
+                                region=regional_client.region,
+                                scan_on_push=repository["imageScanningConfiguration"][
+                                    "scanOnPush"
+                                ],
+                                policy=None,
+                                images_details=[],
+                                lyfecicle_policy=None,
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from re import sub
 from pydantic import BaseModel
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class ECS:
     def __init__(self, audit_info):
         self.service = "ecs"
         self.session = audit_info.audit_session
+        self.audit_resources = audit_info.audit_resources
         self.regional_clients = generate_regional_clients(self.service, audit_info)
         self.task_definitions = []
         self.__threading_call__(self.__list_task_definitions__)
@@ -36,16 +38,19 @@ class ECS:
             list_ecs_paginator = regional_client.get_paginator("list_task_definitions")
             for page in list_ecs_paginator.paginate():
                 for task_definition in page["taskDefinitionArns"]:
-                    self.task_definitions.append(
-                        TaskDefinition(
-                            # we want the family name without the revision
-                            name=sub(":.*", "", task_definition.split("/")[1]),
-                            arn=task_definition,
-                            revision=task_definition.split(":")[-1],
-                            region=regional_client.region,
-                            environment_variables=[],
-                        )
-                    )
+                    if not self.audit_resources or (
+                        is_resource_filtered(task_definition, self.audit_resources)
+                    ):
+                        self.task_definitions.append(
+                            TaskDefinition(
+                                # we want the family name without the revision
+                                name=sub(":.*", "", task_definition.split("/")[1]),
+                                arn=task_definition,
+                                revision=task_definition.split(":")[-1],
+                                region=regional_client.region,
+                                environment_variables=[],
+                            )
+                        )
         except Exception as error:
             logger.error(
                 f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from dataclasses import dataclass
 from botocore.client import ClientError
 from prowler.lib.logger import logger
+from prowler.lib.scan_filters.scan_filters import is_resource_filtered
 from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class EFS:
     def __init__(self, audit_info):
         self.service = "efs"
         self.session = audit_info.audit_session
+        self.audit_resources = audit_info.audit_resources
         self.regional_clients = generate_regional_clients(self.service, audit_info)
         self.filesystems = []
         self.__threading_call__(self.__describe_file_systems__)
@@ -37,15 +39,18 @@ class EFS:
) )
for page in describe_efs_paginator.paginate(): for page in describe_efs_paginator.paginate():
for efs in page["FileSystems"]: for efs in page["FileSystems"]:
if not self.audit_resources or (
is_resource_filtered(efs["FileSystemId"], self.audit_resources)
):
self.filesystems.append(
FileSystem(
id=efs["FileSystemId"],
region=regional_client.region,
policy=None,
backup_policy=None,
encrypted=efs["Encrypted"],
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class EKS:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "eks" self.service = "eks"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = [] self.clusters = []
self.__threading_call__(self.__list_clusters__) self.__threading_call__(self.__list_clusters__)
@@ -34,12 +36,15 @@ class EKS:
list_clusters_paginator = regional_client.get_paginator("list_clusters") list_clusters_paginator = regional_client.get_paginator("list_clusters")
for page in list_clusters_paginator.paginate(): for page in list_clusters_paginator.paginate():
for cluster in page["clusters"]: for cluster in page["clusters"]:
if not self.audit_resources or (
is_resource_filtered(cluster, self.audit_resources)
):
self.clusters.append(
EKSCluster(
name=cluster,
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class ELB:
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_partition = audit_info.audited_partition self.audited_partition = audit_info.audited_partition
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.loadbalancers = [] self.loadbalancers = []
self.__threading_call__(self.__describe_load_balancers__) self.__threading_call__(self.__describe_load_balancers__)
@@ -39,24 +41,28 @@ class ELB:
) )
for page in describe_elb_paginator.paginate(): for page in describe_elb_paginator.paginate():
for elb in page["LoadBalancerDescriptions"]: for elb in page["LoadBalancerDescriptions"]:
arn = f"arn:{self.audited_partition}:elasticloadbalancing:{regional_client.region}:{self.audited_account}:loadbalancer/{elb['LoadBalancerName']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
listeners = []
for listener in elb["ListenerDescriptions"]:
listeners.append(
Listener(
protocol=listener["Listener"]["Protocol"],
policies=listener["PolicyNames"],
)
)
self.loadbalancers.append(
LoadBalancer(
name=elb["LoadBalancerName"],
arn=arn,
dns=elb["DNSName"],
region=regional_client.region,
scheme=elb["Scheme"],
listeners=listeners,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
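
Classic load balancer descriptions returned by describe_load_balancers do not include an ARN, so the ARN used for filtering is assembled from the audited partition, region, account and load balancer name before the guard runs. A rough sketch of that construction; every value below is hypothetical:

```
# Hypothetical values; the real ones come from audit_info and the boto3 response.
audited_partition = "aws"
region = "eu-west-1"
audited_account = "123456789012"
elb_name = "my-classic-elb"

arn = (
    f"arn:{audited_partition}:elasticloadbalancing:"
    f"{region}:{audited_account}:loadbalancer/{elb_name}"
)
print(arn)
```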

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class ELBv2:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "elbv2" self.service = "elbv2"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.loadbalancersv2 = [] self.loadbalancersv2 = []
self.__threading_call__(self.__describe_load_balancers__) self.__threading_call__(self.__describe_load_balancers__)
@@ -40,17 +42,22 @@ class ELBv2:
) )
for page in describe_elbv2_paginator.paginate(): for page in describe_elbv2_paginator.paginate():
for elbv2 in page["LoadBalancers"]: for elbv2 in page["LoadBalancers"]:
if not self.audit_resources or (
is_resource_filtered(
elbv2["LoadBalancerArn"], self.audit_resources
)
):
self.loadbalancersv2.append(
LoadBalancerv2(
name=elbv2["LoadBalancerName"],
dns=elbv2["DNSName"],
region=regional_client.region,
arn=elbv2["LoadBalancerArn"],
scheme=elbv2["Scheme"],
type=elbv2["Type"],
listeners=[],
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from enum import Enum
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class EMR:
self.service = "emr" self.service = "emr"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = {} self.clusters = {}
self.block_public_access_configuration = {} self.block_public_access_configuration = {}
@@ -38,18 +40,23 @@ class EMR:
list_clusters_paginator = regional_client.get_paginator("list_clusters") list_clusters_paginator = regional_client.get_paginator("list_clusters")
for page in list_clusters_paginator.paginate(): for page in list_clusters_paginator.paginate():
for cluster in page["Clusters"]: for cluster in page["Clusters"]:
if not self.audit_resources or (
is_resource_filtered(
cluster["ClusterArn"], self.audit_resources
)
):
cluster_name = cluster["Name"]
cluster_id = cluster["Id"]
cluster_arn = cluster["ClusterArn"]
cluster_status = cluster["Status"]["State"]
self.clusters[cluster_id] = Cluster( self.clusters[cluster_id] = Cluster(
id=cluster_id, id=cluster_id,
name=cluster_name, name=cluster_name,
arn=cluster_arn, arn=cluster_arn,
status=cluster_status, status=cluster_status,
region=regional_client.region, region=regional_client.region,
) )
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -5,6 +5,7 @@ from botocore.client import ClientError
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class Glacier:
self.service = "glacier" self.service = "glacier"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.vaults = {} self.vaults = {}
self.__threading_call__(self.__list_vaults__) self.__threading_call__(self.__list_vaults__)
@@ -37,13 +39,16 @@ class Glacier:
list_vaults_paginator = regional_client.get_paginator("list_vaults") list_vaults_paginator = regional_client.get_paginator("list_vaults")
for page in list_vaults_paginator.paginate(): for page in list_vaults_paginator.paginate():
for vault in page["VaultList"]: for vault in page["VaultList"]:
if not self.audit_resources or (
is_resource_filtered(vault["VaultARN"], self.audit_resources)
):
vault_name = vault["VaultName"]
vault_arn = vault["VaultARN"]
self.vaults[vault_name] = Vault(
name=vault_name,
arn=vault_arn,
region=regional_client.region,
)
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -1,6 +1,7 @@
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
################### GlobalAccelerator ################### GlobalAccelerator
@@ -9,6 +10,7 @@ class GlobalAccelerator:
self.service = "globalaccelerator" self.service = "globalaccelerator"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.accelerators = {} self.accelerators = {}
if audit_info.audited_partition == "aws": if audit_info.audited_partition == "aws":
# Global Accelerator is a global service that supports endpoints in multiple AWS Regions # Global Accelerator is a global service that supports endpoints in multiple AWS Regions
@@ -27,15 +29,20 @@ class GlobalAccelerator:
list_accelerators_paginator = self.client.get_paginator("list_accelerators") list_accelerators_paginator = self.client.get_paginator("list_accelerators")
for page in list_accelerators_paginator.paginate(): for page in list_accelerators_paginator.paginate():
for accelerator in page["Accelerators"]: for accelerator in page["Accelerators"]:
if not self.audit_resources or (
is_resource_filtered(
accelerator["AcceleratorArn"], self.audit_resources
)
):
accelerator_arn = accelerator["AcceleratorArn"]
accelerator_name = accelerator["Name"]
enabled = accelerator["Enabled"]
self.accelerators[accelerator_name] = Accelerator(
name=accelerator_name,
arn=accelerator_arn,
region=self.region,
enabled=enabled,
)
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class Glue:
self.service = "glue" self.service = "glue"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.connections = [] self.connections = []
self.__threading_call__(self.__get_connections__) self.__threading_call__(self.__get_connections__)
@@ -45,14 +47,17 @@ class Glue:
get_connections_paginator = regional_client.get_paginator("get_connections") get_connections_paginator = regional_client.get_paginator("get_connections")
for page in get_connections_paginator.paginate(): for page in get_connections_paginator.paginate():
for conn in page["ConnectionList"]: for conn in page["ConnectionList"]:
if not self.audit_resources or (
is_resource_filtered(conn["Name"], self.audit_resources)
):
self.connections.append(
Connection(
name=conn["Name"],
type=conn["ConnectionType"],
properties=conn["ConnectionProperties"],
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -66,13 +71,18 @@ class Glue:
) )
for page in get_dev_endpoints_paginator.paginate(): for page in get_dev_endpoints_paginator.paginate():
for endpoint in page["DevEndpoints"]: for endpoint in page["DevEndpoints"]:
if not self.audit_resources or (
is_resource_filtered(
endpoint["EndpointName"], self.audit_resources
)
):
self.dev_endpoints.append(
DevEndpoint(
name=endpoint["EndpointName"],
security=endpoint.get("SecurityConfiguration"),
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -84,14 +94,17 @@ class Glue:
get_jobs_paginator = regional_client.get_paginator("get_jobs") get_jobs_paginator = regional_client.get_paginator("get_jobs")
for page in get_jobs_paginator.paginate(): for page in get_jobs_paginator.paginate():
for job in page["Jobs"]: for job in page["Jobs"]:
if not self.audit_resources or (
is_resource_filtered(job["Name"], self.audit_resources)
):
self.jobs.append(
Job(
name=job["Name"],
security=job.get("SecurityConfiguration"),
arguments=job.get("DefaultArguments"),
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -105,30 +118,33 @@ class Glue:
) )
for page in get_security_configurations_paginator.paginate(): for page in get_security_configurations_paginator.paginate():
for config in page["SecurityConfigurations"]: for config in page["SecurityConfigurations"]:
if not self.audit_resources or (
is_resource_filtered(config["Name"], self.audit_resources)
):
self.security_configs.append(
SecurityConfig(
name=config["Name"],
s3_encryption=config["EncryptionConfiguration"][
"S3Encryption"
][0]["S3EncryptionMode"],
s3_key_arn=config["EncryptionConfiguration"][
"S3Encryption"
][0].get("KmsKeyArn"),
cw_encryption=config["EncryptionConfiguration"][
"CloudWatchEncryption"
]["CloudWatchEncryptionMode"],
cw_key_arn=config["EncryptionConfiguration"][
"CloudWatchEncryption"
].get("KmsKeyArn"),
jb_encryption=config["EncryptionConfiguration"][
"JobBookmarksEncryption"
]["JobBookmarksEncryptionMode"],
jb_key_arn=config["EncryptionConfiguration"][
"JobBookmarksEncryption"
].get("KmsKeyArn"),
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -138,14 +154,17 @@ class Glue:
logger.info("Glue - Search Tables...") logger.info("Glue - Search Tables...")
try: try:
for table in regional_client.search_tables()["TableList"]: for table in regional_client.search_tables()["TableList"]:
if not self.audit_resources or (
is_resource_filtered(table["Name"], self.audit_resources)
):
self.tables.append(
Table(
name=table["Name"],
database=table["DatabaseName"],
catalog=table["CatalogId"],
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class GuardDuty:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "guardduty" self.service = "guardduty"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.detectors = [] self.detectors = []
self.__threading_call__(self.__list_detectors__) self.__threading_call__(self.__list_detectors__)
@@ -35,9 +37,12 @@ class GuardDuty:
list_detectors_paginator = regional_client.get_paginator("list_detectors") list_detectors_paginator = regional_client.get_paginator("list_detectors")
for page in list_detectors_paginator.paginate(): for page in list_detectors_paginator.paginate():
for detector in page["DetectorIds"]: for detector in page["DetectorIds"]:
if not self.audit_resources or (
is_resource_filtered(detector, self.audit_resources)
):
self.detectors.append(
Detector(id=detector, region=regional_client.region)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ from dataclasses import dataclass
from datetime import datetime from datetime import datetime
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -29,6 +30,7 @@ class IAM:
self.service = "iam" self.service = "iam"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.account = audit_info.audited_account self.account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.partition = audit_info.audited_partition self.partition = audit_info.audited_partition
self.client = self.session.client(self.service) self.client = self.session.client(self.service)
global_client = generate_regional_clients( global_client = generate_regional_clients(
@@ -68,14 +70,17 @@ class IAM:
roles = [] roles = []
for page in get_roles_paginator.paginate(): for page in get_roles_paginator.paginate():
for role in page["Roles"]: for role in page["Roles"]:
if not self.audit_resources or (
is_resource_filtered(role["Arn"], self.audit_resources)
):
roles.append(
Role(
name=role["RoleName"],
arn=role["Arn"],
assume_role_policy=role["AssumeRolePolicyDocument"],
is_service_role=is_service_role(role),
)
)
return roles return roles
except Exception as error: except Exception as error:
logger.error( logger.error(
@@ -112,7 +117,10 @@ class IAM:
groups = [] groups = []
for page in get_groups_paginator.paginate(): for page in get_groups_paginator.paginate():
for group in page["Groups"]: for group in page["Groups"]:
if not self.audit_resources or (
is_resource_filtered(group["Arn"], self.audit_resources)
):
groups.append(Group(group["GroupName"], group["Arn"]))
return groups return groups
@@ -175,14 +183,19 @@ class IAM:
users = [] users = []
for page in get_users_paginator.paginate(): for page in get_users_paginator.paginate():
for user in page["Users"]: for user in page["Users"]:
if not self.audit_resources or (
is_resource_filtered(user["Arn"], self.audit_resources)
):
if "PasswordLastUsed" not in user:
users.append(User(user["UserName"], user["Arn"], None))
else:
users.append(
User(
user["UserName"],
user["Arn"],
user["PasswordLastUsed"],
)
)
return users return users
@@ -330,7 +343,10 @@ class IAM:
list_policies_paginator = self.client.get_paginator("list_policies") list_policies_paginator = self.client.get_paginator("list_policies")
for page in list_policies_paginator.paginate(Scope="Local"): for page in list_policies_paginator.paginate(Scope="Local"):
for policy in page["Policies"]: for policy in page["Policies"]:
if not self.audit_resources or (
is_resource_filtered(policy["Arn"], self.audit_resources)
):
policies.append(policy)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -369,14 +385,17 @@ class IAM:
for certificate in self.client.list_server_certificates()[ for certificate in self.client.list_server_certificates()[
"ServerCertificateMetadataList" "ServerCertificateMetadataList"
]: ]:
if not self.audit_resources or (
is_resource_filtered(certificate["Arn"], self.audit_resources)
):
server_certificates.append(
Certificate(
certificate["ServerCertificateName"],
certificate["ServerCertificateId"],
certificate["Arn"],
certificate["Expiration"],
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
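
IAM is handled as a global service, so the filter is applied per entity ARN (roles, groups, users, customer-managed policies and server certificates) rather than per region. A small sketch of filtering IAM-style records by their Arn field; the records and the filter value are invented for illustration:

```
from prowler.lib.scan_filters.scan_filters import is_resource_filtered

def filter_by_arn(records, audit_resources):
    # Keep every record when no filter is set; otherwise keep only matches.
    return [
        record
        for record in records
        if not audit_resources
        or is_resource_filtered(record["Arn"], audit_resources)
    ]

roles = [
    {"RoleName": "admin", "Arn": "arn:aws:iam::123456789012:role/admin"},
    {"RoleName": "readonly", "Arn": "arn:aws:iam::123456789012:role/readonly"},
]
print(filter_by_arn(roles, ["arn:aws:iam::123456789012:role/admin"]))
```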

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass from dataclasses import dataclass
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class KMS:
self.service = "kms" self.service = "kms"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.keys = [] self.keys = []
self.__threading_call__(self.__list_keys__) self.__threading_call__(self.__list_keys__)
@@ -37,13 +39,16 @@ class KMS:
list_keys_paginator = regional_client.get_paginator("list_keys") list_keys_paginator = regional_client.get_paginator("list_keys")
for page in list_keys_paginator.paginate(): for page in list_keys_paginator.paginate():
for key in page["Keys"]: for key in page["Keys"]:
if not self.audit_resources or (
is_resource_filtered(key["KeyArn"], self.audit_resources)
):
self.keys.append(
Key(
key["KeyId"],
key["KeyArn"],
regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}" f"{regional_client.region} -- {error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"

View File

@@ -4,6 +4,7 @@ from json import loads
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class OpenSearchService:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "opensearch" self.service = "opensearch"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.opensearch_domains = [] self.opensearch_domains = []
self.__threading_call__(self.__list_domain_names__) self.__threading_call__(self.__list_domain_names__)
@@ -35,11 +37,14 @@ class OpenSearchService:
try: try:
domains = regional_client.list_domain_names() domains = regional_client.list_domain_names()
for domain in domains["DomainNames"]: for domain in domains["DomainNames"]:
if not self.audit_resources or (
is_resource_filtered(domain["DomainName"], self.audit_resources)
):
self.opensearch_domains.append(
OpenSearchDomain(
name=domain["DomainName"], region=regional_client.region
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class RDS:
self.service = "rds" self.service = "rds"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.db_instances = [] self.db_instances = []
self.db_snapshots = [] self.db_snapshots = []
@@ -43,32 +45,37 @@ class RDS:
) )
for page in describe_db_instances_paginator.paginate(): for page in describe_db_instances_paginator.paginate():
for instance in page["DBInstances"]: for instance in page["DBInstances"]:
if not self.audit_resources or (
is_resource_filtered(
instance["DBInstanceIdentifier"], self.audit_resources
)
):
if instance["Engine"] != "docdb":
self.db_instances.append(
DBInstance(
id=instance["DBInstanceIdentifier"],
endpoint=instance["Endpoint"]["Address"],
engine=instance["Engine"],
status=instance["DBInstanceStatus"],
public=instance["PubliclyAccessible"],
encrypted=instance["StorageEncrypted"],
auto_minor_version_upgrade=instance[
"AutoMinorVersionUpgrade"
],
backup_retention_period=instance.get(
"BackupRetentionPeriod"
),
cloudwatch_logs=instance.get(
"EnabledCloudwatchLogsExports"
),
deletion_protection=instance["DeletionProtection"],
enhanced_monitoring_arn=instance.get(
"EnhancedMonitoringResourceArn"
),
multi_az=instance["MultiAZ"],
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -82,14 +89,19 @@ class RDS:
) )
for page in describe_db_snapshots_paginator.paginate(): for page in describe_db_snapshots_paginator.paginate():
for snapshot in page["DBSnapshots"]: for snapshot in page["DBSnapshots"]:
if not self.audit_resources or (
is_resource_filtered(
snapshot["DBSnapshotIdentifier"], self.audit_resources
)
):
if snapshot["Engine"] != "docdb":
self.db_snapshots.append(
DBSnapshot(
id=snapshot["DBSnapshotIdentifier"],
instance_id=snapshot["DBInstanceIdentifier"],
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -120,14 +132,20 @@ class RDS:
) )
for page in describe_db_snapshots_paginator.paginate(): for page in describe_db_snapshots_paginator.paginate():
for snapshot in page["DBClusterSnapshots"]: for snapshot in page["DBClusterSnapshots"]:
if not self.audit_resources or (
is_resource_filtered(
snapshot["DBClusterSnapshotIdentifier"],
self.audit_resources,
)
):
if snapshot["Engine"] != "docdb":
self.db_cluster_snapshots.append(
ClusterSnapshot(
id=snapshot["DBClusterSnapshotIdentifier"],
cluster_id=snapshot["DBClusterIdentifier"],
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class Redshift:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "redshift" self.service = "redshift"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = [] self.clusters = []
self.__threading_call__(self.__describe_clusters__) self.__threading_call__(self.__describe_clusters__)
@@ -35,25 +37,30 @@ class Redshift:
list_clusters_paginator = regional_client.get_paginator("describe_clusters") list_clusters_paginator = regional_client.get_paginator("describe_clusters")
for page in list_clusters_paginator.paginate(): for page in list_clusters_paginator.paginate():
for cluster in page["Clusters"]: for cluster in page["Clusters"]:
if not self.audit_resources or (
is_resource_filtered(
cluster["ClusterIdentifier"], self.audit_resources
)
):
cluster_to_append = Cluster(
id=cluster["ClusterIdentifier"],
region=regional_client.region,
)
if (
"PubliclyAccessible" in cluster
and cluster["PubliclyAccessible"]
):
cluster_to_append.public_access = True
if "Endpoint" in cluster and "Address" in cluster["Endpoint"]:
cluster_to_append.endpoint_address = cluster["Endpoint"][
"Address"
]
if (
"AllowVersionUpgrade" in cluster
and cluster["AllowVersionUpgrade"]
):
cluster_to_append.allow_version_upgrade = True
self.clusters.append(cluster_to_append)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -1,6 +1,7 @@
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -10,6 +11,7 @@ class Route53:
self.service = "route53" self.service = "route53"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_partition = audit_info.audited_partition self.audited_partition = audit_info.audited_partition
self.audit_resources = audit_info.audit_resources
self.hosted_zones = {} self.hosted_zones = {}
global_client = generate_regional_clients( global_client = generate_regional_clients(
self.service, audit_info, global_service=True self.service, audit_info, global_service=True
@@ -30,16 +32,20 @@ class Route53:
for page in list_hosted_zones_paginator.paginate(): for page in list_hosted_zones_paginator.paginate():
for hosted_zone in page["HostedZones"]: for hosted_zone in page["HostedZones"]:
hosted_zone_id = hosted_zone["Id"].replace("/hostedzone/", "") hosted_zone_id = hosted_zone["Id"].replace("/hostedzone/", "")
arn = f"arn:{self.audited_partition}:route53:::{hosted_zone_id}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
hosted_zone_name = hosted_zone["Name"]
private_zone = hosted_zone["Config"]["PrivateZone"]
self.hosted_zones[hosted_zone_id] = HostedZone(
id=hosted_zone_id,
name=hosted_zone_name,
private_zone=private_zone,
arn=arn,
region=self.region,
)
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass from dataclasses import dataclass
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class S3:
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.client = self.session.client(self.service) self.client = self.session.client(self.service)
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition self.audited_partition = audit_info.audited_partition
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.buckets = self.__list_buckets__(audit_info) self.buckets = self.__list_buckets__(audit_info)
@@ -50,12 +52,17 @@ class S3:
bucket_region = "us-east-1" bucket_region = "us-east-1"
# Arn # Arn
arn = f"arn:{self.audited_partition}:s3:::{bucket['Name']}" arn = f"arn:{self.audited_partition}:s3:::{bucket['Name']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
# Check if there are filter regions
if audit_info.audited_regions:
if bucket_region in audit_info.audited_regions:
buckets.append(
Bucket(bucket["Name"], arn, bucket_region)
)
else:
buckets.append(Bucket(bucket["Name"], arn, bucket_region))
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{bucket_region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{bucket_region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
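
For S3 the bucket ARN is built as arn:{partition}:s3:::{bucket_name} and the resource filter wraps the pre-existing region filter, so a bucket has to pass both checks to be audited. A compact sketch of that combined decision; the bucket data and filter values are made up:

```
from prowler.lib.scan_filters.scan_filters import is_resource_filtered

def keep_bucket(name, bucket_region, partition, audit_resources, audited_regions):
    # Sketch of the combined resource + region filtering applied to buckets.
    arn = f"arn:{partition}:s3:::{name}"
    if audit_resources and not is_resource_filtered(arn, audit_resources):
        return False
    if audited_regions and bucket_region not in audited_regions:
        return False
    return True

print(keep_bucket("prowler-logs", "us-east-1", "aws", ["arn:aws:s3:::prowler-logs"], None))
```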

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class SageMaker:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "sagemaker" self.service = "sagemaker"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.sagemaker_notebook_instances = [] self.sagemaker_notebook_instances = []
self.sagemaker_models = [] self.sagemaker_models = []
@@ -42,13 +44,19 @@ class SageMaker:
) )
for page in list_notebook_instances_paginator.paginate(): for page in list_notebook_instances_paginator.paginate():
for notebook_instance in page["NotebookInstances"]: for notebook_instance in page["NotebookInstances"]:
if not self.audit_resources or (
is_resource_filtered(
notebook_instance["NotebookInstanceArn"],
self.audit_resources,
)
):
self.sagemaker_notebook_instances.append(
NotebookInstance(
name=notebook_instance["NotebookInstanceName"],
region=regional_client.region,
arn=notebook_instance["NotebookInstanceArn"],
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -60,13 +68,16 @@ class SageMaker:
list_models_paginator = regional_client.get_paginator("list_models") list_models_paginator = regional_client.get_paginator("list_models")
for page in list_models_paginator.paginate(): for page in list_models_paginator.paginate():
for model in page["Models"]: for model in page["Models"]:
if not self.audit_resources or (
is_resource_filtered(model["ModelArn"], self.audit_resources)
):
self.sagemaker_models.append(
Model(
name=model["ModelName"],
region=regional_client.region,
arn=model["ModelArn"],
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -80,13 +91,18 @@ class SageMaker:
) )
for page in list_training_jobs_paginator.paginate(): for page in list_training_jobs_paginator.paginate():
for training_job in page["TrainingJobSummaries"]: for training_job in page["TrainingJobSummaries"]:
if not self.audit_resources or (
is_resource_filtered(
training_job["TrainingJobArn"], self.audit_resources
)
):
self.sagemaker_training_jobs.append(
TrainingJob(
name=training_job["TrainingJobName"],
region=regional_client.region,
arn=training_job["TrainingJobArn"],
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class SecretsManager:
self.service = "secretsmanager" self.service = "secretsmanager"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.secrets = {} self.secrets = {}
self.__threading_call__(self.__list_secrets__) self.__threading_call__(self.__list_secrets__)
@@ -34,15 +36,18 @@ class SecretsManager:
list_secrets_paginator = regional_client.get_paginator("list_secrets") list_secrets_paginator = regional_client.get_paginator("list_secrets")
for page in list_secrets_paginator.paginate(): for page in list_secrets_paginator.paginate():
for secret in page["SecretList"]: for secret in page["SecretList"]:
if not self.audit_resources or (
is_resource_filtered(secret["ARN"], self.audit_resources)
):
self.secrets[secret["Name"]] = Secret(
arn=secret["ARN"],
name=secret["Name"],
region=regional_client.region,
)
if "RotationEnabled" in secret:
self.secrets[secret["Name"]].rotation_enabled = secret[
"RotationEnabled"
]
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass from dataclasses import dataclass
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class SecurityHub:
self.service = "securityhub" self.service = "securityhub"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.securityhubs = [] self.securityhubs = []
self.__threading_call__(self.__describe_hub__) self.__threading_call__(self.__describe_hub__)
@@ -51,16 +53,29 @@ class SecurityHub:
else: else:
# SecurityHub is active so get HubArn # SecurityHub is active so get HubArn
hub_arn = regional_client.describe_hub()["HubArn"] hub_arn = regional_client.describe_hub()["HubArn"]
if not self.audit_resources or (
is_resource_filtered(hub_arn, self.audit_resources)
):
hub_id = hub_arn.split("/")[1]
self.securityhubs.append(
SecurityHubHub(
hub_arn,
hub_id,
"ACTIVE",
standards,
regional_client.region,
)
)
else:
self.securityhubs.append(
SecurityHubHub(
"",
"Security Hub",
"NOT_AVAILABLE",
"",
regional_client.region,
)
)
except Exception as error: except Exception as error:
# Check if Account is subscribed to Security Hub # Check if Account is subscribed to Security Hub

View File

@@ -4,6 +4,7 @@ from json import loads
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class SNS:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "sns" self.service = "sns"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.topics = [] self.topics = []
self.__threading_call__(self.__list_topics__) self.__threading_call__(self.__list_topics__)
@@ -35,13 +37,18 @@ class SNS:
list_topics_paginator = regional_client.get_paginator("list_topics") list_topics_paginator = regional_client.get_paginator("list_topics")
for page in list_topics_paginator.paginate(): for page in list_topics_paginator.paginate():
for topic_arn in page["Topics"]: for topic_arn in page["Topics"]:
if not self.audit_resources or (
is_resource_filtered(
topic_arn["TopicArn"], self.audit_resources
)
):
self.topics.append(
Topic(
name=topic_arn["TopicArn"].rsplit(":", 1)[1],
arn=topic_arn["TopicArn"],
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from json import loads
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class SQS:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "sqs" self.service = "sqs"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.queues = [] self.queues = []
self.__threading_call__(self.__list_queues__) self.__threading_call__(self.__list_queues__)
@@ -36,12 +38,15 @@ class SQS:
for page in list_queues_paginator.paginate(): for page in list_queues_paginator.paginate():
if "QueueUrls" in page: if "QueueUrls" in page:
for queue in page["QueueUrls"]: for queue in page["QueueUrls"]:
if not self.audit_resources or (
is_resource_filtered(queue, self.audit_resources)
):
self.queues.append(
Queue(
id=queue,
region=regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -5,6 +5,7 @@ from enum import Enum
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class SSM:
self.service = "ssm" self.service = "ssm"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.documents = {} self.documents = {}
self.compliance_resources = {} self.compliance_resources = {}
@@ -53,12 +55,15 @@ class SSM:
list_documents_paginator = regional_client.get_paginator("list_documents") list_documents_paginator = regional_client.get_paginator("list_documents")
for page in list_documents_paginator.paginate(**list_documents_parameters): for page in list_documents_paginator.paginate(**list_documents_parameters):
for document in page["DocumentIdentifiers"]: for document in page["DocumentIdentifiers"]:
if not self.audit_resources or (
is_resource_filtered(document["Name"], self.audit_resources)
):
document_name = document["Name"]
self.documents[document_name] = Document( self.documents[document_name] = Document(
name=document_name, name=document_name,
region=regional_client.region, region=regional_client.region,
) )
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass from dataclasses import dataclass
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class VPC:
self.service = "ec2" self.service = "ec2"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.vpcs = [] self.vpcs = []
self.vpc_peering_connections = [] self.vpc_peering_connections = []
@@ -43,14 +45,17 @@ class VPC:
describe_vpcs_paginator = regional_client.get_paginator("describe_vpcs") describe_vpcs_paginator = regional_client.get_paginator("describe_vpcs")
for page in describe_vpcs_paginator.paginate(): for page in describe_vpcs_paginator.paginate():
for vpc in page["Vpcs"]: for vpc in page["Vpcs"]:
if not self.audit_resources or (
is_resource_filtered(vpc["VpcId"], self.audit_resources)
):
self.vpcs.append(
VPCs(
vpc["VpcId"],
vpc["IsDefault"],
vpc["CidrBlock"],
regional_client.region,
)
)
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -64,16 +69,21 @@ class VPC:
) )
for page in describe_vpc_peering_connections_paginator.paginate(): for page in describe_vpc_peering_connections_paginator.paginate():
for conn in page["VpcPeeringConnections"]: for conn in page["VpcPeeringConnections"]:
if not self.audit_resources or (
    is_resource_filtered(
        conn["VpcPeeringConnectionId"], self.audit_resources
    )
):
    self.vpc_peering_connections.append(
        VpcPeeringConnection(
            conn["VpcPeeringConnectionId"],
            conn["AccepterVpcInfo"]["VpcId"],
            conn["AccepterVpcInfo"]["CidrBlock"],
            conn["RequesterVpcInfo"]["VpcId"],
            conn["RequesterVpcInfo"]["CidrBlock"],
            regional_client.region,
        )
    )
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -140,19 +150,24 @@ class VPC:
) )
for page in describe_vpc_endpoints_paginator.paginate(): for page in describe_vpc_endpoints_paginator.paginate():
for endpoint in page["VpcEndpoints"]: for endpoint in page["VpcEndpoints"]:
if not self.audit_resources or (
    is_resource_filtered(
        endpoint["VpcEndpointId"], self.audit_resources
    )
):
    endpoint_policy = None
    if endpoint.get("PolicyDocument"):
        endpoint_policy = json.loads(endpoint["PolicyDocument"])
    self.vpc_endpoints.append(
        VpcEndpoint(
            endpoint["VpcEndpointId"],
            endpoint["VpcId"],
            endpoint["State"],
            endpoint_policy,
            endpoint["OwnerId"],
            regional_client.region,
        )
    )
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -167,14 +182,19 @@ class VPC:
for page in describe_vpc_endpoint_services_paginator.paginate(): for page in describe_vpc_endpoint_services_paginator.paginate():
for endpoint in page["ServiceDetails"]: for endpoint in page["ServiceDetails"]:
if endpoint["Owner"] != "amazon": if endpoint["Owner"] != "amazon":
if not self.audit_resources or (
    is_resource_filtered(
        endpoint["ServiceId"], self.audit_resources
    )
):
    self.vpc_endpoint_services.append(
        VpcEndpointService(
            endpoint["ServiceId"],
            endpoint["ServiceName"],
            endpoint["Owner"],
            regional_client.region,
        )
    )
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class WAF:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "waf-regional" self.service = "waf-regional"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.web_acls = [] self.web_acls = []
self.__threading_call__(self.__list_web_acls__) self.__threading_call__(self.__list_web_acls__)
@@ -32,14 +34,17 @@ class WAF:
logger.info("WAF - Listing Regional Web ACLs...") logger.info("WAF - Listing Regional Web ACLs...")
try: try:
for waf in regional_client.list_web_acls()["WebACLs"]: for waf in regional_client.list_web_acls()["WebACLs"]:
if not self.audit_resources or (
    is_resource_filtered(waf["WebACLId"], self.audit_resources)
):
    self.web_acls.append(
        WebAcl(
            name=waf["Name"],
            id=waf["WebACLId"],
            albs=[],
            region=regional_client.region,
        )
    )
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class WAFv2:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "wafv2" self.service = "wafv2"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.web_acls = [] self.web_acls = []
self.__threading_call__(self.__list_web_acls__) self.__threading_call__(self.__list_web_acls__)
@@ -32,15 +34,18 @@ class WAFv2:
logger.info("WAFv2 - Listing Regional Web ACLs...") logger.info("WAFv2 - Listing Regional Web ACLs...")
try: try:
for wafv2 in regional_client.list_web_acls(Scope="REGIONAL")["WebACLs"]: for wafv2 in regional_client.list_web_acls(Scope="REGIONAL")["WebACLs"]:
if not self.audit_resources or (
    is_resource_filtered(wafv2["ARN"], self.audit_resources)
):
    self.web_acls.append(
        WebAclv2(
            arn=wafv2["ARN"],
            name=wafv2["Name"],
            id=wafv2["Id"],
            albs=[],
            region=regional_client.region,
        )
    )
except Exception as error: except Exception as error:
logger.error( logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel from pydantic import BaseModel
from prowler.lib.logger import logger from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class WorkSpaces:
def __init__(self, audit_info): def __init__(self, audit_info):
self.service = "workspaces" self.service = "workspaces"
self.session = audit_info.audit_session self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info) self.regional_clients = generate_regional_clients(self.service, audit_info)
self.workspaces = [] self.workspaces = []
self.__threading_call__(self.__describe_workspaces__) self.__threading_call__(self.__describe_workspaces__)
@@ -35,20 +37,25 @@ class WorkSpaces:
) )
for page in describe_workspaces_paginator.paginate(): for page in describe_workspaces_paginator.paginate():
for workspace in page["Workspaces"]: for workspace in page["Workspaces"]:
if not self.audit_resources or (
    is_resource_filtered(
        workspace["WorkspaceId"], self.audit_resources
    )
):
    workspace_to_append = WorkSpace(
        id=workspace["WorkspaceId"], region=regional_client.region
    )
    if (
        "UserVolumeEncryptionEnabled" in workspace
        and workspace["UserVolumeEncryptionEnabled"]
    ):
        workspace_to_append.user_volume_encryption_enabled = True
    if (
        "RootVolumeEncryptionEnabled" in workspace
        and workspace["RootVolumeEncryptionEnabled"]
    ):
        workspace_to_append.root_volume_encryption_enabled = True
    self.workspaces.append(workspace_to_append)
except Exception as error: except Exception as error:
logger.error( logger.error(

View File

@@ -96,7 +96,7 @@ Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESE
""" """
set_aws_audit_info returns the AWS_Audit_Info set_aws_audit_info returns the AWS_Audit_Info
""" """
logger.info("Setting Azure session ...") logger.info("Setting AWS session ...")
# Assume Role Options # Assume Role Options
input_role = arguments.get("role") input_role = arguments.get("role")
@@ -236,6 +236,11 @@ Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESE
if not arguments.get("only_logs"): if not arguments.get("only_logs"):
self.print_audit_credentials(current_audit_info) self.print_audit_credentials(current_audit_info)
# Parse Scan Tags
input_scan_tags = arguments.get("scan_tags")
current_audit_info.audit_resources = get_tagged_resources(
input_scan_tags, current_audit_info
)
return current_audit_info return current_audit_info
def set_azure_audit_info(self, arguments) -> Azure_Audit_Info: def set_azure_audit_info(self, arguments) -> Azure_Audit_Info:
@@ -287,3 +292,33 @@ def set_provider_audit_info(provider: str, arguments: dict):
sys.exit() sys.exit()
else: else:
return provider_audit_info return provider_audit_info
def get_tagged_resources(input_scan_tags: list, current_audit_info: AWS_Audit_Info):
"""
get_tagged_resources returns a list of the resources that are going to be scanned based on the given input tags
"""
try:
scan_tags = []
tagged_resources = []
if input_scan_tags:
for tag in input_scan_tags:
key = tag.split("=")[0]
value = tag.split("=")[1]
scan_tags.append({"Key": key, "Values": [value]})
# Get Resources with scan_tags for all regions
for region in current_audit_info.audited_regions:
client = current_audit_info.audit_session.client(
"resourcegroupstaggingapi", region_name=region
)
get_resources_paginator = client.get_paginator("get_resources")
for page in get_resources_paginator.paginate(TagFilters=scan_tags):
for resource in page["ResourceTagMappingList"]:
tagged_resources.append(resource["ResourceARN"])
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit()
else:
return tagged_resources
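For reference, a standalone sketch of the Resource Groups Tagging API call that `get_tagged_resources` makes per region; the session, region, and tag values below are placeholders, since Prowler uses `audit_info.audit_session` and loops over `audit_info.audited_regions`. Note that multiple `TagFilters` entries are ANDed by the API, which is why a resource must carry every tag passed to `--scan-tags`:

```python
import boto3

# Placeholder session and region, for illustration only.
session = boto3.session.Session()
client = session.client("resourcegroupstaggingapi", region_name="eu-west-1")

# "--scan-tags Environment=dev Project=prowler" is parsed into:
tag_filters = [
    {"Key": "Environment", "Values": ["dev"]},
    {"Key": "Project", "Values": ["prowler"]},
]

# Collect the ARNs of every resource matching all tag filters.
tagged_arns = []
paginator = client.get_paginator("get_resources")
for page in paginator.paginate(TagFilters=tag_filters):
    for resource in page["ResourceTagMappingList"]:
        tagged_arns.append(resource["ResourceARN"])
```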

View File

@@ -54,6 +54,7 @@ class Test_Parser:
assert not parsed.output_bucket_no_assume assert not parsed.output_bucket_no_assume
assert not parsed.shodan assert not parsed.shodan
assert not parsed.allowlist_file assert not parsed.allowlist_file
assert not parsed.scan_tags
def test_default_parser_no_arguments_azure(self): def test_default_parser_no_arguments_azure(self):
provider = "azure" provider = "azure"
@@ -795,6 +796,24 @@ class Test_Parser:
parsed = self.parser.parse(command) parsed = self.parser.parse(command)
assert parsed.allowlist_file == allowlist_file assert parsed.allowlist_file == allowlist_file
def test_aws_parser_scan_tags_short(self):
argument = "-t"
scan_tag = "Key=Value"
command = [prowler_command, argument, scan_tag]
parsed = self.parser.parse(command)
assert len(parsed.scan_tags) == 1
assert scan_tag in parsed.scan_tags
def test_aws_parser_scan_tags_long(self):
argument = "--scan-tags"
scan_tag1 = "Key=Value"
scan_tag2 = "Key2=Value2"
command = [prowler_command, argument, scan_tag1, scan_tag2]
parsed = self.parser.parse(command)
assert len(parsed.scan_tags) == 2
assert scan_tag1 in parsed.scan_tags
assert scan_tag2 in parsed.scan_tags
def test_parser_azure_auth_sp(self): def test_parser_azure_auth_sp(self):
argument = "--sp-env-auth" argument = "--sp-env-auth"
command = [prowler_command, "azure", argument] command = [prowler_command, "azure", argument]

View File

@@ -81,6 +81,7 @@ class Test_Outputs:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"], audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
test_output_modes = [ test_output_modes = [
["csv"], ["csv"],
@@ -258,6 +259,7 @@ class Test_Outputs:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"], audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
finding = Check_Report( finding = Check_Report(
load_check_metadata( load_check_metadata(
@@ -327,6 +329,7 @@ class Test_Outputs:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"], audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
# Creat mock bucket # Creat mock bucket
bucket_name = "test_bucket" bucket_name = "test_bucket"
@@ -429,6 +432,7 @@ class Test_Outputs:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"], audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
finding = Check_Report( finding = Check_Report(
load_check_metadata( load_check_metadata(

View File

@@ -0,0 +1,17 @@
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
class Test_Scan_Filters:
def test_is_resource_filtered(self):
audit_resources = [
"arn:aws:iam::123456789012:user/test_user",
"arn:aws:s3:::test_bucket",
]
assert is_resource_filtered(
"arn:aws:iam::123456789012:user/test_user", audit_resources
)
assert not is_resource_filtered(
"arn:aws:iam::123456789012:user/test1", audit_resources
)
assert is_resource_filtered("test_bucket", audit_resources)
assert is_resource_filtered("arn:aws:s3:::test_bucket", audit_resources)

View File

@@ -56,6 +56,7 @@ class Test_AWS_Provider:
), ),
audited_regions=audited_regions, audited_regions=audited_regions,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
# Call assume_role # Call assume_role
@@ -109,6 +110,7 @@ class Test_AWS_Provider:
assumed_role_info=None, assumed_role_info=None,
audited_regions=audited_regions, audited_regions=audited_regions,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
generate_regional_clients_response = generate_regional_clients( generate_regional_clients_response = generate_regional_clients(
"ec2", audit_info "ec2", audit_info
@@ -137,6 +139,7 @@ class Test_AWS_Provider:
assumed_role_info=None, assumed_role_info=None,
audited_regions=audited_regions, audited_regions=audited_regions,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
generate_regional_clients_response = generate_regional_clients( generate_regional_clients_response = generate_regional_clients(
"route53", audit_info, global_service=True "route53", audit_info, global_service=True
@@ -164,6 +167,7 @@ class Test_AWS_Provider:
assumed_role_info=None, assumed_role_info=None,
audited_regions=audited_regions, audited_regions=audited_regions,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
generate_regional_clients_response = generate_regional_clients( generate_regional_clients_response = generate_regional_clients(
"shield", audit_info, global_service=True "shield", audit_info, global_service=True

View File

@@ -32,6 +32,7 @@ class Test_Allowlist:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_ACM_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_APIGateway_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -58,6 +58,7 @@ class Test_ApiGatewayV2_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -29,6 +29,7 @@ class Test_AutoScaling_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -42,6 +42,7 @@ class Test_awslambda_function_invoke_api_operations_cloudtrail_logging_enabled:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -74,6 +74,7 @@ class Test_Lambda_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -151,6 +151,7 @@ class Test_CloudFormation_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -161,6 +161,7 @@ class Test_CloudFront_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -26,6 +26,7 @@ class Test_Cloudtrail_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-1", "us-east-1"], audited_regions=["eu-west-1", "us-east-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_CloudWatch_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -70,6 +70,7 @@ class Test_Codebuild_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_Config_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-1", "us-east-1"], audited_regions=["eu-west-1", "us-east-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_DynamoDB_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -36,6 +36,7 @@ class Test_EC2_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=["eu-west-1", "us-east-1"], audited_regions=["eu-west-1", "us-east-1"],
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -87,6 +87,7 @@ class Test_ECR_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -39,6 +39,7 @@ class Test_ECS_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -70,6 +70,7 @@ class Test_EFS:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -44,6 +44,7 @@ class Test_EKS_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_ELB_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_ELBv2_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -30,6 +30,7 @@ class Test_emr_cluster_publicly_accesible:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -66,6 +66,7 @@ class Test_EMR_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -65,6 +65,7 @@ class Test_GlobalAccelerator_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -135,6 +135,7 @@ class Test_Glue_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -49,6 +49,7 @@ class Test_GuardDuty_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -30,6 +30,7 @@ class Test_IAM_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -29,6 +29,7 @@ class Test_ACM_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -115,6 +115,7 @@ class Test_OpenSearchService_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -27,6 +27,7 @@ class Test_RDS_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -75,6 +75,7 @@ class Test_Redshift_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -43,6 +43,7 @@ class Test_Route53_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -82,6 +82,7 @@ class Test_Route53_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -29,6 +29,7 @@ class Test_s3_account_level_public_access_blocks:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -30,6 +30,7 @@ class Test_s3_bucket_public_access:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -30,6 +30,7 @@ class Test_S3_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -116,6 +116,7 @@ class Test_SageMaker_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -45,6 +45,7 @@ class Test_SecretsManager_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -42,6 +42,7 @@ class Test_shield_advanced_protection_in_associated_elastic_ips:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -29,6 +29,7 @@ class Test_shield_advanced_protection_in_classic_load_balancers:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -42,6 +42,7 @@ class Test_shield_advanced_protection_in_internet_facing_load_balancers:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

View File

@@ -52,6 +52,7 @@ class Test_Shield_Service:
assumed_role_info=None, assumed_role_info=None,
audited_regions=None, audited_regions=None,
organizations_metadata=None, organizations_metadata=None,
audit_resources=None,
) )
return audit_info return audit_info

Some files were not shown because too many files have changed in this diff.