feat(scanner): Tag-based scan (#1751)

Co-authored-by: Toni de la Fuente <toni@blyx.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
Authored by Sergio Garcia on 2023-01-31 12:19:29 +01:00, committed by GitHub
parent 0d1a5318ec
commit 3ac4dc8392
110 changed files with 1224 additions and 635 deletions

View File

@@ -0,0 +1,9 @@
# Tags-based Scan
Prowler allows you to scan only the resources that contain specific tags. Use the `-t`/`--scan-tags` flag followed by one or more `Key=Value` pairs separated by spaces:
```
prowler aws --scan-tags Environment=dev Project=prowler
```
This example will only scan the resources that contain both tags.
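The flag can be combined with the rest of the Prowler options; for instance (assuming the standard `-s/--services` flag is available), limiting the scan to a single service:
```
prowler aws -s ec2 --scan-tags Environment=dev Project=prowler
```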

View File

@@ -44,6 +44,7 @@ nav:
- Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
- AWS CloudShell: tutorials/aws/cloudshell.md
- Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
- Tag-based Scan: tutorials/aws/tag-based-scan.md
- Azure:
- Authentication: tutorials/azure/authentication.md
- Subscriptions: tutorials/azure/subscriptions.md

View File

@@ -343,6 +343,15 @@ Detailed documentation at https://docs.prowler.cloud
default=None,
help="Path for allowlist yaml file. See example prowler/config/allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
)
# Tag-based scan
audit_tags_subparser = aws_parser.add_argument_group("Tags-based Scan")
audit_tags_subparser.add_argument(
"-t",
"--scan-tags",
nargs="+",
default=None,
help="Scan only resources with specific tags (Key=Value), e.g., Environment=dev Project=prowler",
)
def __init_azure_parser__(self):
"""Init the Azure Provider CLI parser"""

View File

View File

@@ -0,0 +1,17 @@
from prowler.lib.logger import logger
def is_resource_filtered(resource: str, audit_resources: list) -> bool:
    """
    Check whether the resource passed as argument matches any of the audit_resources filters.
    Returns True if the resource is within the input filters and False otherwise.
    """
    try:
        # Plain substring check against the stringified filter list: the resource
        # identifier (ARN, name or ID) only has to appear somewhere in audit_resources.
        if resource in str(audit_resources):
            return True
        return False
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error} ({resource})"
        )
        return False
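Because the check is a substring match against the stringified list, any identifier collected for the filter makes the resource pass, and everything else is skipped by the services. A quick illustration (the ARNs are made up for the example):
```
from prowler.lib.scan_filters.scan_filters import is_resource_filtered

audit_resources = ["arn:aws:lambda:eu-west-1:123456789012:function:tagged-function"]

print(is_resource_filtered(
    "arn:aws:lambda:eu-west-1:123456789012:function:tagged-function", audit_resources
))  # True: the ARN appears in the filter list, so the resource is audited
print(is_resource_filtered(
    "arn:aws:lambda:eu-west-1:123456789012:function:other-function", audit_resources
))  # False: no match, so the service skips this resource
```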

View File

@@ -23,7 +23,7 @@ def parse_allowlist_file(audit_info, allowlist_file):
s3_client.get_object(Bucket=bucket, Key=key)["Body"]
)["Allowlist"]
# Check if file is a Lambda Function ARN
elif re.search("^arn:(\w+):lambda:", allowlist_file):
elif re.search(r"^arn:(\w+):lambda:", allowlist_file):
lambda_region = allowlist_file.split(":")[3]
lambda_client = audit_info.audit_session.client(
"lambda", region_name=lambda_region

View File

@@ -21,6 +21,7 @@ current_audit_info = AWS_Audit_Info(
session_duration=None,
external_id=None,
),
audit_resources=None,
audited_regions=None,
organizations_metadata=None,
audit_metadata=None,

View File

@@ -42,5 +42,6 @@ class AWS_Audit_Info:
credentials: AWS_Credentials
assumed_role_info: AWS_Assume_Role
audited_regions: list
audit_resources: list
organizations_metadata: AWS_Organizations_Info
audit_metadata: Optional[Any] = None

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class AccessAnalyzer:
self.service = "accessanalyzer"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.analyzers = []
self.__threading_call__(self.__list_analyzers__)
@@ -36,18 +38,21 @@ class AccessAnalyzer:
list_analyzers_paginator = regional_client.get_paginator("list_analyzers")
analyzer_count = 0
for page in list_analyzers_paginator.paginate():
analyzer_count += len(page["analyzers"])
for analyzer in page["analyzers"]:
self.analyzers.append(
Analyzer(
arn=analyzer["arn"],
name=analyzer["name"],
status=analyzer["status"],
tags=str(analyzer["tags"]),
type=analyzer["type"],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(analyzer["arn"], self.audit_resources)
):
analyzer_count += 1
self.analyzers.append(
Analyzer(
arn=analyzer["arn"],
name=analyzer["name"],
status=analyzer["status"],
tags=str(analyzer["tags"]),
type=analyzer["type"],
region=regional_client.region,
)
)
)
# No analyzers in region
if analyzer_count == 0:
self.analyzers.append(

View File

@@ -3,6 +3,7 @@ from dataclasses import dataclass
from prowler.config.config import timestamp_utc
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class ACM:
self.service = "acm"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.certificates = []
self.__threading_call__(self.__list_certificates__)
@@ -36,15 +38,20 @@ class ACM:
"list_certificates"
)
for page in list_certificates_paginator.paginate():
for analyzer in page["CertificateSummaryList"]:
self.certificates.append(
Certificate(
analyzer["CertificateArn"],
analyzer["DomainName"],
False,
regional_client.region,
for certificate in page["CertificateSummaryList"]:
if not self.audit_resources or (
is_resource_filtered(
certificate["CertificateArn"], self.audit_resources
)
):
self.certificates.append(
Certificate(
certificate["CertificateArn"],
certificate["DomainName"],
False,
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class APIGateway:
self.service = "apigateway"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.rest_apis = []
@@ -38,14 +40,17 @@ class APIGateway:
for page in get_rest_apis_paginator.paginate():
for apigw in page["items"]:
arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::/apis/{apigw['id']}"
self.rest_apis.append(
RestAPI(
apigw["id"],
arn,
regional_client.region,
apigw["name"],
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
self.rest_apis.append(
RestAPI(
apigw["id"],
arn,
regional_client.region,
apigw["name"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class ApiGatewayV2:
self.service = "apigatewayv2"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.apis = []
self.__threading_call__(self.__get_apis__)
@@ -35,13 +37,16 @@ class ApiGatewayV2:
get_rest_apis_paginator = regional_client.get_paginator("get_apis")
for page in get_rest_apis_paginator.paginate():
for apigw in page["Items"]:
self.apis.append(
API(
apigw["ApiId"],
regional_client.region,
apigw["Name"],
if not self.audit_resources or (
is_resource_filtered(apigw["ApiId"], self.audit_resources)
):
self.apis.append(
API(
apigw["ApiId"],
regional_client.region,
apigw["Name"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class AppStream:
self.service = "appstream"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.fleets = []
self.__threading_call__(self.__describe_fleets__)
@@ -33,25 +35,28 @@ class AppStream:
describe_fleets_paginator = regional_client.get_paginator("describe_fleets")
for page in describe_fleets_paginator.paginate():
for fleet in page["Fleets"]:
self.fleets.append(
Fleet(
arn=fleet["Arn"],
name=fleet["Name"],
max_user_duration_in_seconds=fleet[
"MaxUserDurationInSeconds"
],
disconnect_timeout_in_seconds=fleet[
"DisconnectTimeoutInSeconds"
],
idle_disconnect_timeout_in_seconds=fleet[
"IdleDisconnectTimeoutInSeconds"
],
enable_default_internet_access=fleet[
"EnableDefaultInternetAccess"
],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(fleet["Arn"], self.audit_resources)
):
self.fleets.append(
Fleet(
arn=fleet["Arn"],
name=fleet["Name"],
max_user_duration_in_seconds=fleet[
"MaxUserDurationInSeconds"
],
disconnect_timeout_in_seconds=fleet[
"DisconnectTimeoutInSeconds"
],
idle_disconnect_timeout_in_seconds=fleet[
"IdleDisconnectTimeoutInSeconds"
],
enable_default_internet_access=fleet[
"EnableDefaultInternetAccess"
],
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class AutoScaling:
self.service = "autoscaling"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.launch_configurations = []
self.__threading_call__(self.__describe_launch_configurations__)
@@ -35,15 +37,21 @@ class AutoScaling:
)
for page in describe_launch_configurations_paginator.paginate():
for configuration in page["LaunchConfigurations"]:
self.launch_configurations.append(
LaunchConfiguration(
if not self.audit_resources or (
is_resource_filtered(
configuration["LaunchConfigurationARN"],
configuration["LaunchConfigurationName"],
configuration["UserData"],
configuration["ImageId"],
regional_client.region,
self.audit_resources,
)
):
self.launch_configurations.append(
LaunchConfiguration(
configuration["LaunchConfigurationARN"],
configuration["LaunchConfigurationName"],
configuration["UserData"],
configuration["ImageId"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(

View File

@@ -10,6 +10,7 @@ from botocore.client import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -19,6 +20,7 @@ class Lambda:
self.service = "lambda"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.functions = {}
self.__threading_call__(self.__list_functions__)
@@ -44,18 +46,23 @@ class Lambda:
list_functions_paginator = regional_client.get_paginator("list_functions")
for page in list_functions_paginator.paginate():
for function in page["Functions"]:
lambda_name = function["FunctionName"]
lambda_arn = function["FunctionArn"]
lambda_runtime = function["Runtime"]
self.functions[lambda_name] = Function(
name=lambda_name,
arn=lambda_arn,
runtime=lambda_runtime,
region=regional_client.region,
)
if "Environment" in function:
lambda_environment = function["Environment"]["Variables"]
self.functions[lambda_name].environment = lambda_environment
if not self.audit_resources or (
is_resource_filtered(
function["FunctionArn"], self.audit_resources
)
):
lambda_name = function["FunctionName"]
lambda_arn = function["FunctionArn"]
lambda_runtime = function["Runtime"]
self.functions[lambda_name] = Function(
name=lambda_name,
arn=lambda_arn,
runtime=lambda_runtime,
region=regional_client.region,
)
if "Environment" in function:
lambda_environment = function["Environment"]["Variables"]
self.functions[lambda_name].environment = lambda_environment
except Exception as error:
logger.error(

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class CloudFormation:
self.service = "cloudformation"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.stacks = []
self.__threading_call__(self.__describe_stacks__)
@@ -35,20 +37,23 @@ class CloudFormation:
describe_stacks_paginator = regional_client.get_paginator("describe_stacks")
for page in describe_stacks_paginator.paginate():
for stack in page["Stacks"]:
outputs = []
if "Outputs" in stack:
for output in stack["Outputs"]:
outputs.append(
f"{output['OutputKey']}:{output['OutputValue']}"
if not self.audit_resources or (
is_resource_filtered(stack["StackId"], self.audit_resources)
):
outputs = []
if "Outputs" in stack:
for output in stack["Outputs"]:
outputs.append(
f"{output['OutputKey']}:{output['OutputValue']}"
)
self.stacks.append(
Stack(
arn=stack["StackId"],
name=stack["StackName"],
outputs=outputs,
region=regional_client.region,
)
self.stacks.append(
Stack(
arn=stack["StackId"],
name=stack["StackName"],
outputs=outputs,
region=regional_client.region,
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ from dataclasses import dataclass
from enum import Enum
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class CloudFront:
self.service = "cloudfront"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
global_client = generate_regional_clients(
self.service, audit_info, global_service=True
)
@@ -34,16 +36,19 @@ class CloudFront:
for page in list_ditributions_paginator.paginate():
if "Items" in page["DistributionList"]:
for item in page["DistributionList"]["Items"]:
distribution_id = item["Id"]
distribution_arn = item["ARN"]
origins = item["Origins"]["Items"]
distribution = Distribution(
arn=distribution_arn,
id=distribution_id,
origins=origins,
region=region,
)
distributions[distribution_id] = distribution
if not self.audit_resources or (
is_resource_filtered(item["ARN"], self.audit_resources)
):
distribution_id = item["Id"]
distribution_arn = item["ARN"]
origins = item["Origins"]["Items"]
distribution = Distribution(
arn=distribution_arn,
id=distribution_id,
origins=origins,
region=region,
)
distributions[distribution_id] = distribution
return distributions

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class Cloudtrail:
self.service = "cloudtrail"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition
self.region = audit_info.profile_region
self.regional_clients = generate_regional_clients(self.service, audit_info)
@@ -36,8 +38,12 @@ class Cloudtrail:
logger.info("Cloudtrail - Getting trails...")
try:
describe_trails = regional_client.describe_trails()["trailList"]
if describe_trails:
for trail in describe_trails:
trails_count = 0
for trail in describe_trails:
if not self.audit_resources or (
is_resource_filtered(trail["TrailARN"], self.audit_resources)
):
trails_count += 1
kms_key_id = None
log_group_arn = None
if "KmsKeyId" in trail:
@@ -62,7 +68,7 @@ class Cloudtrail:
data_events=[],
)
)
else:
if trails_count == 0:
self.trails.append(
Trail(
name=None,

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class CloudWatch:
self.service = "cloudwatch"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.region = list(
generate_regional_clients(
self.service, audit_info, global_service=True
@@ -38,15 +40,18 @@ class CloudWatch:
describe_alarms_paginator = regional_client.get_paginator("describe_alarms")
for page in describe_alarms_paginator.paginate():
for alarm in page["MetricAlarms"]:
self.metric_alarms.append(
MetricAlarm(
alarm["AlarmArn"],
alarm["AlarmName"],
alarm["MetricName"],
alarm["Namespace"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(alarm["AlarmArn"], self.audit_resources)
):
self.metric_alarms.append(
MetricAlarm(
alarm["AlarmArn"],
alarm["AlarmName"],
alarm["MetricName"],
alarm["Namespace"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -59,6 +64,7 @@ class Logs:
self.service = "logs"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.metric_filters = []
self.log_groups = []
@@ -85,15 +91,18 @@ class Logs:
)
for page in describe_metric_filters_paginator.paginate():
for filter in page["metricFilters"]:
self.metric_filters.append(
MetricFilter(
filter["filterName"],
filter["metricTransformations"][0]["metricName"],
filter["filterPattern"],
filter["logGroupName"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(filter["filterName"], self.audit_resources)
):
self.metric_filters.append(
MetricFilter(
filter["filterName"],
filter["metricTransformations"][0]["metricName"],
filter["filterPattern"],
filter["logGroupName"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -106,22 +115,25 @@ class Logs:
"describe_log_groups"
)
for page in describe_log_groups_paginator.paginate():
for filter in page["logGroups"]:
kms = None
retention_days = 0
if "kmsKeyId" in filter:
kms = filter["kmsKeyId"]
if "retentionInDays" in filter:
retention_days = filter["retentionInDays"]
self.log_groups.append(
LogGroup(
filter["arn"],
filter["logGroupName"],
retention_days,
kms,
regional_client.region,
for log_group in page["logGroups"]:
if not self.audit_resources or (
is_resource_filtered(log_group["arn"], self.audit_resources)
):
kms = None
retention_days = 0
if "kmsKeyId" in log_group:
kms = log_group["kmsKeyId"]
if "retentionInDays" in log_group:
retention_days = log_group["retentionInDays"]
self.log_groups.append(
LogGroup(
log_group["arn"],
log_group["logGroupName"],
retention_days,
kms,
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -5,6 +5,7 @@ from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class CodeArtifact:
self.service = "codeartifact"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
# repositories is a dictionary containing all the codeartifact service information
self.repositories = {}
@@ -40,18 +42,21 @@ class CodeArtifact:
)
for page in list_repositories_paginator.paginate():
for repository in page["repositories"]:
package_name = repository["name"]
package_domain_name = repository["domainName"]
package_domain_owner = repository["domainOwner"]
package_arn = repository["arn"]
# Save Repository
self.repositories[package_name] = Repository(
name=package_name,
arn=package_arn,
domain_name=package_domain_name,
domain_owner=package_domain_owner,
region=regional_client.region,
)
if not self.audit_resources or (
is_resource_filtered(repository["arn"], self.audit_resources)
):
package_name = repository["name"]
package_domain_name = repository["domainName"]
package_domain_owner = repository["domainOwner"]
package_arn = repository["arn"]
# Save Repository
self.repositories[package_name] = Repository(
name=package_name,
arn=package_arn,
domain_name=package_domain_name,
domain_owner=package_domain_owner,
region=regional_client.region,
)
except Exception as error:
logger.error(

View File

@@ -4,6 +4,7 @@ from dataclasses import dataclass
from typing import Optional
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class Codebuild:
self.service = "codebuild"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.projects = []
self.__threading_call__(self.__list_projects__)
@@ -36,14 +38,17 @@ class Codebuild:
list_projects_paginator = regional_client.get_paginator("list_projects")
for page in list_projects_paginator.paginate():
for project in page["projects"]:
self.projects.append(
CodebuildProject(
name=project,
region=regional_client.region,
last_invoked_time=None,
buildspec=None,
if not self.audit_resources or (
is_resource_filtered(project, self.audit_resources)
):
self.projects.append(
CodebuildProject(
name=project,
region=regional_client.region,
last_invoked_time=None,
buildspec=None,
)
)
)
except Exception as error:
logger.error(

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class Config:
self.service = "config"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.recorders = []
self.__threading_call__(self.__describe_configuration_recorder_status__)
@@ -33,8 +35,12 @@ class Config:
recorders = regional_client.describe_configuration_recorder_status()[
"ConfigurationRecordersStatus"
]
if recorders:
for recorder in recorders:
recorders_count = 0
for recorder in recorders:
if not self.audit_resources or (
is_resource_filtered(recorder["name"], self.audit_resources)
):
recorders_count += 1
if "lastStatus" in recorder:
self.recorders.append(
Recorder(
@@ -54,7 +60,7 @@ class Config:
)
)
# No config recorders in region
else:
if recorders_count == 0:
self.recorders.append(
Recorder(
self.audited_account,

View File

@@ -6,6 +6,7 @@ from typing import Union
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -15,6 +16,7 @@ class DirectoryService:
self.service = "ds"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.directories = {}
self.__threading_call__(self.__describe_directories__)
@@ -43,31 +45,36 @@ class DirectoryService:
)
for page in describe_fleets_paginator.paginate():
for directory in page["DirectoryDescriptions"]:
directory_id = directory["DirectoryId"]
directory_name = directory["Name"]
directory_type = directory["Type"]
# Radius Configuration
radius_authentication_protocol = (
directory["RadiusSettings"]["AuthenticationProtocol"]
if "RadiusSettings" in directory
else None
)
radius_status = (
directory["RadiusStatus"]
if "RadiusStatus" in directory
else None
)
if not self.audit_resources or (
is_resource_filtered(
directory["DirectoryId"], self.audit_resources
)
):
directory_id = directory["DirectoryId"]
directory_name = directory["Name"]
directory_type = directory["Type"]
# Radius Configuration
radius_authentication_protocol = (
directory["RadiusSettings"]["AuthenticationProtocol"]
if "RadiusSettings" in directory
else None
)
radius_status = (
directory["RadiusStatus"]
if "RadiusStatus" in directory
else None
)
self.directories[directory_id] = Directory(
name=directory_name,
id=directory_id,
type=directory_type,
region=regional_client.region,
radius_settings=RadiusSettings(
authentication_protocol=radius_authentication_protocol,
status=radius_status,
),
)
self.directories[directory_id] = Directory(
name=directory_name,
id=directory_id,
type=directory_type,
region=regional_client.region,
radius_settings=RadiusSettings(
authentication_protocol=radius_authentication_protocol,
status=radius_status,
),
)
except Exception as error:
logger.error(

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class DynamoDB:
self.service = "dynamodb"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.tables = []
self.__threading_call__(self.__list_tables__)
@@ -35,15 +37,18 @@ class DynamoDB:
list_tables_paginator = regional_client.get_paginator("list_tables")
for page in list_tables_paginator.paginate():
for table in page["TableNames"]:
self.tables.append(
Table(
arn="",
name=table,
encryption_type=None,
kms_arn=None,
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(table, self.audit_resources)
):
self.tables.append(
Table(
arn="",
name=table,
encryption_type=None,
kms_arn=None,
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -96,6 +101,7 @@ class DAX:
self.service = "dax"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = []
self.__threading_call__(self.__describe_clusters__)
@@ -120,18 +126,23 @@ class DAX:
)
for page in describe_clusters_paginator.paginate():
for cluster in page["Clusters"]:
encryption = False
if "SSEDescription" in cluster:
if cluster["SSEDescription"]["Status"] == "ENABLED":
encryption = True
self.clusters.append(
Cluster(
cluster["ClusterArn"],
cluster["ClusterName"],
encryption,
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(
cluster["ClusterArn"], self.audit_resources
)
):
encryption = False
if "SSEDescription" in cluster:
if cluster["SSEDescription"]["Status"] == "ENABLED":
encryption = True
self.clusters.append(
Cluster(
cluster["ClusterArn"],
cluster["ClusterName"],
encryption,
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class EC2:
self.session = audit_info.audit_session
self.audited_partition = audit_info.audited_partition
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.instances = []
self.__threading_call__(self.__describe_instances__)
@@ -55,41 +57,46 @@ class EC2:
for reservation in page["Reservations"]:
for instance in reservation["Instances"]:
arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:instance/{instance['InstanceId']}"
http_tokens = None
http_endpoint = None
public_dns = None
public_ip = None
instance_profile = None
if "MetadataOptions" in instance:
http_tokens = instance["MetadataOptions"]["HttpTokens"]
http_endpoint = instance["MetadataOptions"]["HttpEndpoint"]
if (
"PublicDnsName" in instance
and "PublicIpAddress" in instance
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
public_dns = instance["PublicDnsName"]
public_ip = instance["PublicIpAddress"]
if "IamInstanceProfile" in instance:
instance_profile = instance["IamInstanceProfile"]
http_tokens = None
http_endpoint = None
public_dns = None
public_ip = None
instance_profile = None
if "MetadataOptions" in instance:
http_tokens = instance["MetadataOptions"]["HttpTokens"]
http_endpoint = instance["MetadataOptions"][
"HttpEndpoint"
]
if (
"PublicDnsName" in instance
and "PublicIpAddress" in instance
):
public_dns = instance["PublicDnsName"]
public_ip = instance["PublicIpAddress"]
if "IamInstanceProfile" in instance:
instance_profile = instance["IamInstanceProfile"]
self.instances.append(
Instance(
instance["InstanceId"],
arn,
instance["State"]["Name"],
regional_client.region,
instance["InstanceType"],
instance["ImageId"],
instance["LaunchTime"],
instance["PrivateDnsName"],
instance["PrivateIpAddress"],
public_dns,
public_ip,
http_tokens,
http_endpoint,
instance_profile,
self.instances.append(
Instance(
instance["InstanceId"],
arn,
instance["State"]["Name"],
regional_client.region,
instance["InstanceType"],
instance["ImageId"],
instance["LaunchTime"],
instance["PrivateDnsName"],
instance["PrivateIpAddress"],
public_dns,
public_ip,
http_tokens,
http_endpoint,
instance_profile,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -104,16 +111,19 @@ class EC2:
for page in describe_security_groups_paginator.paginate():
for sg in page["SecurityGroups"]:
arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:security-group/{sg['GroupId']}"
self.security_groups.append(
SecurityGroup(
sg["GroupName"],
arn,
regional_client.region,
sg["GroupId"],
sg["IpPermissions"],
sg["IpPermissionsEgress"],
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
self.security_groups.append(
SecurityGroup(
sg["GroupName"],
arn,
regional_client.region,
sg["GroupId"],
sg["IpPermissions"],
sg["IpPermissionsEgress"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -128,14 +138,17 @@ class EC2:
for page in describe_network_acls_paginator.paginate():
for nacl in page["NetworkAcls"]:
arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:network-acl/{nacl['NetworkAclId']}"
self.network_acls.append(
NetworkACL(
nacl["NetworkAclId"],
arn,
regional_client.region,
nacl["Entries"],
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
self.network_acls.append(
NetworkACL(
nacl["NetworkAclId"],
arn,
regional_client.region,
nacl["Entries"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -151,16 +164,19 @@ class EC2:
for page in describe_snapshots_paginator.paginate(OwnerIds=["self"]):
for snapshot in page["Snapshots"]:
arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:snapshot/{snapshot['SnapshotId']}"
if snapshot["Encrypted"]:
encrypted = True
self.snapshots.append(
Snapshot(
snapshot["SnapshotId"],
arn,
regional_client.region,
encrypted,
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
if snapshot["Encrypted"]:
encrypted = True
self.snapshots.append(
Snapshot(
snapshot["SnapshotId"],
arn,
regional_client.region,
encrypted,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -231,17 +247,20 @@ class EC2:
public = False
for image in regional_client.describe_images(Owners=["self"])["Images"]:
arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:image/{image['ImageId']}"
if image["Public"]:
public = True
self.images.append(
Image(
image["ImageId"],
arn,
image["Name"],
public,
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
if image["Public"]:
public = True
self.images.append(
Image(
image["ImageId"],
arn,
image["Name"],
public,
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -256,14 +275,17 @@ class EC2:
for page in describe_volumes_paginator.paginate():
for volume in page["Volumes"]:
arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:volume/{volume['VolumeId']}"
self.volumes.append(
Volume(
volume["VolumeId"],
arn,
regional_client.region,
volume["Encrypted"],
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
self.volumes.append(
Volume(
volume["VolumeId"],
arn,
regional_client.region,
volume["Encrypted"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -283,16 +305,18 @@ class EC2:
if "AllocationId" in address:
allocation_id = address["AllocationId"]
elastic_ip_arn = f"arn:{self.audited_partition}:ec2:{regional_client.region}:{self.audited_account}:eip-allocation/{allocation_id}"
self.elastic_ips.append(
ElasticIP(
public_ip,
association_id,
allocation_id,
elastic_ip_arn,
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(elastic_ip_arn, self.audit_resources)
):
self.elastic_ips.append(
ElasticIP(
public_ip,
association_id,
allocation_id,
elastic_ip_arn,
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ from dataclasses import dataclass
from json import loads
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class ECR:
def __init__(self, audit_info):
self.service = "ecr"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.repositories = []
self.__threading_call__(self.__describe_repositories__)
@@ -38,19 +40,24 @@ class ECR:
)
for page in describe_ecr_paginator.paginate():
for repository in page["repositories"]:
self.repositories.append(
Repository(
name=repository["repositoryName"],
arn=repository["repositoryArn"],
region=regional_client.region,
scan_on_push=repository["imageScanningConfiguration"][
"scanOnPush"
],
policy=None,
images_details=[],
lyfecicle_policy=None,
if not self.audit_resources or (
is_resource_filtered(
repository["repositoryArn"], self.audit_resources
)
):
self.repositories.append(
Repository(
name=repository["repositoryName"],
arn=repository["repositoryArn"],
region=regional_client.region,
scan_on_push=repository["imageScanningConfiguration"][
"scanOnPush"
],
policy=None,
images_details=[],
lyfecicle_policy=None,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from re import sub
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class ECS:
def __init__(self, audit_info):
self.service = "ecs"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.task_definitions = []
self.__threading_call__(self.__list_task_definitions__)
@@ -36,16 +38,19 @@ class ECS:
list_ecs_paginator = regional_client.get_paginator("list_task_definitions")
for page in list_ecs_paginator.paginate():
for task_definition in page["taskDefinitionArns"]:
self.task_definitions.append(
TaskDefinition(
# we want the family name without the revision
name=sub(":.*", "", task_definition.split("/")[1]),
arn=task_definition,
revision=task_definition.split(":")[-1],
region=regional_client.region,
environment_variables=[],
if not self.audit_resources or (
is_resource_filtered(task_definition, self.audit_resources)
):
self.task_definitions.append(
TaskDefinition(
# we want the family name without the revision
name=sub(":.*", "", task_definition.split("/")[1]),
arn=task_definition,
revision=task_definition.split(":")[-1],
region=regional_client.region,
environment_variables=[],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from dataclasses import dataclass
from botocore.client import ClientError
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class EFS:
def __init__(self, audit_info):
self.service = "efs"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.filesystems = []
self.__threading_call__(self.__describe_file_systems__)
@@ -37,15 +39,18 @@ class EFS:
)
for page in describe_efs_paginator.paginate():
for efs in page["FileSystems"]:
self.filesystems.append(
FileSystem(
id=efs["FileSystemId"],
region=regional_client.region,
policy=None,
backup_policy=None,
encrypted=efs["Encrypted"],
if not self.audit_resources or (
is_resource_filtered(efs["FileSystemId"], self.audit_resources)
):
self.filesystems.append(
FileSystem(
id=efs["FileSystemId"],
region=regional_client.region,
policy=None,
backup_policy=None,
encrypted=efs["Encrypted"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class EKS:
def __init__(self, audit_info):
self.service = "eks"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = []
self.__threading_call__(self.__list_clusters__)
@@ -34,12 +36,15 @@ class EKS:
list_clusters_paginator = regional_client.get_paginator("list_clusters")
for page in list_clusters_paginator.paginate():
for cluster in page["clusters"]:
self.clusters.append(
EKSCluster(
name=cluster,
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(cluster, self.audit_resources)
):
self.clusters.append(
EKSCluster(
name=cluster,
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class ELB:
self.session = audit_info.audit_session
self.audited_partition = audit_info.audited_partition
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.loadbalancers = []
self.__threading_call__(self.__describe_load_balancers__)
@@ -39,24 +41,28 @@ class ELB:
)
for page in describe_elb_paginator.paginate():
for elb in page["LoadBalancerDescriptions"]:
listeners = []
for listener in elb["ListenerDescriptions"]:
listeners.append(
Listener(
protocol=listener["Listener"]["Protocol"],
policies=listener["PolicyNames"],
arn = f"arn:{self.audited_partition}:elasticloadbalancing:{regional_client.region}:{self.audited_account}:loadbalancer/{elb['LoadBalancerName']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
listeners = []
for listener in elb["ListenerDescriptions"]:
listeners.append(
Listener(
protocol=listener["Listener"]["Protocol"],
policies=listener["PolicyNames"],
)
)
self.loadbalancers.append(
LoadBalancer(
name=elb["LoadBalancerName"],
arn=arn,
dns=elb["DNSName"],
region=regional_client.region,
scheme=elb["Scheme"],
listeners=listeners,
)
)
self.loadbalancers.append(
LoadBalancer(
name=elb["LoadBalancerName"],
arn=f"arn:{self.audited_partition}:elasticloadbalancing:{regional_client.region}:{self.audited_account}:loadbalancer/{elb['LoadBalancerName']}",
dns=elb["DNSName"],
region=regional_client.region,
scheme=elb["Scheme"],
listeners=listeners,
)
)
except Exception as error:
logger.error(

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class ELBv2:
def __init__(self, audit_info):
self.service = "elbv2"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.loadbalancersv2 = []
self.__threading_call__(self.__describe_load_balancers__)
@@ -40,17 +42,22 @@ class ELBv2:
)
for page in describe_elbv2_paginator.paginate():
for elbv2 in page["LoadBalancers"]:
self.loadbalancersv2.append(
LoadBalancerv2(
name=elbv2["LoadBalancerName"],
dns=elbv2["DNSName"],
region=regional_client.region,
arn=elbv2["LoadBalancerArn"],
scheme=elbv2["Scheme"],
type=elbv2["Type"],
listeners=[],
if not self.audit_resources or (
is_resource_filtered(
elbv2["LoadBalancerArn"], self.audit_resources
)
):
self.loadbalancersv2.append(
LoadBalancerv2(
name=elbv2["LoadBalancerName"],
dns=elbv2["DNSName"],
region=regional_client.region,
arn=elbv2["LoadBalancerArn"],
scheme=elbv2["Scheme"],
type=elbv2["Type"],
listeners=[],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from enum import Enum
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class EMR:
self.service = "emr"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = {}
self.block_public_access_configuration = {}
@@ -38,18 +40,23 @@ class EMR:
list_clusters_paginator = regional_client.get_paginator("list_clusters")
for page in list_clusters_paginator.paginate():
for cluster in page["Clusters"]:
cluster_name = cluster["Name"]
cluster_id = cluster["Id"]
cluster_arn = cluster["ClusterArn"]
cluster_status = cluster["Status"]["State"]
if not self.audit_resources or (
is_resource_filtered(
cluster["ClusterArn"], self.audit_resources
)
):
cluster_name = cluster["Name"]
cluster_id = cluster["Id"]
cluster_arn = cluster["ClusterArn"]
cluster_status = cluster["Status"]["State"]
self.clusters[cluster_id] = Cluster(
id=cluster_id,
name=cluster_name,
arn=cluster_arn,
status=cluster_status,
region=regional_client.region,
)
self.clusters[cluster_id] = Cluster(
id=cluster_id,
name=cluster_name,
arn=cluster_arn,
status=cluster_status,
region=regional_client.region,
)
except Exception as error:
logger.error(

View File

@@ -5,6 +5,7 @@ from botocore.client import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class Glacier:
self.service = "glacier"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.vaults = {}
self.__threading_call__(self.__list_vaults__)
@@ -37,13 +39,16 @@ class Glacier:
list_vaults_paginator = regional_client.get_paginator("list_vaults")
for page in list_vaults_paginator.paginate():
for vault in page["VaultList"]:
vault_name = vault["VaultName"]
vault_arn = vault["VaultARN"]
self.vaults[vault_name] = Vault(
name=vault_name,
arn=vault_arn,
region=regional_client.region,
)
if not self.audit_resources or (
is_resource_filtered(vault["VaultARN"], self.audit_resources)
):
vault_name = vault["VaultName"]
vault_arn = vault["VaultARN"]
self.vaults[vault_name] = Vault(
name=vault_name,
arn=vault_arn,
region=regional_client.region,
)
except Exception as error:
logger.error(

View File

@@ -1,6 +1,7 @@
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
################### GlobalAccelerator
@@ -9,6 +10,7 @@ class GlobalAccelerator:
self.service = "globalaccelerator"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.accelerators = {}
if audit_info.audited_partition == "aws":
# Global Accelerator is a global service that supports endpoints in multiple AWS Regions
@@ -27,15 +29,20 @@ class GlobalAccelerator:
list_accelerators_paginator = self.client.get_paginator("list_accelerators")
for page in list_accelerators_paginator.paginate():
for accelerator in page["Accelerators"]:
accelerator_arn = accelerator["AcceleratorArn"]
accelerator_name = accelerator["Name"]
enabled = accelerator["Enabled"]
self.accelerators[accelerator_name] = Accelerator(
name=accelerator_name,
arn=accelerator_arn,
region=self.region,
enabled=enabled,
)
if not self.audit_resources or (
is_resource_filtered(
accelerator["AcceleratorArn"], self.audit_resources
)
):
accelerator_arn = accelerator["AcceleratorArn"]
accelerator_name = accelerator["Name"]
enabled = accelerator["Enabled"]
self.accelerators[accelerator_name] = Accelerator(
name=accelerator_name,
arn=accelerator_arn,
region=self.region,
enabled=enabled,
)
except Exception as error:
logger.error(

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class Glue:
self.service = "glue"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.connections = []
self.__threading_call__(self.__get_connections__)
@@ -45,14 +47,17 @@ class Glue:
get_connections_paginator = regional_client.get_paginator("get_connections")
for page in get_connections_paginator.paginate():
for conn in page["ConnectionList"]:
self.connections.append(
Connection(
name=conn["Name"],
type=conn["ConnectionType"],
properties=conn["ConnectionProperties"],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(conn["Name"], self.audit_resources)
):
self.connections.append(
Connection(
name=conn["Name"],
type=conn["ConnectionType"],
properties=conn["ConnectionProperties"],
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -66,13 +71,18 @@ class Glue:
)
for page in get_dev_endpoints_paginator.paginate():
for endpoint in page["DevEndpoints"]:
self.dev_endpoints.append(
DevEndpoint(
name=endpoint["EndpointName"],
security=endpoint.get("SecurityConfiguration"),
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(
endpoint["EndpointName"], self.audit_resources
)
):
self.dev_endpoints.append(
DevEndpoint(
name=endpoint["EndpointName"],
security=endpoint.get("SecurityConfiguration"),
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -84,14 +94,17 @@ class Glue:
get_jobs_paginator = regional_client.get_paginator("get_jobs")
for page in get_jobs_paginator.paginate():
for job in page["Jobs"]:
self.jobs.append(
Job(
name=job["Name"],
security=job.get("SecurityConfiguration"),
arguments=job.get("DefaultArguments"),
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(job["Name"], self.audit_resources)
):
self.jobs.append(
Job(
name=job["Name"],
security=job.get("SecurityConfiguration"),
arguments=job.get("DefaultArguments"),
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -105,30 +118,33 @@ class Glue:
)
for page in get_security_configurations_paginator.paginate():
for config in page["SecurityConfigurations"]:
self.security_configs.append(
SecurityConfig(
name=config["Name"],
s3_encryption=config["EncryptionConfiguration"][
"S3Encryption"
][0]["S3EncryptionMode"],
s3_key_arn=config["EncryptionConfiguration"][
"S3Encryption"
][0].get("KmsKeyArn"),
cw_encryption=config["EncryptionConfiguration"][
"CloudWatchEncryption"
]["CloudWatchEncryptionMode"],
cw_key_arn=config["EncryptionConfiguration"][
"CloudWatchEncryption"
].get("KmsKeyArn"),
jb_encryption=config["EncryptionConfiguration"][
"JobBookmarksEncryption"
]["JobBookmarksEncryptionMode"],
jb_key_arn=config["EncryptionConfiguration"][
"JobBookmarksEncryption"
].get("KmsKeyArn"),
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(config["Name"], self.audit_resources)
):
self.security_configs.append(
SecurityConfig(
name=config["Name"],
s3_encryption=config["EncryptionConfiguration"][
"S3Encryption"
][0]["S3EncryptionMode"],
s3_key_arn=config["EncryptionConfiguration"][
"S3Encryption"
][0].get("KmsKeyArn"),
cw_encryption=config["EncryptionConfiguration"][
"CloudWatchEncryption"
]["CloudWatchEncryptionMode"],
cw_key_arn=config["EncryptionConfiguration"][
"CloudWatchEncryption"
].get("KmsKeyArn"),
jb_encryption=config["EncryptionConfiguration"][
"JobBookmarksEncryption"
]["JobBookmarksEncryptionMode"],
jb_key_arn=config["EncryptionConfiguration"][
"JobBookmarksEncryption"
].get("KmsKeyArn"),
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -138,14 +154,17 @@ class Glue:
logger.info("Glue - Search Tables...")
try:
for table in regional_client.search_tables()["TableList"]:
self.tables.append(
Table(
name=table["Name"],
database=table["DatabaseName"],
catalog=table["CatalogId"],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(table["Name"], self.audit_resources)
):
self.tables.append(
Table(
name=table["Name"],
database=table["DatabaseName"],
catalog=table["CatalogId"],
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class GuardDuty:
def __init__(self, audit_info):
self.service = "guardduty"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.detectors = []
self.__threading_call__(self.__list_detectors__)
@@ -35,9 +37,12 @@ class GuardDuty:
list_detectors_paginator = regional_client.get_paginator("list_detectors")
for page in list_detectors_paginator.paginate():
for detector in page["DetectorIds"]:
self.detectors.append(
Detector(id=detector, region=regional_client.region)
)
if not self.audit_resources or (
is_resource_filtered(detector, self.audit_resources)
):
self.detectors.append(
Detector(id=detector, region=regional_client.region)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ from dataclasses import dataclass
from datetime import datetime
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -29,6 +30,7 @@ class IAM:
self.service = "iam"
self.session = audit_info.audit_session
self.account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.partition = audit_info.audited_partition
self.client = self.session.client(self.service)
global_client = generate_regional_clients(
@@ -68,14 +70,17 @@ class IAM:
roles = []
for page in get_roles_paginator.paginate():
for role in page["Roles"]:
roles.append(
Role(
name=role["RoleName"],
arn=role["Arn"],
assume_role_policy=role["AssumeRolePolicyDocument"],
is_service_role=is_service_role(role),
if not self.audit_resources or (
is_resource_filtered(role["Arn"], self.audit_resources)
):
roles.append(
Role(
name=role["RoleName"],
arn=role["Arn"],
assume_role_policy=role["AssumeRolePolicyDocument"],
is_service_role=is_service_role(role),
)
)
)
return roles
except Exception as error:
logger.error(
@@ -112,7 +117,10 @@ class IAM:
groups = []
for page in get_groups_paginator.paginate():
for group in page["Groups"]:
groups.append(Group(group["GroupName"], group["Arn"]))
if not self.audit_resources or (
is_resource_filtered(group["Arn"], self.audit_resources)
):
groups.append(Group(group["GroupName"], group["Arn"]))
return groups
@@ -175,14 +183,19 @@ class IAM:
users = []
for page in get_users_paginator.paginate():
for user in page["Users"]:
if "PasswordLastUsed" not in user:
users.append(User(user["UserName"], user["Arn"], None))
else:
users.append(
User(
user["UserName"], user["Arn"], user["PasswordLastUsed"]
if not self.audit_resources or (
is_resource_filtered(user["Arn"], self.audit_resources)
):
if "PasswordLastUsed" not in user:
users.append(User(user["UserName"], user["Arn"], None))
else:
users.append(
User(
user["UserName"],
user["Arn"],
user["PasswordLastUsed"],
)
)
)
return users
@@ -330,7 +343,10 @@ class IAM:
list_policies_paginator = self.client.get_paginator("list_policies")
for page in list_policies_paginator.paginate(Scope="Local"):
for policy in page["Policies"]:
policies.append(policy)
if not self.audit_resources or (
is_resource_filtered(policy["Arn"], self.audit_resources)
):
policies.append(policy)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -369,14 +385,17 @@ class IAM:
for certificate in self.client.list_server_certificates()[
"ServerCertificateMetadataList"
]:
server_certificates.append(
Certificate(
certificate["ServerCertificateName"],
certificate["ServerCertificateId"],
certificate["Arn"],
certificate["Expiration"],
if not self.audit_resources or (
is_resource_filtered(certificate["Arn"], self.audit_resources)
):
server_certificates.append(
Certificate(
certificate["ServerCertificateName"],
certificate["ServerCertificateId"],
certificate["Arn"],
certificate["Expiration"],
)
)
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class KMS:
self.service = "kms"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.keys = []
self.__threading_call__(self.__list_keys__)
@@ -37,13 +39,16 @@ class KMS:
list_keys_paginator = regional_client.get_paginator("list_keys")
for page in list_keys_paginator.paginate():
for key in page["Keys"]:
self.keys.append(
Key(
key["KeyId"],
key["KeyArn"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(key["KeyArn"], self.audit_resources)
):
self.keys.append(
Key(
key["KeyId"],
key["KeyArn"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"

View File

@@ -4,6 +4,7 @@ from json import loads
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class OpenSearchService:
def __init__(self, audit_info):
self.service = "opensearch"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.opensearch_domains = []
self.__threading_call__(self.__list_domain_names__)
@@ -35,11 +37,14 @@ class OpenSearchService:
try:
domains = regional_client.list_domain_names()
for domain in domains["DomainNames"]:
self.opensearch_domains.append(
OpenSearchDomain(
name=domain["DomainName"], region=regional_client.region
if not self.audit_resources or (
is_resource_filtered(domain["DomainName"], self.audit_resources)
):
self.opensearch_domains.append(
OpenSearchDomain(
name=domain["DomainName"], region=regional_client.region
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class RDS:
self.service = "rds"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.db_instances = []
self.db_snapshots = []
@@ -43,32 +45,37 @@ class RDS:
)
for page in describe_db_instances_paginator.paginate():
for instance in page["DBInstances"]:
if instance["Engine"] != "docdb":
self.db_instances.append(
DBInstance(
id=instance["DBInstanceIdentifier"],
endpoint=instance["Endpoint"]["Address"],
engine=instance["Engine"],
status=instance["DBInstanceStatus"],
public=instance["PubliclyAccessible"],
encrypted=instance["StorageEncrypted"],
auto_minor_version_upgrade=instance[
"AutoMinorVersionUpgrade"
],
backup_retention_period=instance.get(
"BackupRetentionPeriod"
),
cloudwatch_logs=instance.get(
"EnabledCloudwatchLogsExports"
),
deletion_protection=instance["DeletionProtection"],
enhanced_monitoring_arn=instance.get(
"EnhancedMonitoringResourceArn"
),
multi_az=instance["MultiAZ"],
region=regional_client.region,
)
if not self.audit_resources or (
is_resource_filtered(
instance["DBInstanceIdentifier"], self.audit_resources
)
):
if instance["Engine"] != "docdb":
self.db_instances.append(
DBInstance(
id=instance["DBInstanceIdentifier"],
endpoint=instance["Endpoint"]["Address"],
engine=instance["Engine"],
status=instance["DBInstanceStatus"],
public=instance["PubliclyAccessible"],
encrypted=instance["StorageEncrypted"],
auto_minor_version_upgrade=instance[
"AutoMinorVersionUpgrade"
],
backup_retention_period=instance.get(
"BackupRetentionPeriod"
),
cloudwatch_logs=instance.get(
"EnabledCloudwatchLogsExports"
),
deletion_protection=instance["DeletionProtection"],
enhanced_monitoring_arn=instance.get(
"EnhancedMonitoringResourceArn"
),
multi_az=instance["MultiAZ"],
region=regional_client.region,
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -82,14 +89,19 @@ class RDS:
)
for page in describe_db_snapshots_paginator.paginate():
for snapshot in page["DBSnapshots"]:
if snapshot["Engine"] != "docdb":
self.db_snapshots.append(
DBSnapshot(
id=snapshot["DBSnapshotIdentifier"],
instance_id=snapshot["DBInstanceIdentifier"],
region=regional_client.region,
)
if not self.audit_resources or (
is_resource_filtered(
snapshot["DBSnapshotIdentifier"], self.audit_resources
)
):
if snapshot["Engine"] != "docdb":
self.db_snapshots.append(
DBSnapshot(
id=snapshot["DBSnapshotIdentifier"],
instance_id=snapshot["DBInstanceIdentifier"],
region=regional_client.region,
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -120,14 +132,20 @@ class RDS:
)
for page in describe_db_snapshots_paginator.paginate():
for snapshot in page["DBClusterSnapshots"]:
if snapshot["Engine"] != "docdb":
self.db_cluster_snapshots.append(
ClusterSnapshot(
id=snapshot["DBClusterSnapshotIdentifier"],
cluster_id=snapshot["DBClusterIdentifier"],
region=regional_client.region,
)
if not self.audit_resources or (
is_resource_filtered(
snapshot["DBClusterSnapshotIdentifier"],
self.audit_resources,
)
):
if snapshot["Engine"] != "docdb":
self.db_cluster_snapshots.append(
ClusterSnapshot(
id=snapshot["DBClusterSnapshotIdentifier"],
cluster_id=snapshot["DBClusterIdentifier"],
region=regional_client.region,
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class Redshift:
def __init__(self, audit_info):
self.service = "redshift"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.clusters = []
self.__threading_call__(self.__describe_clusters__)
@@ -35,25 +37,30 @@ class Redshift:
list_clusters_paginator = regional_client.get_paginator("describe_clusters")
for page in list_clusters_paginator.paginate():
for cluster in page["Clusters"]:
cluster_to_append = Cluster(
id=cluster["ClusterIdentifier"],
region=regional_client.region,
)
if (
"PubliclyAccessible" in cluster
and cluster["PubliclyAccessible"]
if not self.audit_resources or (
is_resource_filtered(
cluster["ClusterIdentifier"], self.audit_resources
)
):
cluster_to_append.public_access = True
if "Endpoint" in cluster and "Address" in cluster["Endpoint"]:
cluster_to_append.endpoint_address = cluster["Endpoint"][
"Address"
]
if (
"AllowVersionUpgrade" in cluster
and cluster["AllowVersionUpgrade"]
):
cluster_to_append.allow_version_upgrade = True
self.clusters.append(cluster_to_append)
cluster_to_append = Cluster(
id=cluster["ClusterIdentifier"],
region=regional_client.region,
)
if (
"PubliclyAccessible" in cluster
and cluster["PubliclyAccessible"]
):
cluster_to_append.public_access = True
if "Endpoint" in cluster and "Address" in cluster["Endpoint"]:
cluster_to_append.endpoint_address = cluster["Endpoint"][
"Address"
]
if (
"AllowVersionUpgrade" in cluster
and cluster["AllowVersionUpgrade"]
):
cluster_to_append.allow_version_upgrade = True
self.clusters.append(cluster_to_append)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -1,6 +1,7 @@
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -10,6 +11,7 @@ class Route53:
self.service = "route53"
self.session = audit_info.audit_session
self.audited_partition = audit_info.audited_partition
self.audit_resources = audit_info.audit_resources
self.hosted_zones = {}
global_client = generate_regional_clients(
self.service, audit_info, global_service=True
@@ -30,16 +32,20 @@ class Route53:
for page in list_hosted_zones_paginator.paginate():
for hosted_zone in page["HostedZones"]:
hosted_zone_id = hosted_zone["Id"].replace("/hostedzone/", "")
hosted_zone_name = hosted_zone["Name"]
private_zone = hosted_zone["Config"]["PrivateZone"]
arn = f"arn:{self.audited_partition}:route53:::{hosted_zone_id}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
hosted_zone_name = hosted_zone["Name"]
private_zone = hosted_zone["Config"]["PrivateZone"]
self.hosted_zones[hosted_zone_id] = HostedZone(
id=hosted_zone_id,
name=hosted_zone_name,
private_zone=private_zone,
arn=f"arn:{self.audited_partition}:route53:::{hosted_zone_id}",
region=self.region,
)
self.hosted_zones[hosted_zone_id] = HostedZone(
id=hosted_zone_id,
name=hosted_zone_name,
private_zone=private_zone,
arn=arn,
region=self.region,
)
except Exception as error:
logger.error(

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -13,6 +14,7 @@ class S3:
self.session = audit_info.audit_session
self.client = self.session.client(self.service)
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.buckets = self.__list_buckets__(audit_info)
@@ -50,12 +52,17 @@ class S3:
bucket_region = "us-east-1"
# Arn
arn = f"arn:{self.audited_partition}:s3:::{bucket['Name']}"
# Check if there are filter regions
if audit_info.audited_regions:
if bucket_region in audit_info.audited_regions:
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
# Check if there are filter regions
if audit_info.audited_regions:
if bucket_region in audit_info.audited_regions:
buckets.append(
Bucket(bucket["Name"], arn, bucket_region)
)
else:
buckets.append(Bucket(bucket["Name"], arn, bucket_region))
else:
buckets.append(Bucket(bucket["Name"], arn, bucket_region))
except Exception as error:
logger.error(
f"{bucket_region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class SageMaker:
def __init__(self, audit_info):
self.service = "sagemaker"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.sagemaker_notebook_instances = []
self.sagemaker_models = []
@@ -42,13 +44,19 @@ class SageMaker:
)
for page in list_notebook_instances_paginator.paginate():
for notebook_instance in page["NotebookInstances"]:
self.sagemaker_notebook_instances.append(
NotebookInstance(
name=notebook_instance["NotebookInstanceName"],
region=regional_client.region,
arn=notebook_instance["NotebookInstanceArn"],
if not self.audit_resources or (
is_resource_filtered(
notebook_instance["NotebookInstanceArn"],
self.audit_resources,
)
):
self.sagemaker_notebook_instances.append(
NotebookInstance(
name=notebook_instance["NotebookInstanceName"],
region=regional_client.region,
arn=notebook_instance["NotebookInstanceArn"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -60,13 +68,16 @@ class SageMaker:
list_models_paginator = regional_client.get_paginator("list_models")
for page in list_models_paginator.paginate():
for model in page["Models"]:
self.sagemaker_models.append(
Model(
name=model["ModelName"],
region=regional_client.region,
arn=model["ModelArn"],
if not self.audit_resources or (
is_resource_filtered(model["ModelArn"], self.audit_resources)
):
self.sagemaker_models.append(
Model(
name=model["ModelName"],
region=regional_client.region,
arn=model["ModelArn"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -80,13 +91,18 @@ class SageMaker:
)
for page in list_training_jobs_paginator.paginate():
for training_job in page["TrainingJobSummaries"]:
self.sagemaker_training_jobs.append(
TrainingJob(
name=training_job["TrainingJobName"],
region=regional_client.region,
arn=training_job["TrainingJobArn"],
if not self.audit_resources or (
is_resource_filtered(
training_job["TrainingJobArn"], self.audit_resources
)
):
self.sagemaker_training_jobs.append(
TrainingJob(
name=training_job["TrainingJobName"],
region=regional_client.region,
arn=training_job["TrainingJobArn"],
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class SecretsManager:
self.service = "secretsmanager"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.secrets = {}
self.__threading_call__(self.__list_secrets__)
@@ -34,15 +36,18 @@ class SecretsManager:
list_secrets_paginator = regional_client.get_paginator("list_secrets")
for page in list_secrets_paginator.paginate():
for secret in page["SecretList"]:
self.secrets[secret["Name"]] = Secret(
arn=secret["ARN"],
name=secret["Name"],
region=regional_client.region,
)
if "RotationEnabled" in secret:
self.secrets[secret["Name"]].rotation_enabled = secret[
"RotationEnabled"
]
if not self.audit_resources or (
is_resource_filtered(secret["ARN"], self.audit_resources)
):
self.secrets[secret["Name"]] = Secret(
arn=secret["ARN"],
name=secret["Name"],
region=regional_client.region,
)
if "RotationEnabled" in secret:
self.secrets[secret["Name"]].rotation_enabled = secret[
"RotationEnabled"
]
except Exception as error:
logger.error(

View File

@@ -2,6 +2,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class SecurityHub:
self.service = "securityhub"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.securityhubs = []
self.__threading_call__(self.__describe_hub__)
@@ -51,16 +53,29 @@ class SecurityHub:
else:
# SecurityHub is active so get HubArn
hub_arn = regional_client.describe_hub()["HubArn"]
hub_id = hub_arn.split("/")[1]
self.securityhubs.append(
SecurityHubHub(
hub_arn,
hub_id,
"ACTIVE",
standards,
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(hub_arn, self.audit_resources)
):
hub_id = hub_arn.split("/")[1]
self.securityhubs.append(
SecurityHubHub(
hub_arn,
hub_id,
"ACTIVE",
standards,
regional_client.region,
)
)
else:
self.securityhubs.append(
SecurityHubHub(
"",
"Security Hub",
"NOT_AVAILABLE",
"",
regional_client.region,
)
)
)
except Exception as error:
# Check if Account is subscribed to Security Hub

View File

@@ -4,6 +4,7 @@ from json import loads
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class SNS:
def __init__(self, audit_info):
self.service = "sns"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.topics = []
self.__threading_call__(self.__list_topics__)
@@ -35,13 +37,18 @@ class SNS:
list_topics_paginator = regional_client.get_paginator("list_topics")
for page in list_topics_paginator.paginate():
for topic_arn in page["Topics"]:
self.topics.append(
Topic(
name=topic_arn["TopicArn"].rsplit(":", 1)[1],
arn=topic_arn["TopicArn"],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(
topic_arn["TopicArn"], self.audit_resources
)
):
self.topics.append(
Topic(
name=topic_arn["TopicArn"].rsplit(":", 1)[1],
arn=topic_arn["TopicArn"],
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -4,6 +4,7 @@ from json import loads
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class SQS:
def __init__(self, audit_info):
self.service = "sqs"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.queues = []
self.__threading_call__(self.__list_queues__)
@@ -36,12 +38,15 @@ class SQS:
for page in list_queues_paginator.paginate():
if "QueueUrls" in page:
for queue in page["QueueUrls"]:
self.queues.append(
Queue(
id=queue,
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(queue, self.audit_resources)
):
self.queues.append(
Queue(
id=queue,
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -5,6 +5,7 @@ from enum import Enum
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -14,6 +15,7 @@ class SSM:
self.service = "ssm"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.documents = {}
self.compliance_resources = {}
@@ -53,12 +55,15 @@ class SSM:
list_documents_paginator = regional_client.get_paginator("list_documents")
for page in list_documents_paginator.paginate(**list_documents_parameters):
for document in page["DocumentIdentifiers"]:
document_name = document["Name"]
if not self.audit_resources or (
is_resource_filtered(document["Name"], self.audit_resources)
):
document_name = document["Name"]
self.documents[document_name] = Document(
name=document_name,
region=regional_client.region,
)
self.documents[document_name] = Document(
name=document_name,
region=regional_client.region,
)
except Exception as error:
logger.error(

View File

@@ -3,6 +3,7 @@ import threading
from dataclasses import dataclass
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -12,6 +13,7 @@ class VPC:
self.service = "ec2"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.vpcs = []
self.vpc_peering_connections = []
@@ -43,14 +45,17 @@ class VPC:
describe_vpcs_paginator = regional_client.get_paginator("describe_vpcs")
for page in describe_vpcs_paginator.paginate():
for vpc in page["Vpcs"]:
self.vpcs.append(
VPCs(
vpc["VpcId"],
vpc["IsDefault"],
vpc["CidrBlock"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(vpc["VpcId"], self.audit_resources)
):
self.vpcs.append(
VPCs(
vpc["VpcId"],
vpc["IsDefault"],
vpc["CidrBlock"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -64,16 +69,21 @@ class VPC:
)
for page in describe_vpc_peering_connections_paginator.paginate():
for conn in page["VpcPeeringConnections"]:
self.vpc_peering_connections.append(
VpcPeeringConnection(
conn["VpcPeeringConnectionId"],
conn["AccepterVpcInfo"]["VpcId"],
conn["AccepterVpcInfo"]["CidrBlock"],
conn["RequesterVpcInfo"]["VpcId"],
conn["RequesterVpcInfo"]["CidrBlock"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(
conn["VpcPeeringConnectionId"], self.audit_resources
)
):
self.vpc_peering_connections.append(
VpcPeeringConnection(
conn["VpcPeeringConnectionId"],
conn["AccepterVpcInfo"]["VpcId"],
conn["AccepterVpcInfo"]["CidrBlock"],
conn["RequesterVpcInfo"]["VpcId"],
conn["RequesterVpcInfo"]["CidrBlock"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -140,19 +150,24 @@ class VPC:
)
for page in describe_vpc_endpoints_paginator.paginate():
for endpoint in page["VpcEndpoints"]:
endpoint_policy = None
if endpoint.get("PolicyDocument"):
endpoint_policy = json.loads(endpoint["PolicyDocument"])
self.vpc_endpoints.append(
VpcEndpoint(
endpoint["VpcEndpointId"],
endpoint["VpcId"],
endpoint["State"],
endpoint_policy,
endpoint["OwnerId"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(
endpoint["VpcEndpointId"], self.audit_resources
)
):
endpoint_policy = None
if endpoint.get("PolicyDocument"):
endpoint_policy = json.loads(endpoint["PolicyDocument"])
self.vpc_endpoints.append(
VpcEndpoint(
endpoint["VpcEndpointId"],
endpoint["VpcId"],
endpoint["State"],
endpoint_policy,
endpoint["OwnerId"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -167,14 +182,19 @@ class VPC:
for page in describe_vpc_endpoint_services_paginator.paginate():
for endpoint in page["ServiceDetails"]:
if endpoint["Owner"] != "amazon":
self.vpc_endpoint_services.append(
VpcEndpointService(
endpoint["ServiceId"],
endpoint["ServiceName"],
endpoint["Owner"],
regional_client.region,
if not self.audit_resources or (
is_resource_filtered(
endpoint["ServiceId"], self.audit_resources
)
):
self.vpc_endpoint_services.append(
VpcEndpointService(
endpoint["ServiceId"],
endpoint["ServiceName"],
endpoint["Owner"],
regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class WAF:
def __init__(self, audit_info):
self.service = "waf-regional"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.web_acls = []
self.__threading_call__(self.__list_web_acls__)
@@ -32,14 +34,17 @@ class WAF:
logger.info("WAF - Listing Regional Web ACLs...")
try:
for waf in regional_client.list_web_acls()["WebACLs"]:
self.web_acls.append(
WebAcl(
name=waf["Name"],
id=waf["WebACLId"],
albs=[],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(waf["WebACLId"], self.audit_resources)
):
self.web_acls.append(
WebAcl(
name=waf["Name"],
id=waf["WebACLId"],
albs=[],
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class WAFv2:
def __init__(self, audit_info):
self.service = "wafv2"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.web_acls = []
self.__threading_call__(self.__list_web_acls__)
@@ -32,15 +34,18 @@ class WAFv2:
logger.info("WAFv2 - Listing Regional Web ACLs...")
try:
for wafv2 in regional_client.list_web_acls(Scope="REGIONAL")["WebACLs"]:
self.web_acls.append(
WebAclv2(
arn=wafv2["ARN"],
name=wafv2["Name"],
id=wafv2["Id"],
albs=[],
region=regional_client.region,
if not self.audit_resources or (
is_resource_filtered(wafv2["ARN"], self.audit_resources)
):
self.web_acls.append(
WebAclv2(
arn=wafv2["ARN"],
name=wafv2["Name"],
id=wafv2["Id"],
albs=[],
region=regional_client.region,
)
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -3,6 +3,7 @@ import threading
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -11,6 +12,7 @@ class WorkSpaces:
def __init__(self, audit_info):
self.service = "workspaces"
self.session = audit_info.audit_session
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.workspaces = []
self.__threading_call__(self.__describe_workspaces__)
@@ -35,20 +37,25 @@ class WorkSpaces:
)
for page in describe_workspaces_paginator.paginate():
for workspace in page["Workspaces"]:
workspace_to_append = WorkSpace(
id=workspace["WorkspaceId"], region=regional_client.region
)
if (
"UserVolumeEncryptionEnabled" in workspace
and workspace["UserVolumeEncryptionEnabled"]
if not self.audit_resources or (
is_resource_filtered(
workspace["WorkspaceId"], self.audit_resources
)
):
workspace_to_append.user_volume_encryption_enabled = True
if (
"RootVolumeEncryptionEnabled" in workspace
and workspace["RootVolumeEncryptionEnabled"]
):
workspace_to_append.root_volume_encryption_enabled = True
self.workspaces.append(workspace_to_append)
workspace_to_append = WorkSpace(
id=workspace["WorkspaceId"], region=regional_client.region
)
if (
"UserVolumeEncryptionEnabled" in workspace
and workspace["UserVolumeEncryptionEnabled"]
):
workspace_to_append.user_volume_encryption_enabled = True
if (
"RootVolumeEncryptionEnabled" in workspace
and workspace["RootVolumeEncryptionEnabled"]
):
workspace_to_append.root_volume_encryption_enabled = True
self.workspaces.append(workspace_to_append)
except Exception as error:
logger.error(

View File

@@ -96,7 +96,7 @@ Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESE
"""
set_aws_audit_info returns the AWS_Audit_Info
"""
logger.info("Setting Azure session ...")
logger.info("Setting AWS session ...")
# Assume Role Options
input_role = arguments.get("role")
@@ -236,6 +236,11 @@ Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESE
if not arguments.get("only_logs"):
self.print_audit_credentials(current_audit_info)
# Parse Scan Tags
input_scan_tags = arguments.get("scan_tags")
current_audit_info.audit_resources = get_tagged_resources(
input_scan_tags, current_audit_info
)
return current_audit_info
def set_azure_audit_info(self, arguments) -> Azure_Audit_Info:
@@ -287,3 +292,33 @@ def set_provider_audit_info(provider: str, arguments: dict):
sys.exit()
else:
return provider_audit_info
def get_tagged_resources(input_scan_tags: list, current_audit_info: AWS_Audit_Info):
"""
get_tagged_resources returns a list with the ARNs of the resources that will be scanned, based on the given input tags
"""
try:
scan_tags = []
tagged_resources = []
if input_scan_tags:
for tag in input_scan_tags:
key = tag.split("=")[0]
value = tag.split("=")[1]
scan_tags.append({"Key": key, "Values": [value]})
# Get Resources with scan_tags for all regions
for region in current_audit_info.audited_regions:
client = current_audit_info.audit_session.client(
"resourcegroupstaggingapi", region_name=region
)
get_resources_paginator = client.get_paginator("get_resources")
for page in get_resources_paginator.paginate(TagFilters=scan_tags):
for resource in page["ResourceTagMappingList"]:
tagged_resources.append(resource["ResourceARN"])
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit()
else:
return tagged_resources
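For illustration, a standalone sketch of the tag parsing performed above, assuming the same `Key=Value` input format used by `--scan-tags`; splitting only on the first `=` here is a minor assumption beyond the original code, and the sample values are hypothetical:

```
def build_tag_filters(input_scan_tags: list) -> list:
    """Turn "Key=Value" strings into the TagFilters structure expected by
    the Resource Groups Tagging API get_resources call used above."""
    scan_tags = []
    for tag in input_scan_tags:
        key, value = tag.split("=", 1)  # split on the first '=' only
        scan_tags.append({"Key": key, "Values": [value]})
    return scan_tags


# build_tag_filters(["Environment=dev", "Project=prowler"]) returns:
# [{"Key": "Environment", "Values": ["dev"]},
#  {"Key": "Project", "Values": ["prowler"]}]
```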

View File

@@ -54,6 +54,7 @@ class Test_Parser:
assert not parsed.output_bucket_no_assume
assert not parsed.shodan
assert not parsed.allowlist_file
assert not parsed.scan_tags
def test_default_parser_no_arguments_azure(self):
provider = "azure"
@@ -795,6 +796,24 @@ class Test_Parser:
parsed = self.parser.parse(command)
assert parsed.allowlist_file == allowlist_file
def test_aws_parser_scan_tags_short(self):
argument = "-t"
scan_tag = "Key=Value"
command = [prowler_command, argument, scan_tag]
parsed = self.parser.parse(command)
assert len(parsed.scan_tags) == 1
assert scan_tag in parsed.scan_tags
def test_aws_parser_scan_tags_long(self):
argument = "--scan-tags"
scan_tag1 = "Key=Value"
scan_tag2 = "Key2=Value2"
command = [prowler_command, argument, scan_tag1, scan_tag2]
parsed = self.parser.parse(command)
assert len(parsed.scan_tags) == 2
assert scan_tag1 in parsed.scan_tags
assert scan_tag2 in parsed.scan_tags
def test_parser_azure_auth_sp(self):
argument = "--sp-env-auth"
command = [prowler_command, "azure", argument]

View File

@@ -81,6 +81,7 @@ class Test_Outputs:
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
)
test_output_modes = [
["csv"],
@@ -258,6 +259,7 @@ class Test_Outputs:
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
)
finding = Check_Report(
load_check_metadata(
@@ -327,6 +329,7 @@ class Test_Outputs:
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
)
# Create mock bucket
bucket_name = "test_bucket"
@@ -429,6 +432,7 @@ class Test_Outputs:
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
)
finding = Check_Report(
load_check_metadata(

View File

@@ -0,0 +1,17 @@
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
class Test_Scan_Filters:
def test_is_resource_filtered(self):
audit_resources = [
"arn:aws:iam::123456789012:user/test_user",
"arn:aws:s3:::test_bucket",
]
assert is_resource_filtered(
"arn:aws:iam::123456789012:user/test_user", audit_resources
)
assert not is_resource_filtered(
"arn:aws:iam::123456789012:user/test1", audit_resources
)
assert is_resource_filtered("test_bucket", audit_resources)
assert is_resource_filtered("arn:aws:s3:::test_bucket", audit_resources)

View File

@@ -56,6 +56,7 @@ class Test_AWS_Provider:
),
audited_regions=audited_regions,
organizations_metadata=None,
audit_resources=None,
)
# Call assume_role
@@ -109,6 +110,7 @@ class Test_AWS_Provider:
assumed_role_info=None,
audited_regions=audited_regions,
organizations_metadata=None,
audit_resources=None,
)
generate_regional_clients_response = generate_regional_clients(
"ec2", audit_info
@@ -137,6 +139,7 @@ class Test_AWS_Provider:
assumed_role_info=None,
audited_regions=audited_regions,
organizations_metadata=None,
audit_resources=None,
)
generate_regional_clients_response = generate_regional_clients(
"route53", audit_info, global_service=True
@@ -164,6 +167,7 @@ class Test_AWS_Provider:
assumed_role_info=None,
audited_regions=audited_regions,
organizations_metadata=None,
audit_resources=None,
)
generate_regional_clients_response = generate_regional_clients(
"shield", audit_info, global_service=True

View File

@@ -32,6 +32,7 @@ class Test_Allowlist:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_ACM_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_APIGateway_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -58,6 +58,7 @@ class Test_ApiGatewayV2_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -29,6 +29,7 @@ class Test_AutoScaling_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -42,6 +42,7 @@ class Test_awslambda_function_invoke_api_operations_cloudtrail_logging_enabled:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -74,6 +74,7 @@ class Test_Lambda_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -151,6 +151,7 @@ class Test_CloudFormation_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -161,6 +161,7 @@ class Test_CloudFront_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -26,6 +26,7 @@ class Test_Cloudtrail_Service:
assumed_role_info=None,
audited_regions=["eu-west-1", "us-east-1"],
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_CloudWatch_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -70,6 +70,7 @@ class Test_Codebuild_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_Config_Service:
assumed_role_info=None,
audited_regions=["eu-west-1", "us-east-1"],
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_DynamoDB_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -36,6 +36,7 @@ class Test_EC2_Service:
assumed_role_info=None,
audited_regions=["eu-west-1", "us-east-1"],
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -87,6 +87,7 @@ class Test_ECR_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -39,6 +39,7 @@ class Test_ECS_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -70,6 +70,7 @@ class Test_EFS:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -44,6 +44,7 @@ class Test_EKS_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_ELB_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_ELBv2_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -30,6 +30,7 @@ class Test_emr_cluster_publicly_accesible:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -66,6 +66,7 @@ class Test_EMR_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -65,6 +65,7 @@ class Test_GlobalAccelerator_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -135,6 +135,7 @@ class Test_Glue_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -49,6 +49,7 @@ class Test_GuardDuty_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -30,6 +30,7 @@ class Test_IAM_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -29,6 +29,7 @@ class Test_ACM_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -115,6 +115,7 @@ class Test_OpenSearchService_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -27,6 +27,7 @@ class Test_RDS_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -75,6 +75,7 @@ class Test_Redshift_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -43,6 +43,7 @@ class Test_Route53_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -82,6 +82,7 @@ class Test_Route53_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -29,6 +29,7 @@ class Test_s3_account_level_public_access_blocks:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -30,6 +30,7 @@ class Test_s3_bucket_public_access:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -30,6 +30,7 @@ class Test_S3_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -116,6 +116,7 @@ class Test_SageMaker_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -45,6 +45,7 @@ class Test_SecretsManager_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -42,6 +42,7 @@ class Test_shield_advanced_protection_in_associated_elastic_ips:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -29,6 +29,7 @@ class Test_shield_advanced_protection_in_classic_load_balancers:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -42,6 +42,7 @@ class Test_shield_advanced_protection_in_internet_facing_load_balancers:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

View File

@@ -52,6 +52,7 @@ class Test_Shield_Service:
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
audit_resources=None,
)
return audit_info

Some files were not shown because too many files have changed in this diff.