feat(pip): Prepare for PyPI (#1531)

Sergio Garcia
2022-12-13 09:07:55 +01:00
committed by GitHub
parent 0cd13b90f4
commit bb09267f2a
1461 changed files with 6625 additions and 6904 deletions

tests/__init__.py (new, empty file)

@@ -0,0 +1,180 @@
import os
from prowler.lib.check.check import (
exclude_checks_to_run,
exclude_services_to_run,
parse_checks_from_file,
)
from prowler.lib.check.models import load_check_metadata
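# Tests for check metadata loading and the check/service filtering helpers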
class Test_Check:
def test_load_check_metadata(self):
test_cases = [
{
"input": {
"metadata_path": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/metadata.json",
},
"expected": {
"CheckID": "iam_disable_30_days_credentials",
"CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
"ServiceName": "iam",
"Severity": "low",
},
}
]
for test in test_cases:
metadata_path = test["input"]["metadata_path"]
check_metadata = load_check_metadata(metadata_path)
assert check_metadata.CheckID == test["expected"]["CheckID"]
assert check_metadata.CheckTitle == test["expected"]["CheckTitle"]
assert check_metadata.ServiceName == test["expected"]["ServiceName"]
assert check_metadata.Severity == test["expected"]["Severity"]
def test_parse_checks_from_file(self):
test_cases = [
{
"input": {
"path": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/checklistA.json",
"provider": "aws",
},
"expected": {"check11", "check12", "check7777"},
}
]
for test in test_cases:
check_file = test["input"]["path"]
provider = test["input"]["provider"]
assert parse_checks_from_file(check_file, provider) == test["expected"]
def test_exclude_checks_to_run(self):
test_cases = [
{
"input": {
"check_list": {"check12", "check11", "extra72", "check13"},
"excluded_checks": {"check12", "check13"},
},
"expected": {"check11", "extra72"},
},
{
"input": {
"check_list": {"check112", "check11", "extra72", "check13"},
"excluded_checks": {"check12", "check13", "check14"},
},
"expected": {"check112", "check11", "extra72"},
},
]
for test in test_cases:
check_list = test["input"]["check_list"]
excluded_checks = test["input"]["excluded_checks"]
assert (
exclude_checks_to_run(check_list, excluded_checks) == test["expected"]
)
def test_exclude_services_to_run(self):
test_cases = [
{
"input": {
"checks_to_run": {
"iam_disable_30_days_credentials",
"iam_disable_90_days_credentials",
},
"excluded_services": {"ec2"},
"provider": "aws",
},
"expected": {
"iam_disable_30_days_credentials",
"iam_disable_90_days_credentials",
},
},
{
"input": {
"checks_to_run": {
"iam_disable_30_days_credentials",
"iam_disable_90_days_credentials",
},
"excluded_services": {"iam"},
"provider": "aws",
},
"expected": set(),
},
]
for test in test_cases:
excluded_services = test["input"]["excluded_services"]
checks_to_run = test["input"]["checks_to_run"]
provider = test["input"]["provider"]
assert (
exclude_services_to_run(checks_to_run, excluded_services, provider)
== test["expected"]
)
# def test_parse_checks_from_compliance_framework_two(self):
# test_case = {
# "input": {"compliance_frameworks": ["cis_v1.4_aws", "ens_v3_aws"]},
# "expected": {
# "vpc_flow_logs_enabled",
# "ec2_ebs_snapshot_encryption",
# "iam_user_mfa_enabled_console_access",
# "cloudtrail_multi_region_enabled",
# "ec2_elbv2_insecure_ssl_ciphers",
# "guardduty_is_enabled",
# "s3_bucket_default_encryption",
# "cloudfront_distributions_https_enabled",
# "iam_avoid_root_usage",
# "s3_bucket_secure_transport_policy",
# },
# }
# with mock.patch(
# "prowler.lib.check.check.compliance_specification_dir_path",
# new=f"{os.path.dirname(os.path.realpath(__file__))}/fixtures",
# ):
# provider = "aws"
# bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
# compliance_frameworks = test_case["input"]["compliance_frameworks"]
# assert (
# parse_checks_from_compliance_framework(
# compliance_frameworks, bulk_compliance_frameworks
# )
# == test_case["expected"]
# )
# def test_parse_checks_from_compliance_framework_one(self):
# test_case = {
# "input": {"compliance_frameworks": ["cis_v1.4_aws"]},
# "expected": {
# "iam_user_mfa_enabled_console_access",
# "s3_bucket_default_encryption",
# "iam_avoid_root_usage",
# },
# }
# with mock.patch(
# "prowler.lib.check.check.compliance_specification_dir",
# new=f"{os.path.dirname(os.path.realpath(__file__))}/fixtures",
# ):
# provider = "aws"
# bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
# compliance_frameworks = test_case["input"]["compliance_frameworks"]
# assert (
# parse_checks_from_compliance_framework(
# compliance_frameworks, bulk_compliance_frameworks
# )
# == test_case["expected"]
# )
# def test_parse_checks_from_compliance_framework_no_compliance(self):
# test_case = {
# "input": {"compliance_frameworks": []},
# "expected": set(),
# }
# with mock.patch(
# "prowler.lib.check.check.compliance_specification_dir",
# new=f"{os.path.dirname(os.path.realpath(__file__))}/fixtures",
# ):
# provider = "aws"
# bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
# compliance_frameworks = test_case["input"]["compliance_frameworks"]
# assert (
# parse_checks_from_compliance_framework(
# compliance_frameworks, bulk_compliance_frameworks
# )
# == test_case["expected"]
# )

@@ -0,0 +1,82 @@
{
"Framework": "CIS",
"Provider": "AWS",
"Version": "1.4",
"Requirements": [
{
"Id": "1.4",
"Description": "Ensure no 'root' user account access key exists (Automated)",
"Attributes": [
{
"Section": "1. Identity and Access Management (IAM)",
"Level": [
"level1"
],
"Rationale": "Removing access keys associated with the 'root' user account limits vectors by which the account can be compromised. Additionally, removing the 'root' access keys encourages the creation and use of role based accounts that are least privileged.",
"Guidance": "The 'root' user account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the 'root' user account be removed.",
"Additional information": "IAM User account \"root\" for us-gov cloud regions is not enabled by default. However, on request to AWS support enables 'root' access only through access-keys (CLI, API methods) for us-gov cloud region.",
"References": [
"CCE-78910-7",
"https://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html",
"https://docs.aws.amazon.com/general/latest/gr/managing-aws-access-keys.html",
"https://docs.aws.amazon.com/IAM/latest/APIReference/API_GetAccountSummary.html",
"https://aws.amazon.com/blogs/security/an-easier-way-to-determine-the-presence-of-aws-account-access-keys/"
]
}
],
"Checks": [
"iam_avoid_root_usage"
]
},
{
"Id": "1.10",
"Description": "Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Automated)",
"Attributes": [
{
"Section": "1. Identity and Access Management (IAM)",
"Level": [
"level1"
],
"Guidance": "Multi-Factor Authentication (MFA) adds an extra layer of authentication assurance beyond traditional credentials. With MFA enabled, when a user signs in to the AWS Console, they will be prompted for their user name and password as well as for an authentication code from their physical or virtual MFA token. It is recommended that MFA be enabled for all accounts that have a console password.",
"Rationale": "Enabling MFA provides increased security for console access as it requires the authenticating principal to possess a device that displays a time-sensitive key and have knowledge of a credential.",
"Impact": "AWS will soon end support for SMS multi-factor authentication (MFA). New customers are not allowed to use this feature. We recommend that existing customers switch to one of the following alternative methods of MFA.",
"Additional information": "Forced IAM User Self-Service Remediation. Amazon has published a pattern that forces users to self-service setup MFA before they have access to their complete permissions set. Until they complete this step, they cannot access their full permissions. This pattern can be used on new AWS accounts. It can also be used on existing accounts - it is recommended users are given instructions and a grace period to accomplish MFA enrollment before active enforcement on existing AWS accounts.",
"References": [
"CCE-78901-6",
"https://tools.ietf.org/html/rfc6238",
"https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa.html",
"https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#enable-mfa-for-privileged-users",
"https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_enable_virtual.html",
"https://blogs.aws.amazon.com/security/post/Tx2SJJYE082KBUK/How-to-Delegate-Management-of-Multi-Factor-Authentication-to-AWS-IAM-Users"
]
}
],
"Checks": [
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "2.1.1",
"Description": "Ensure all S3 buckets employ encryption-at-rest (Automated)",
"Attributes": [
{
"Section": "2. Storage",
"Level": [
"level2"
],
"Guidance": "Amazon S3 provides a variety of no, or low, cost encryption options to protect data at rest.",
"Rationale": "Encrypting data at rest reduces the likelihood that it is unintentionally exposed and can nullify the impact of disclosure if the encryption remains unbroken.",
"Impact": "Amazon S3 buckets with default bucket encryption using SSE-KMS cannot be used as destination buckets for Amazon S3 server access logging. Only SSE-S3 default encryption is supported for server access log destination buckets.",
"Additional information": "S3 bucket encryption only applies to objects as they are placed in the bucket. Enabling S3 bucket encryption does not encrypt objects previously stored within the bucket",
"References": [
"https://docs.aws.amazon.com/AmazonS3/latest/user-guide/default-bucket-encryption.html",
"https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html#bucket-encryption-related-resources"
]
}
],
"Checks": [
"s3_bucket_default_encryption"
]
}
]
}

@@ -0,0 +1,82 @@
{
"Framework": "ENS",
"Version": "3",
"Requirements": [
{
"Id": "op.mon.1",
"Description": "Detección de intrusión",
"Attributes": [
{
"Marco": "operacional",
"Categoria": "monitorización del sistema",
"Descripcion_Control": "- En ausencia de otras herramientas de terceros, habilitar Amazon GuarDuty para la detección de amenazas e intrusiones..- Activar el servicio de eventos AWS CloudTrail para todas las regiones..- Activar el servicio VPC FlowLogs..-Deberá habilitarse Amazon GuardDuty para todas las regiones tanto en la cuenta raíz como en las cuentas miembro de un entorno multi-cuenta..-Todas las cuentas miembro deberán estar añadidas para la supervisión bajo la cuenta raíz..-La adminsitración de Amazon GuardDuty quedará delegada exclusivamente a la cuenta de seguridad para garantizar una correcta asignación de los roles para este servicio.",
"Nivel": [
"bajo",
"medio",
"alto"
],
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
]
}
],
"Checks": [
"guardduty_is_enabled",
"cloudtrail_multi_region_enabled",
"vpc_flow_logs_enabled",
"guardduty_is_enabled"
]
},
{
"Id": "op.mon.3",
"Description": "Protección de la integridad y de la autenticidad",
"Attributes": [
{
"Marco": "operacional",
"Categoria": "protección de las comunicaciones",
"Descripcion_Control": "- Habilitar TLS en los balanceadores de carga ELB.- Evitar el uso de protocolos de cifrado inseguros en la conexión TLS entre clientes y balanceadores de carga.- Asegurar que los Buckets de almacenamiento S3 apliquen cifrado para la transferencia de datos empleando TLS.- Asegurar que la distribución entre frontales CloudFront y sus orígenes únicamente emplee tráfico HTTPS.",
"Nivel": [
"bajo",
"medio",
"alto"
],
"Dimensiones": [
"integridad",
"autenticidad"
]
}
],
"Checks": [
"ec2_elbv2_insecure_ssl_ciphers",
"ec2_elbv2_insecure_ssl_ciphers",
"s3_bucket_secure_transport_policy",
"cloudfront_distributions_https_enabled"
]
},
{
"Id": "mp.si.2.r2.1",
"Description": "Copias de seguridad",
"Attributes": [
{
"Marco": "medidas de protección",
"Categoria": "protección de los soportes de información",
"Descripcion_Control": "Se deberá asegurar el cifrado de las copias de seguridad de EBS.",
"Nivel": [
"alto"
],
"Dimensiones": [
"confidencialidad",
"integridad"
]
}
],
"Checks": [
"ec2_ebs_snapshot_encryption"
]
}
]
}

@@ -0,0 +1,7 @@
{
"aws": [
"check11",
"check12",
"check7777"
]
}

@@ -0,0 +1,18 @@
{
"aws": {
"gdpr": {
"checks": [
"check11",
"check12"
],
"description": "GDPR Readiness"
},
"iam": {
"checks": [
"iam_disable_30_days_credentials",
"iam_disable_90_days_credentials"
],
"description": "Identity and Access Management"
}
}
}

@@ -0,0 +1,58 @@
{
"Categories": [
"cat1",
"cat2"
],
"CheckID": "iam_disable_30_days_credentials",
"CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
"CheckType": [
"Software and Configuration Checks"
],
"Compliance": [
{
"Control": [
"4.4"
],
"Framework": "CIS-AWS",
"Group": [
"level1",
"level2"
],
"Version": "1.4"
}
],
"DependsOn": [
"othercheck1",
"othercheck2"
],
"Description": "Ensure credentials unused for 30 days or greater are disabled",
"Notes": "additional information",
"Provider": "aws",
"RelatedTo": [
"othercheck3",
"othercheck4"
],
"RelatedUrl": "https://serviceofficialsiteorpageforthissubject",
"Remediation": {
"Code": {
"CLI": "cli command or URL to the cli command location.",
"NativeIaC": "code or URL to the code location.",
"Other": "cli command or URL to the cli command location.",
"Terraform": "code or URL to the code location."
},
"Recommendation": {
"Text": "Run sudo yum update and cross your fingers and toes.",
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html"
}
},
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceType": "AwsIamAccessAnalyzer",
"Risk": "Risk associated.",
"ServiceName": "iam",
"Severity": "low",
"SubServiceName": "accessanalyzer",
"Tags": {
"Tag1Key": "value",
"Tag2Key": "value"
}
}

@@ -0,0 +1,58 @@
{
"Categories": [
"cat1",
"cat2"
],
"CheckID": "iam_disable_30_days_credentials",
"CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
"CheckType": [
"Software and Configuration Checks"
],
"Compliance": [
{
"Control": [
"4.4"
],
"Framework": "CIS-AWS",
"Group": [
"level1",
"level2"
],
"Version": "1.4"
}
],
"DependsOn": [
"othercheck1",
"othercheck2"
],
"Description": "Ensure credentials unused for 30 days or greater are disabled",
"Notes": "additional information",
"Provider": "aws",
"RelatedTo": [
"othercheck3",
"othercheck4"
],
"RelatedUrl": "https://serviceofficialsiteorpageforthissubject",
"Remediation": {
"Code": {
"CLI": "cli command or URL to the cli command location.",
"NativeIaC": "code or URL to the code location.",
"Other": "cli command or URL to the cli command location.",
"Terraform": "code or URL to the code location."
},
"Recommendation": {
"Text": "Run sudo yum update and cross your fingers and toes.",
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html"
}
},
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceType": "AwsIamAccessAnalyzer",
"Risk": "Risk associated.",
"ServiceName": "iam",
"Severity": "low",
"SubServiceName": "accessanalyzer",
"Tags": {
"Tag1Key": "value",
"Tag2Key": "value"
}
}

@@ -0,0 +1,344 @@
import os
from os import path, remove
import boto3
import pytest
from colorama import Fore
from moto import mock_s3
from prowler.config.config import (
csv_file_suffix,
json_asff_file_suffix,
json_file_suffix,
orange_color,
output_file_timestamp,
prowler_version,
timestamp_iso,
timestamp_utc,
)
from prowler.lib.check.models import Check_Report, load_check_metadata
from prowler.lib.outputs.models import (
Check_Output_CSV,
Check_Output_JSON,
Check_Output_JSON_ASFF,
Compliance,
ProductFields,
Resource,
Severity,
)
from prowler.lib.outputs.outputs import (
fill_file_descriptors,
fill_json,
fill_json_asff,
generate_csv_fields,
send_to_s3_bucket,
set_report_color,
)
from prowler.lib.utils.utils import hash_sha512, open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
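# Tests for the output helpers: file descriptors, report colors, CSV fields, JSON/JSON-ASFF filling and the S3 upload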
class Test_Outputs:
def test_fill_file_descriptors(self):
audited_account = "123456789012"
output_directory = f"{os.path.dirname(os.path.realpath(__file__))}"
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=None,
audited_account="123456789012",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
)
test_output_modes = [
["csv"],
["json"],
["json-asff"],
["csv", "json"],
["csv", "json", "json-asff"],
]
output_filename = f"prowler-output-{audited_account}-{output_file_timestamp}"
expected = [
{
"csv": open_file(
f"{output_directory}/{output_filename}{csv_file_suffix}",
"a",
)
},
{
"json": open_file(
f"{output_directory}/{output_filename}{json_file_suffix}",
"a",
)
},
{
"json-asff": open_file(
f"{output_directory}/{output_filename}{json_asff_file_suffix}",
"a",
)
},
{
"csv": open_file(
f"{output_directory}/{output_filename}{csv_file_suffix}",
"a",
),
"json": open_file(
f"{output_directory}/{output_filename}{json_file_suffix}",
"a",
),
},
{
"csv": open_file(
f"{output_directory}/{output_filename}{csv_file_suffix}",
"a",
),
"json": open_file(
f"{output_directory}/{output_filename}{json_file_suffix}",
"a",
),
"json-asff": open_file(
f"{output_directory}/{output_filename}{json_asff_file_suffix}",
"a",
),
},
]
for index, output_mode_list in enumerate(test_output_modes):
test_output_file_descriptors = fill_file_descriptors(
output_mode_list,
output_directory,
output_filename,
audit_info,
)
for output_mode in output_mode_list:
assert (
test_output_file_descriptors[output_mode].name
== expected[index][output_mode].name
)
remove(expected[index][output_mode].name)
def test_set_report_color(self):
test_status = ["PASS", "FAIL", "ERROR", "WARNING"]
test_colors = [Fore.GREEN, Fore.RED, Fore.BLACK, orange_color]
for status in test_status:
assert set_report_color(status) in test_colors
def test_set_report_color_invalid(self):
test_status = "INVALID"
with pytest.raises(Exception) as exc:
set_report_color(test_status)
assert "Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING" in str(
exc.value
)
assert exc.type == Exception
def test_generate_csv_fields(self):
expected = [
"assessment_start_time",
"finding_unique_id",
"provider",
"profile",
"account_id",
"account_name",
"account_email",
"account_arn",
"account_org",
"account_tags",
"region",
"check_id",
"check_title",
"check_type",
"status",
"status_extended",
"service_name",
"subservice_name",
"severity",
"resource_id",
"resource_arn",
"resource_type",
"resource_details",
"resource_tags",
"description",
"risk",
"related_url",
"remediation_recommendation_text",
"remediation_recommendation_url",
"remediation_recommendation_code_nativeiac",
"remediation_recommendation_code_terraform",
"remediation_recommendation_code_cli",
"remediation_recommendation_code_other",
"categories",
"depends_on",
"related_to",
"notes",
# "compliance",
]
assert generate_csv_fields(Check_Output_CSV) == expected
def test_fill_json(self):
input_audit_info = AWS_Audit_Info(
original_session=None,
audit_session=None,
audited_account="123456789012",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
)
finding = Check_Report(
load_check_metadata(
f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
).json()
)
finding.resource_details = "Test resource details"
finding.resource_id = "test-resource"
finding.resource_arn = "test-arn"
finding.region = "eu-west-1"
finding.status = "PASS"
finding.status_extended = "This is a test"
input = Check_Output_JSON(**finding.check_metadata.dict())
expected = Check_Output_JSON(**finding.check_metadata.dict())
expected.AssessmentStartTime = timestamp_iso
expected.FindingUniqueId = ""
expected.Profile = "default"
expected.AccountId = "123456789012"
expected.OrganizationsInfo = None
expected.Region = "eu-west-1"
expected.Status = "PASS"
expected.StatusExtended = "This is a test"
expected.ResourceId = "test-resource"
expected.ResourceArn = "test-arn"
expected.ResourceDetails = "Test resource details"
assert fill_json(input, input_audit_info, finding) == expected
def test_fill_json_asff(self):
input_audit_info = AWS_Audit_Info(
original_session=None,
audit_session=None,
audited_account="123456789012",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
)
finding = Check_Report(
load_check_metadata(
f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
).json()
)
finding.resource_details = "Test resource details"
finding.resource_id = "test-resource"
finding.resource_arn = "test-arn"
finding.region = "eu-west-1"
finding.status = "PASS"
finding.status_extended = "This is a test"
input = Check_Output_JSON_ASFF()
expected = Check_Output_JSON_ASFF()
expected.Id = f"prowler-{finding.check_metadata.CheckID}-123456789012-eu-west-1-{hash_sha512('test-resource')}"
expected.ProductArn = "arn:aws:securityhub:eu-west-1::product/prowler/prowler"
expected.ProductFields = ProductFields(
ProviderVersion=prowler_version, ProwlerResourceName="test-resource"
)
expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
expected.AwsAccountId = "123456789012"
expected.Types = finding.check_metadata.CheckType
expected.FirstObservedAt = (
expected.UpdatedAt
) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
expected.Title = finding.check_metadata.CheckTitle
expected.Description = finding.check_metadata.Description
expected.Resources = [
Resource(
Id="test-resource",
Type=finding.check_metadata.ResourceType,
Partition="aws",
Region="eu-west-1",
)
]
expected.Compliance = Compliance(
Status="PASS" + "ED",
RelatedRequirements=finding.check_metadata.CheckType,
)
expected.Remediation = {
"Recommendation": finding.check_metadata.Remediation.Recommendation
}
assert fill_json_asff(input, input_audit_info, finding) == expected
@mock_s3
def test_send_to_s3_bucket(self):
# Create mock session
session = boto3.session.Session(
region_name="us-east-1",
)
# Create mock audit_info
input_audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session,
audited_account="123456789012",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
)
# Create mock bucket
bucket_name = "test_bucket"
client = boto3.client("s3")
client.create_bucket(Bucket=bucket_name)
# Create mock csv output file
output_directory = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures"
output_mode = "csv"
filename = f"prowler-output-{input_audit_info.audited_account}"
# Send mock csv file to mock S3 Bucket
send_to_s3_bucket(
filename,
output_directory,
output_mode,
bucket_name,
input_audit_info.audit_session,
)
# Check if the file has been sent by checking its content type
assert (
client.get_object(
Bucket=bucket_name,
Key=output_directory
+ "/"
+ output_mode
+ "/"
+ filename
+ csv_file_suffix,
)["ContentType"]
== "binary/octet-stream"
)

@@ -0,0 +1,191 @@
import json
import boto3
import sure # noqa
from moto import mock_iam, mock_organizations, mock_sts
from prowler.providers.aws.aws_provider import (
assume_role,
get_organizations_metadata,
get_region_global_service,
validate_credentials,
)
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
ACCOUNT_ID = 123456789012
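# Tests for the AWS provider: credential validation, role assumption, Organizations metadata and the global-service region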
class Test_AWS_Provider:
@mock_sts
@mock_iam
def test_validate_credentials(self):
# Create a mock IAM user
iam_client = boto3.client("iam", region_name="us-east-1")
iam_user = iam_client.create_user(UserName="test-user")["User"]
# Create a mock IAM access key
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
"AccessKey"
]
access_key_id = access_key["AccessKeyId"]
secret_access_key = access_key["SecretAccessKey"]
# Create AWS session to validate
session = boto3.session.Session(
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
region_name="us-east-1",
)
# Validate AWS session
get_caller_identity = validate_credentials(session)
get_caller_identity["Arn"].should.equal(iam_user["Arn"])
get_caller_identity["UserId"].should.equal(iam_user["UserId"])
# assert get_caller_identity["UserId"] == str(ACCOUNT_ID)
@mock_iam
@mock_sts
def test_assume_role(self):
# Variables
role_name = "test-role"
role_arn = f"arn:aws:iam::{ACCOUNT_ID}:role/{role_name}"
session_duration_seconds = 900
audited_regions = "eu-west-1"
sessionName = "ProwlerProAsessmentSession"
# Boto 3 client to create our user
iam_client = boto3.client("iam", region_name="us-east-1")
# IAM user
iam_user = iam_client.create_user(UserName="test-user")["User"]
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
"AccessKey"
]
access_key_id = access_key["AccessKeyId"]
secret_access_key = access_key["SecretAccessKey"]
# New Boto3 session with the previously created user
session = boto3.session.Session(
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
region_name="us-east-1",
)
# Populate the audit_info object that Prowler expects as input
audit_info = AWS_Audit_Info(
original_session=session,
audit_session=None,
audited_account=None,
audited_partition=None,
audited_identity_arn=None,
audited_user_id=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=AWS_Assume_Role(
role_arn=role_arn,
session_duration=session_duration_seconds,
external_id=None,
),
audited_regions=audited_regions,
organizations_metadata=None,
)
# Call assume_role
assume_role_response = assume_role(audit_info)
# Recover credentials for the assume role operation
credentials = assume_role_response["Credentials"]
# Test the response
# SessionToken
credentials["SessionToken"].should.have.length_of(356)
credentials["SessionToken"].startswith("FQoGZXIvYXdzE")
# AccessKeyId
credentials["AccessKeyId"].should.have.length_of(20)
credentials["AccessKeyId"].startswith("ASIA")
# SecretAccessKey
credentials["SecretAccessKey"].should.have.length_of(40)
# Assumed Role
assume_role_response["AssumedRoleUser"]["Arn"].should.equal(
f"arn:aws:sts::{ACCOUNT_ID}:assumed-role/{role_name}/{sessionName}"
)
# AssumedRoleUser
assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].startswith(
"AROA"
)
assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].endswith(
":" + sessionName
)
assume_role_response["AssumedRoleUser"]["AssumedRoleId"].should.have.length_of(
21 + 1 + len(sessionName)
)
@mock_organizations
@mock_sts
@mock_iam
def test_organizations(self):
client = boto3.client("organizations", region_name="us-east-1")
iam_client = boto3.client("iam", region_name="us-east-1")
sts_client = boto3.client("sts", region_name="us-east-1")
mockname = "mock-account"
mockdomain = "moto-example.org"
mockemail = "@".join([mockname, mockdomain])
org_id = client.create_organization(FeatureSet="ALL")["Organization"]["Id"]
account_id = client.create_account(AccountName=mockname, Email=mockemail)[
"CreateAccountStatus"
]["AccountId"]
client.tag_resource(
ResourceId=account_id, Tags=[{"Key": "key", "Value": "value"}]
)
trust_policy_document = {
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::{account_id}:root".format(
account_id=ACCOUNT_ID
)
},
"Action": "sts:AssumeRole",
},
}
iam_role_arn = iam_client.role_arn = iam_client.create_role(
RoleName="test-role",
AssumeRolePolicyDocument=json.dumps(trust_policy_document),
)["Role"]["Arn"]
session_name = "new-session"
assumed_role = sts_client.assume_role(
RoleArn=iam_role_arn, RoleSessionName=session_name
)
org = get_organizations_metadata(account_id, assumed_role)
org.account_details_email.should.equal(mockemail)
org.account_details_name.should.equal(mockname)
org.account_details_arn.should.equal(
"arn:aws:organizations::{0}:account/{1}/{2}".format(
ACCOUNT_ID, org_id, account_id
)
)
org.account_details_org.should.equal(org_id)
org.account_details_tags.should.equal("key:value,")
def test_get_region_global_service(self):
# Create mock audit_info
input_audit_info = AWS_Audit_Info(
original_session=None,
audit_session=None,
audited_account="123456789012",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
)
assert (
get_region_global_service(input_audit_info)
== input_audit_info.audited_regions[0]
)

@@ -0,0 +1,135 @@
import yaml
from boto3 import resource, session
from moto import mock_dynamodb, mock_s3
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
parse_allowlist_file,
)
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
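# Tests for the allowlist: parsing the allowlist file from S3 and DynamoDB, and the is_allowlisted matching logic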
class Test_Allowlist:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test S3 allowlist
@mock_s3
def test_s3_allowlist(self):
audit_info = self.set_mocked_audit_info()
# Create bucket and upload allowlist yaml
s3_resource = resource("s3", region_name=AWS_REGION)
s3_resource.create_bucket(Bucket="test-allowlist")
s3_resource.Object("test-allowlist", "allowlist.yaml").put(
Body=open(
"tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml",
"rb",
)
)
with open("tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml") as f:
assert yaml.safe_load(f)["Allowlist"] == parse_allowlist_file(
audit_info, "s3://test-allowlist/allowlist.yaml"
)
# Test DynamoDB allowlist
@mock_dynamodb
def test_dynamo_allowlist(self):
audit_info = self.set_mocked_audit_info()
# Create table and put item
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION)
table_name = "test-allowlist"
params = {
"TableName": table_name,
"KeySchema": [
{"AttributeName": "Accounts", "KeyType": "HASH"},
{"AttributeName": "Checks", "KeyType": "RANGE"},
],
"AttributeDefinitions": [
{"AttributeName": "Accounts", "AttributeType": "S"},
{"AttributeName": "Checks", "AttributeType": "S"},
],
"ProvisionedThroughput": {
"ReadCapacityUnits": 10,
"WriteCapacityUnits": 10,
},
}
table = dynamodb_resource.create_table(**params)
table.put_item(
Item={
"Accounts": "*",
"Checks": "iam_user_hardware_mfa_enabled",
"Regions": ["eu-west-1", "us-east-1"],
"Resources": ["keyword"],
}
)
assert (
"keyword"
in parse_allowlist_file(
audit_info,
"arn:aws:dynamodb:"
+ AWS_REGION
+ ":"
+ str(AWS_ACCOUNT_NUMBER)
+ ":table/"
+ table_name,
)["Accounts"]["*"]["Checks"]["iam_user_hardware_mfa_enabled"]["Resources"]
)
# Allowlist checks
def test_is_allowlisted(self):
# Allowlist example
allowlist = {
"Accounts": {
"*": {
"Checks": {
"check_test": {
"Regions": ["us-east-1", "eu-west-1"],
"Resources": ["prowler", "^test"],
}
}
}
}
}
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler"
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test"
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler"
)
assert not (
is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test"
)
)

@@ -0,0 +1,37 @@
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords:
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"*":
Regions:
- "*"
Resources:
- "test" # Will ignore every resource containing the string "test" in every account and region
"098765432109":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
# EXAMPLE: CONTROL TOWER (to migrate)
# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist
# below ensures that warnings instead of errors are reported for the affected resources.
#extra734:aws-controltower-logs-[[:digit:]]+-[[:alpha:]\-]+
#extra734:aws-controltower-s3-access-logs-[[:digit:]]+-[[:alpha:]\-]+
#extra764:aws-controltower-logs-[[:digit:]]+-[[:alpha:]\-]+
#extra764:aws-controltower-s3-access-logs-[[:digit:]]+-[[:alpha:]\-]+
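# A possible migration of the legacy extra734/extra764 entries above into the new Allowlist format
# (sketch only, kept commented out; "<check_id_for_extra734>" is a placeholder, not a confirmed mapping):
# Allowlist:
#   Accounts:
#     "*":
#       Checks:
#         "<check_id_for_extra734>":
#           Regions:
#             - "*"
#           Resources:
#             - "aws-controltower-logs-[[:digit:]]+-[[:alpha:]\-]+"
#             - "aws-controltower-s3-access-logs-[[:digit:]]+-[[:alpha:]\-]+"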

@@ -0,0 +1,33 @@
import sure # noqa
from prowler.providers.aws.lib.arn.arn import arn_parsing
ACCOUNT_ID = "123456789012"
RESOURCE_TYPE = "role"
IAM_ROLE = "test-role"
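# Tests that an IAM role ARN is parsed into partition, service, region, account id, resource type and resource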
class Test_ARN_Parsing:
def test_arn_parsing(self):
test_cases = [
{
"input_arn": f"arn:aws:iam::{ACCOUNT_ID}:{RESOURCE_TYPE}/{IAM_ROLE}",
"expected": {
"partition": "aws",
"service": "iam",
"region": None,
"account_id": ACCOUNT_ID,
"resource_type": RESOURCE_TYPE,
"resource": IAM_ROLE,
},
}
]
for test in test_cases:
input_arn = test["input_arn"]
parsed_arn = arn_parsing(input_arn)
parsed_arn.partition.should.equal(test["expected"]["partition"])
parsed_arn.service.should.equal(test["expected"]["service"])
parsed_arn.region.should.equal(test["expected"]["region"])
parsed_arn.account_id.should.equal(test["expected"]["account_id"])
parsed_arn.resource_type.should.equal(test["expected"]["resource_type"])
parsed_arn.resource.should.equal(test["expected"]["resource"])

@@ -0,0 +1,169 @@
from unittest import mock
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_service import (
Analyzer,
)
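# Tests for the accessanalyzer_enabled_without_findings check with a mocked AccessAnalyzer client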
class Test_accessanalyzer_enabled_without_findings:
def test_no_analyzers(self):
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.analyzers = []
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
new=accessanalyzer_client,
):
# Test Check
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled_without_findings.accessanalyzer_enabled_without_findings import (
accessanalyzer_enabled_without_findings,
)
check = accessanalyzer_enabled_without_findings()
result = check.execute()
assert len(result) == 0
def test_one_analyzer_not_available(self):
# Include analyzers to check
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.analyzers = [
Analyzer(
"",
"Test Analyzer",
"NOT_AVAILABLE",
"",
"",
"",
"eu-west-1",
)
]
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
accessanalyzer_client,
):
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled_without_findings.accessanalyzer_enabled_without_findings import (
accessanalyzer_enabled_without_findings,
)
check = accessanalyzer_enabled_without_findings()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].status_extended == "IAM Access Analyzer is not enabled"
assert result[0].resource_id == "Test Analyzer"
def test_two_analyzers(self):
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.analyzers = [
Analyzer(
"",
"Test Analyzer",
"NOT_AVAILABLE",
"",
"",
"",
"eu-west-1",
),
Analyzer(
"",
"Test Analyzer",
"ACTIVE",
10,
"",
"",
"eu-west-1",
),
]
# Patch AccessAnalyzer Client
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
new=accessanalyzer_client,
):
# Test Check
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled_without_findings.accessanalyzer_enabled_without_findings import (
accessanalyzer_enabled_without_findings,
)
check = accessanalyzer_enabled_without_findings()
result = check.execute()
assert len(result) == 2
assert result[0].status == "FAIL"
assert result[0].status_extended == "IAM Access Analyzer is not enabled"
assert result[0].resource_id == "Test Analyzer"
assert result[1].status == "FAIL"
assert (
result[1].status_extended
== "IAM Access Analyzer Test Analyzer has 10 active findings"
)
assert result[1].resource_id == "Test Analyzer"
def test_one_active_analyzer_without_findings(self):
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.analyzers = [
Analyzer(
"",
"Test Analyzer",
"ACTIVE",
0,
"",
"",
"eu-west-1",
)
]
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
new=accessanalyzer_client,
):
# Test Check
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled_without_findings.accessanalyzer_enabled_without_findings import (
accessanalyzer_enabled_without_findings,
)
check = accessanalyzer_enabled_without_findings()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "IAM Access Analyzer Test Analyzer has no active findings"
)
assert result[0].resource_id == "Test Analyzer"
def test_one_active_analyzer_not_active(self):
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.analyzers = [
Analyzer(
"",
"Test Analyzer",
"FAILED",
0,
"",
"",
"eu-west-1",
)
]
# Patch AccessAnalyzer Client
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
new=accessanalyzer_client,
):
# Test Check
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled_without_findings.accessanalyzer_enabled_without_findings import (
accessanalyzer_enabled_without_findings,
)
check = accessanalyzer_enabled_without_findings()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "IAM Access Analyzer Test Analyzer is not active"
)
assert result[0].resource_id == "Test Analyzer"

@@ -0,0 +1,95 @@
from unittest.mock import patch
import botocore
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_service import (
AccessAnalyzer,
)
# Mock Test Region
AWS_REGION = "eu-west-1"
# Mocking Access Analyzer Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
Mock every AWS API call made through Boto3.
Although boto3 exposes this call as list_analyzers (snake_case), the
operation_name received by _make_api_call is in its ListAnalyzers form.
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
"""
if operation_name == "ListAnalyzers":
return {
"analyzers": [
{
"arn": "ARN",
"name": "Test Analyzer",
"status": "Enabled",
"findings": 0,
"tags": "",
"type": "ACCOUNT",
"region": "eu-west-1",
}
]
}
if operation_name == "ListFindings":
# The check only needs the number of findings, so return a list of
# arbitrary values just to be counted
return {"findings": [0, 1, 2]}
return make_api_call(self, operation_name, kwarg)
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_AccessAnalyzer_Service:
# Test AccessAnalyzer Client
def test__get_client__(self):
access_analyzer = AccessAnalyzer(current_audit_info)
assert (
access_analyzer.regional_clients[AWS_REGION].__class__.__name__
== "AccessAnalyzer"
)
# Test AccessAnalyzer Session
def test__get_session__(self):
access_analyzer = AccessAnalyzer(current_audit_info)
assert access_analyzer.session.__class__.__name__ == "Session"
# Test AccessAnalyzer Service
def test__get_service__(self):
access_analyzer = AccessAnalyzer(current_audit_info)
assert access_analyzer.service == "accessanalyzer"
def test__list_analyzers__(self):
# Set partition for the service
current_audit_info.audited_partition = "aws"
access_analyzer = AccessAnalyzer(current_audit_info)
assert len(access_analyzer.analyzers) == 1
assert access_analyzer.analyzers[0].arn == "ARN"
assert access_analyzer.analyzers[0].name == "Test Analyzer"
assert access_analyzer.analyzers[0].status == "Enabled"
assert access_analyzer.analyzers[0].tags == ""
assert access_analyzer.analyzers[0].type == "ACCOUNT"
assert access_analyzer.analyzers[0].region == AWS_REGION
def test__list_findings__(self):
# Set partition for the service
current_audit_info.audited_partition = "aws"
access_analyzer = AccessAnalyzer(current_audit_info)
assert len(access_analyzer.analyzers) == 1
assert access_analyzer.analyzers[0].findings_count == 3

@@ -0,0 +1,69 @@
from unittest import mock
from boto3 import client
from moto import mock_acm
AWS_REGION = "us-east-1"
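# Tests for the acm_certificates_expiration_check using moto-mocked ACM certificates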
class Test_acm_certificates_expiration_check:
@mock_acm
def test_acm_certificate_expirated(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check.acm_client",
new=ACM(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check import (
acm_certificates_expiration_check,
)
service_client.certificates[0].expiration_days = 5
check = acm_certificates_expiration_check()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
@mock_acm
def test_acm_certificate_not_expirated(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check.acm_client",
new=ACM(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check import (
acm_certificates_expiration_check,
)
check = acm_certificates_expiration_check()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]

@@ -0,0 +1,113 @@
from unittest import mock
from boto3 import client
from moto import mock_acm
AWS_REGION = "us-east-1"
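# Tests for the acm_certificates_transparency_logs_enabled check using moto-mocked ACM certificates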
class Test_acm_certificates_transparency_logs_enabled:
@mock_acm
def test_acm_certificate_with_logging(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
Options={"CertificateTransparencyLoggingPreference": "ENABLED"},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled.acm_client",
new=ACM(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)
check = acm_certificates_transparency_logs_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "ACM Certificate for test.com has Certificate Transparency logging enabled."
)
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
@mock_acm
def test_acm_certificate_without_logging(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
Options={"CertificateTransparencyLoggingPreference": "ENABLED"},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled.acm_client",
new=ACM(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)
service_client.certificates[0].transparency_logging = False
check = acm_certificates_transparency_logs_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "ACM Certificate for test.com has Certificate Transparency logging disabled."
)
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
@mock_acm
def test_acm_default_certificate(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled.acm_client",
new=ACM(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)
check = acm_certificates_transparency_logs_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "ACM Certificate for test.com has Certificate Transparency logging enabled."
)
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]

@@ -0,0 +1,94 @@
from boto3 import client, session
from moto import mock_acm
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.acm.acm_service import ACM
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
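# Tests for the ACM service class: session, regional clients, audited account and certificate listing/description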
class Test_ACM_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test ACM Service
@mock_acm
def test_service(self):
# ACM client for this test class
audit_info = self.set_mocked_audit_info()
acm = ACM(audit_info)
assert acm.service == "acm"
# Test ACM Client
@mock_acm
def test_client(self):
# ACM client for this test class
audit_info = self.set_mocked_audit_info()
acm = ACM(audit_info)
for regional_client in acm.regional_clients.values():
assert regional_client.__class__.__name__ == "ACM"
# Test ACM Session
@mock_acm
def test__get_session__(self):
# ACM client for this test class
audit_info = self.set_mocked_audit_info()
acm = ACM(audit_info)
assert acm.session.__class__.__name__ == "Session"
# Test ACM Audited Account
@mock_acm
def test_audited_account(self):
# ACM client for this test class
audit_info = self.set_mocked_audit_info()
acm = ACM(audit_info)
assert acm.audited_account == AWS_ACCOUNT_NUMBER
# Test ACM List Certificates
@mock_acm
def test__list_certificates__(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
# ACM client for this test class
audit_info = self.set_mocked_audit_info()
acm = ACM(audit_info)
assert len(acm.certificates) == 1
assert acm.certificates[0].arn == certificate["CertificateArn"]
# Test ACM Describe Certificates
@mock_acm
def test__describe_certificates__(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
# ACM client for this test class
audit_info = self.set_mocked_audit_info()
acm = ACM(audit_info)
assert acm.certificates[0].type == "AMAZON_ISSUED"
assert acm.certificates[0].arn == certificate["CertificateArn"]

@@ -0,0 +1,125 @@
from unittest import mock
from boto3 import client
from moto import mock_apigateway, mock_iam, mock_lambda
from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID
AWS_REGION = "us-east-1"
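# Tests for the apigateway_authorizers_enabled check using moto-mocked REST APIs and a Lambda authorizer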
class Test_apigateway_authorizers_enabled:
@mock_apigateway
def test_apigateway_no_rest_apis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_authorizers_enabled.apigateway_authorizers_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_authorizers_enabled.apigateway_authorizers_enabled import (
apigateway_authorizers_enabled,
)
check = apigateway_authorizers_enabled()
result = check.execute()
assert len(result) == 0
@mock_apigateway
@mock_iam
@mock_lambda
def test_apigateway_one_rest_api_with_lambda_authorizer(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
lambda_client = client("lambda", region_name=AWS_REGION)
iam_client = client("iam")
# Create APIGateway Rest API
role_arn = iam_client.create_role(
RoleName="my-role",
AssumeRolePolicyDocument="some policy",
)["Role"]["Arn"]
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
authorizer = lambda_client.create_function(
FunctionName="lambda-authorizer",
Runtime="python3.7",
Role=role_arn,
Handler="lambda_function.lambda_handler",
Code={
"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/hello-world:latest"
},
)
apigateway_client.create_authorizer(
name="test",
restApiId=rest_api["id"],
type="TOKEN",
authorizerUri=f"arn:aws:apigateway:{apigateway_client.meta.region_name}:lambda:path/2015-03-31/functions/arn:aws:lambda:{apigateway_client.meta.region_name}:{ACCOUNT_ID}:function:{authorizer['FunctionName']}/invocations",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_authorizers_enabled.apigateway_authorizers_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_authorizers_enabled.apigateway_authorizers_enabled import (
apigateway_authorizers_enabled,
)
check = apigateway_authorizers_enabled()
result = check.execute()
assert result[0].status == "PASS"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} has authorizer configured."
)
assert result[0].resource_id == "test-rest-api"
@mock_apigateway
def test_apigateway_one_rest_api_without_lambda_authorizer(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_authorizers_enabled.apigateway_authorizers_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_authorizers_enabled.apigateway_authorizers_enabled import (
apigateway_authorizers_enabled,
)
check = apigateway_authorizers_enabled()
result = check.execute()
assert result[0].status == "FAIL"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} has not authorizer configured."
)
assert result[0].resource_id == "test-rest-api"

@@ -0,0 +1,146 @@
from unittest import mock
from boto3 import client
from moto import mock_apigateway
from prowler.providers.aws.services.apigateway.apigateway_service import Stage
AWS_REGION = "us-east-1"
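# Tests for the apigateway_client_certificate_enabled check across API Gateway stages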
class Test_apigateway_client_certificate_enabled:
@mock_apigateway
def test_apigateway_no_stages(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API
apigateway_client.create_rest_api(
name="test-rest-api",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_client_certificate_enabled.apigateway_client_certificate_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_client_certificate_enabled.apigateway_client_certificate_enabled import (
apigateway_client_certificate_enabled,
)
check = apigateway_client_certificate_enabled()
result = check.execute()
assert len(result) == 0
@mock_apigateway
def test_apigateway_one_stage_without_certificate(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Deployment Stage
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Get the rest api's root id
root_resource_id = apigateway_client.get_resources(restApiId=rest_api["id"])[
"items"
][0]["id"]
resource = apigateway_client.create_resource(
restApiId=rest_api["id"],
parentId=root_resource_id,
pathPart="test-path",
)
apigateway_client.put_method(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
authorizationType="NONE",
)
apigateway_client.put_integration(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
type="HTTP",
integrationHttpMethod="POST",
uri="http://test.com",
)
apigateway_client.create_deployment(
restApiId=rest_api["id"],
stageName="test",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_client_certificate_enabled.apigateway_client_certificate_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_client_certificate_enabled.apigateway_client_certificate_enabled import (
apigateway_client_certificate_enabled,
)
check = apigateway_client_certificate_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} in stage test has not client certificate enabled."
)
assert result[0].resource_id == "test-rest-api"
@mock_apigateway
def test_apigateway_one_stage_with_certificate(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
        # Create APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_client_certificate_enabled.apigateway_client_certificate_enabled.apigateway_client",
new=APIGateway(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_client_certificate_enabled.apigateway_client_certificate_enabled import (
apigateway_client_certificate_enabled,
)
service_client.rest_apis[0].stages.append(
Stage(
"test",
logging=True,
client_certificate=True,
waf=True,
)
)
check = apigateway_client_certificate_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} in stage test has client certificate enabled."
)
assert result[0].resource_id == "test-rest-api"
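
Note that the PASS case does not configure a client certificate through moto; it appends a Stage object directly onto the mocked service. Judging only from that constructor call and the attributes read back by the service tests further down, Stage behaves roughly like the sketch below; the field list is an assumption and the real class may carry more attributes.

from dataclasses import dataclass


@dataclass
class StageSketch:
    name: str
    logging: bool = False
    client_certificate: bool = False
    waf: bool = False


# Mirrors the call in the test above:
stage = StageSketch("test", logging=True, client_certificate=True, waf=True)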

View File

@@ -0,0 +1,111 @@
from unittest import mock
from boto3 import client
from moto import mock_apigateway
AWS_REGION = "us-east-1"
class Test_apigateway_endpoint_public:
@mock_apigateway
def test_apigateway_no_rest_apis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_endpoint_public.apigateway_endpoint_public.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_endpoint_public.apigateway_endpoint_public import (
apigateway_endpoint_public,
)
check = apigateway_endpoint_public()
result = check.execute()
assert len(result) == 0
@mock_apigateway
def test_apigateway_one_private_rest_api(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
        # Create private APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
endpointConfiguration={
"types": [
"PRIVATE",
]
},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_endpoint_public.apigateway_endpoint_public.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_endpoint_public.apigateway_endpoint_public import (
apigateway_endpoint_public,
)
check = apigateway_endpoint_public()
result = check.execute()
assert result[0].status == "PASS"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} is private."
)
assert result[0].resource_id == "test-rest-api"
@mock_apigateway
def test_apigateway_one_public_rest_api(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
        # Create public APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
endpointConfiguration={
"types": [
"EDGE",
]
},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_endpoint_public.apigateway_endpoint_public.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_endpoint_public.apigateway_endpoint_public import (
apigateway_endpoint_public,
)
check = apigateway_endpoint_public()
result = check.execute()
assert result[0].status == "FAIL"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} is internet accesible."
)
assert result[0].resource_id == "test-rest-api"
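
The PASS/FAIL split in these two tests comes straight from the endpointConfiguration types passed to create_rest_api: PRIVATE marks the API as private, while EDGE (or REGIONAL) leaves it internet-facing. The service tests below assert a public_endpoint flag, so the derivation is presumably close to the following sketch (the function name and exact parsing are assumptions):

def is_public_endpoint(rest_api_item: dict) -> bool:
    # get_rest_apis() items carry endpointConfiguration["types"], e.g. ["PRIVATE"] or ["EDGE"]
    types = rest_api_item.get("endpointConfiguration", {}).get("types", [])
    return "PRIVATE" not in types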

View File

@@ -0,0 +1,166 @@
from unittest import mock
from boto3 import client
from moto import mock_apigateway
AWS_REGION = "us-east-1"
class Test_apigateway_logging_enabled:
@mock_apigateway
def test_apigateway_no_rest_apis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_logging_enabled.apigateway_logging_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_logging_enabled.apigateway_logging_enabled import (
apigateway_logging_enabled,
)
check = apigateway_logging_enabled()
result = check.execute()
assert len(result) == 0
@mock_apigateway
def test_apigateway_one_rest_api_with_logging(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Get the rest api's root id
root_resource_id = apigateway_client.get_resources(restApiId=rest_api["id"])[
"items"
][0]["id"]
resource = apigateway_client.create_resource(
restApiId=rest_api["id"],
parentId=root_resource_id,
pathPart="test-path",
)
apigateway_client.put_method(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
authorizationType="NONE",
)
apigateway_client.put_integration(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
type="HTTP",
integrationHttpMethod="POST",
uri="http://test.com",
)
apigateway_client.create_deployment(
restApiId=rest_api["id"],
stageName="test",
)
apigateway_client.update_stage(
restApiId=rest_api["id"],
stageName="test",
patchOperations=[
{
"op": "replace",
"path": "/*/*/logging/loglevel",
"value": "INFO",
},
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_logging_enabled.apigateway_logging_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_logging_enabled.apigateway_logging_enabled import (
apigateway_logging_enabled,
)
check = apigateway_logging_enabled()
result = check.execute()
assert result[0].status == "PASS"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} in stage test has logging enabled."
)
assert result[0].resource_id == "test-rest-api"
@mock_apigateway
def test_apigateway_one_rest_api_without_logging(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Get the rest api's root id
root_resource_id = apigateway_client.get_resources(restApiId=rest_api["id"])[
"items"
][0]["id"]
resource = apigateway_client.create_resource(
restApiId=rest_api["id"],
parentId=root_resource_id,
pathPart="test-path",
)
apigateway_client.put_method(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
authorizationType="NONE",
)
apigateway_client.put_integration(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
type="HTTP",
integrationHttpMethod="POST",
uri="http://test.com",
)
apigateway_client.create_deployment(
restApiId=rest_api["id"],
stageName="test",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_logging_enabled.apigateway_logging_enabled.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_logging_enabled.apigateway_logging_enabled import (
apigateway_logging_enabled,
)
check = apigateway_logging_enabled()
result = check.execute()
assert result[0].status == "FAIL"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} in stage test has logging disabled."
)
assert result[0].resource_id == "test-rest-api"
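
The update_stage call with the "/*/*/logging/loglevel" patch path is the API-level way of turning on execution logging for every method of the stage. A reverse-lookup helper with plain boto3 would look like the sketch below; the field names follow the public GetStage response shape, and whether moto mirrors them exactly is not verified here.

def stage_has_execution_logging(apigw_client, rest_api_id: str, stage_name: str) -> bool:
    """Check the method settings written by the patchOperations above."""
    stage = apigw_client.get_stage(restApiId=rest_api_id, stageName=stage_name)
    return any(
        settings.get("loggingLevel") in ("INFO", "ERROR")
        for settings in stage.get("methodSettings", {}).values()
    )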

View File

@@ -0,0 +1,165 @@
from boto3 import client, session
from moto import mock_apigateway
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.apigateway.apigateway_service import APIGateway
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
class Test_APIGateway_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test APIGateway Service
@mock_apigateway
def test_service(self):
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert apigateway.service == "apigateway"
# Test APIGateway Client
@mock_apigateway
def test_client(self):
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
for regional_client in apigateway.regional_clients.values():
assert regional_client.__class__.__name__ == "APIGateway"
# Test APIGateway Session
@mock_apigateway
def test__get_session__(self):
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert apigateway.session.__class__.__name__ == "Session"
# Test APIGateway Session
@mock_apigateway
def test_audited_account(self):
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert apigateway.audited_account == AWS_ACCOUNT_NUMBER
# Test APIGateway Get Rest APIs
@mock_apigateway
def test__get_rest_apis__(self):
# Generate APIGateway Client
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API
apigateway_client.create_rest_api(
name="test-rest-api",
)
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert len(apigateway.rest_apis) == len(
apigateway_client.get_rest_apis()["items"]
)
# Test APIGateway Get Authorizers
@mock_apigateway
def test__get_authorizers__(self):
# Generate APIGateway Client
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Create authorizer
apigateway_client.create_authorizer(
name="test-authorizer",
restApiId=rest_api["id"],
type="TOKEN",
)
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert apigateway.rest_apis[0].authorizer is True
# Test APIGateway Get Rest API
@mock_apigateway
def test__get_rest_api__(self):
# Generate APIGateway Client
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create private APIGateway Rest API
apigateway_client.create_rest_api(
name="test-rest-api", endpointConfiguration={"types": ["PRIVATE"]}
)
# APIGateway client for this test class
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert apigateway.rest_apis[0].public_endpoint is False
# Test APIGateway Get Stages
@mock_apigateway
def test__get_stages__(self):
# Generate APIGateway Client
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API and a deployment stage
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Get the rest api's root id
root_resource_id = apigateway_client.get_resources(restApiId=rest_api["id"])[
"items"
][0]["id"]
resource = apigateway_client.create_resource(
restApiId=rest_api["id"],
parentId=root_resource_id,
pathPart="test-path",
)
apigateway_client.put_method(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
authorizationType="NONE",
)
apigateway_client.put_integration(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
type="HTTP",
integrationHttpMethod="POST",
uri="http://test.com",
)
apigateway_client.create_deployment(
restApiId=rest_api["id"],
stageName="test",
)
apigateway_client.update_stage(
restApiId=rest_api["id"],
stageName="test",
patchOperations=[
{
"op": "replace",
"path": "/*/*/logging/loglevel",
"value": "INFO",
},
],
)
audit_info = self.set_mocked_audit_info()
apigateway = APIGateway(audit_info)
assert apigateway.rest_apis[0].stages[0].logging is True
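
Taken together, the assertions in this service test outline the objects that APIGateway collects: every entry in rest_apis exposes at least an id, a name, an authorizer flag, a public_endpoint flag and a list of stages whose logging flag the checks read. A rough model matching those accesses is sketched below; the field names are only what the assertions imply, and the real classes may use a different base and carry more data.

from dataclasses import dataclass, field
from typing import Any, List


@dataclass
class RestAPISketch:
    id: str
    name: str
    authorizer: bool = False
    public_endpoint: bool = True
    stages: List[Any] = field(default_factory=list)  # Stage objects as sketched earlier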

View File

@@ -0,0 +1,172 @@
from unittest import mock
from boto3 import client
from moto import mock_apigateway, mock_wafv2
AWS_REGION = "us-east-1"
class Test_apigateway_waf_acl_attached:
@mock_apigateway
def test_apigateway_no_rest_apis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_waf_acl_attached.apigateway_waf_acl_attached.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_waf_acl_attached.apigateway_waf_acl_attached import (
apigateway_waf_acl_attached,
)
check = apigateway_waf_acl_attached()
result = check.execute()
assert len(result) == 0
@mock_apigateway
@mock_wafv2
def test_apigateway_one_rest_api_with_waf(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
waf_client = client("wafv2", region_name=AWS_REGION)
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Get the rest api's root id
root_resource_id = apigateway_client.get_resources(restApiId=rest_api["id"])[
"items"
][0]["id"]
resource = apigateway_client.create_resource(
restApiId=rest_api["id"],
parentId=root_resource_id,
pathPart="test-path",
)
apigateway_client.put_method(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
authorizationType="NONE",
)
apigateway_client.put_integration(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
type="HTTP",
integrationHttpMethod="POST",
uri="http://test.com",
)
apigateway_client.create_deployment(
restApiId=rest_api["id"],
stageName="test",
)
waf_arn = waf_client.create_web_acl(
Name="test",
Scope="REGIONAL",
DefaultAction={"Allow": {}},
VisibilityConfig={
"SampledRequestsEnabled": False,
"CloudWatchMetricsEnabled": False,
"MetricName": "idk",
},
)["Summary"]["ARN"]
waf_client.associate_web_acl(
WebACLArn=waf_arn,
ResourceArn=f"arn:aws:apigateway:{apigateway_client.meta.region_name}::/restapis/{rest_api['id']}/stages/test",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_waf_acl_attached.apigateway_waf_acl_attached.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_waf_acl_attached.apigateway_waf_acl_attached import (
apigateway_waf_acl_attached,
)
check = apigateway_waf_acl_attached()
result = check.execute()
assert result[0].status == "PASS"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} in stage test has {waf_arn} WAF ACL attached."
)
assert result[0].resource_id == "test-rest-api"
@mock_apigateway
def test_apigateway_one_rest_api_without_waf(self):
# Create APIGateway Mocked Resources
apigateway_client = client("apigateway", region_name=AWS_REGION)
# Create APIGateway Rest API
rest_api = apigateway_client.create_rest_api(
name="test-rest-api",
)
# Get the rest api's root id
root_resource_id = apigateway_client.get_resources(restApiId=rest_api["id"])[
"items"
][0]["id"]
resource = apigateway_client.create_resource(
restApiId=rest_api["id"],
parentId=root_resource_id,
pathPart="test-path",
)
apigateway_client.put_method(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
authorizationType="NONE",
)
apigateway_client.put_integration(
restApiId=rest_api["id"],
resourceId=resource["id"],
httpMethod="GET",
type="HTTP",
integrationHttpMethod="POST",
uri="http://test.com",
)
apigateway_client.create_deployment(
restApiId=rest_api["id"],
stageName="test",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigateway.apigateway_service import (
APIGateway,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigateway.apigateway_waf_acl_attached.apigateway_waf_acl_attached.apigateway_client",
new=APIGateway(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigateway.apigateway_waf_acl_attached.apigateway_waf_acl_attached import (
apigateway_waf_acl_attached,
)
check = apigateway_waf_acl_attached()
result = check.execute()
assert result[0].status == "FAIL"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway test-rest-api ID {rest_api['id']} in stage test has not WAF ACL attached."
)
assert result[0].resource_id == "test-rest-api"
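
The PASS case associates the web ACL with the stage ARN arn:aws:apigateway:<region>::/restapis/<rest_api_id>/stages/<stage_name>. One plausible way for a service to resolve that association afterwards is WAFv2's GetWebACLForResource, sketched below; this is a real WAFv2 operation, but whether the check (or moto) actually relies on it is not confirmed here.

def web_acl_for_stage(waf_client, region: str, rest_api_id: str, stage_name: str):
    """Return the ARN of the web ACL attached to an API Gateway stage, or None."""
    stage_arn = f"arn:aws:apigateway:{region}::/restapis/{rest_api_id}/stages/{stage_name}"
    response = waf_client.get_web_acl_for_resource(ResourceArn=stage_arn)
    web_acl = response.get("WebACL")
    return web_acl["ARN"] if web_acl else None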

View File

@@ -0,0 +1,94 @@
from unittest import mock
import botocore
from boto3 import client
from mock import patch
from moto import mock_apigatewayv2
AWS_REGION = "us-east-1"
# Mocking ApiGatewayV2 Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
We have to mock every AWS API call using Boto3
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
"""
if operation_name == "GetAuthorizers":
return {"Items": [{"AuthorizerId": "authorizer-id", "Name": "test-authorizer"}]}
elif operation_name == "GetStages":
return {
"Items": [
{
"AccessLogSettings": {
"DestinationArn": "string",
"Format": "string",
},
"StageName": "test-stage",
}
]
}
return make_api_call(self, operation_name, kwarg)
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
class Test_apigatewayv2_access_logging_enabled:
@mock_apigatewayv2
def test_apigateway_no_apis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
ApiGatewayV2,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigatewayv2.apigatewayv2_access_logging_enabled.apigatewayv2_access_logging_enabled.apigatewayv2_client",
new=ApiGatewayV2(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_access_logging_enabled.apigatewayv2_access_logging_enabled import (
apigatewayv2_access_logging_enabled,
)
check = apigatewayv2_access_logging_enabled()
result = check.execute()
assert len(result) == 0
@mock_apigatewayv2
def test_apigateway_one_api_with_logging_in_stage(self):
# Create ApiGatewayV2 Mocked Resources
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
# Create ApiGatewayV2 API
api = apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
        # GetStages is mocked above to return a stage with access logging enabled
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
ApiGatewayV2,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigatewayv2.apigatewayv2_access_logging_enabled.apigatewayv2_access_logging_enabled.apigatewayv2_client",
new=ApiGatewayV2(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_access_logging_enabled.apigatewayv2_access_logging_enabled import (
apigatewayv2_access_logging_enabled,
)
check = apigatewayv2_access_logging_enabled()
result = check.execute()
assert result[0].status == "PASS"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway V2 test-api ID {api['ApiId']} in stage test-stage has access logging enabled."
)
assert result[0].resource_id == "test-api"
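
Because the patch above replaces botocore's _make_api_call wholesale, any apigatewayv2 client created while it is active gets the canned GetAuthorizers/GetStages payloads, and every other operation falls through to the saved make_api_call, i.e. to moto whenever a @mock_apigatewayv2 decorator is in effect. A small usage sketch, assuming the patch is active:

from boto3 import client

apigwv2 = client("apigatewayv2", region_name=AWS_REGION)
# Intercepted operation: returns the hand-written payload defined above.
stages = apigwv2.get_stages(ApiId="any-api-id")["Items"]
assert stages[0]["StageName"] == "test-stage"
# Operations not listed in mock_make_api_call (e.g. GetApis) are served by moto.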

View File

@@ -0,0 +1,100 @@
from unittest import mock
import botocore
from boto3 import client
from mock import patch
from moto import mock_apigatewayv2
AWS_REGION = "us-east-1"
# Mocking ApiGatewayV2 Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
We have to mock every AWS API call using Boto3
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
"""
if operation_name == "GetAuthorizers":
return {"Items": [{"AuthorizerId": "authorizer-id", "Name": "test-authorizer"}]}
elif operation_name == "GetStages":
return {
"Items": [
{
"AccessLogSettings": {
"DestinationArn": "string",
"Format": "string",
},
"StageName": "test-stage",
}
]
}
return make_api_call(self, operation_name, kwarg)
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
class Test_apigatewayv2_authorizers_enabled:
@mock_apigatewayv2
def test_apigateway_no_apis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
ApiGatewayV2,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigatewayv2.apigatewayv2_authorizers_enabled.apigatewayv2_authorizers_enabled.apigatewayv2_client",
new=ApiGatewayV2(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_authorizers_enabled.apigatewayv2_authorizers_enabled import (
apigatewayv2_authorizers_enabled,
)
check = apigatewayv2_authorizers_enabled()
result = check.execute()
assert len(result) == 0
@mock_apigatewayv2
def test_apigateway_one_api_with_authorizer(self):
# Create ApiGatewayV2 Mocked Resources
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
# Create ApiGatewayV2 API
api = apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
apigatewayv2_client.create_authorizer(
ApiId=api["ApiId"],
AuthorizerType="REQUEST",
IdentitySource=[],
Name="auth1",
AuthorizerPayloadFormatVersion="2.0",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
ApiGatewayV2,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.apigatewayv2.apigatewayv2_authorizers_enabled.apigatewayv2_authorizers_enabled.apigatewayv2_client",
new=ApiGatewayV2(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_authorizers_enabled.apigatewayv2_authorizers_enabled import (
apigatewayv2_authorizers_enabled,
)
check = apigatewayv2_authorizers_enabled()
result = check.execute()
assert result[0].status == "PASS"
assert len(result) == 1
assert (
result[0].status_extended
== f"API Gateway V2 test-api ID {api['ApiId']} has authorizer configured."
)
assert result[0].resource_id == "test-api"

View File

@@ -0,0 +1,139 @@
import botocore
from boto3 import client, session
from mock import patch
from moto import mock_apigatewayv2
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
ApiGatewayV2,
)
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
# Mocking ApiGatewayV2 Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
We have to mock every AWS API call using Boto3
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
"""
if operation_name == "GetAuthorizers":
return {"Items": [{"AuthorizerId": "authorizer-id", "Name": "test-authorizer"}]}
elif operation_name == "GetStages":
return {
"Items": [
{
"AccessLogSettings": {
"DestinationArn": "string",
"Format": "string",
},
"StageName": "test-stage",
}
]
}
return make_api_call(self, operation_name, kwarg)
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
class Test_ApiGatewayV2_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test ApiGatewayV2 Service
@mock_apigatewayv2
def test_service(self):
# ApiGatewayV2 client for this test class
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
assert apigatewayv2.service == "apigatewayv2"
# Test ApiGatewayV2 Client
@mock_apigatewayv2
def test_client(self):
# ApiGatewayV2 client for this test class
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
for regional_client in apigatewayv2.regional_clients.values():
assert regional_client.__class__.__name__ == "ApiGatewayV2"
# Test ApiGatewayV2 Session
@mock_apigatewayv2
def test__get_session__(self):
# ApiGatewayV2 client for this test class
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
assert apigatewayv2.session.__class__.__name__ == "Session"
# Test ApiGatewayV2 Session
@mock_apigatewayv2
def test_audited_account(self):
# ApiGatewayV2 client for this test class
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
assert apigatewayv2.audited_account == AWS_ACCOUNT_NUMBER
# Test ApiGatewayV2 Get APIs
@mock_apigatewayv2
def test__get_apis__(self):
# Generate ApiGatewayV2 Client
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
# Create ApiGatewayV2 API
apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
# ApiGatewayV2 client for this test class
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
assert len(apigatewayv2.apis) == len(apigatewayv2_client.get_apis()["Items"])
# Test ApiGatewayV2 Get Authorizers
@mock_apigatewayv2
def test__get_authorizers__(self):
# Generate ApiGatewayV2 Client
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
# Create ApiGatewayV2 Rest API
api = apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
# Create authorizer
apigatewayv2_client.create_authorizer(
ApiId=api["ApiId"],
AuthorizerType="REQUEST",
IdentitySource=[],
Name="auth1",
AuthorizerPayloadFormatVersion="2.0",
)
# ApiGatewayV2 client for this test class
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
assert apigatewayv2.apis[0].authorizer is True
# Test ApiGatewayV2 Get Stages
@mock_apigatewayv2
def test__get_stages__(self):
# Generate ApiGatewayV2 Client
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
# Create ApiGatewayV2 Rest API and a deployment stage
apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
audit_info = self.set_mocked_audit_info()
apigatewayv2 = ApiGatewayV2(audit_info)
assert apigatewayv2.apis[0].stages[0].logging is True
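
As with the v1 service, the assertions here outline what ApiGatewayV2 collects: each entry in apis carries an authorizer flag derived from GetAuthorizers and stages whose logging flag reflects the AccessLogSettings returned by GetStages. A compact sketch of how that stage flag could be derived from the mocked payload (helper name and exact parsing are assumptions):

def stage_has_access_logging(stage_item: dict) -> bool:
    # GetStages items expose AccessLogSettings only when access logging is configured.
    return bool(stage_item.get("AccessLogSettings", {}).get("DestinationArn"))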

View File

@@ -0,0 +1,158 @@
from unittest import mock
from prowler.providers.aws.services.appstream.appstream_service import Fleet
# Mock Test Region
AWS_REGION = "eu-west-1"
class Test_appstream_fleet_default_internet_access_disabled:
def test_no_fleets(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_default_internet_access_disabled.appstream_fleet_default_internet_access_disabled import (
appstream_fleet_default_internet_access_disabled,
)
check = appstream_fleet_default_internet_access_disabled()
result = check.execute()
assert len(result) == 0
def test_one_fleet_internet_access_enabled(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
max_user_duration_in_seconds=900,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_default_internet_access_disabled.appstream_fleet_default_internet_access_disabled import (
appstream_fleet_default_internet_access_disabled,
)
check = appstream_fleet_default_internet_access_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has default internet access enabled"
)
    def test_one_fleet_internet_access_disabled(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
max_user_duration_in_seconds=900,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=False,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_default_internet_access_disabled.appstream_fleet_default_internet_access_disabled import (
appstream_fleet_default_internet_access_disabled,
)
check = appstream_fleet_default_internet_access_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has default internet access disabled"
)
def test_two_fleets_internet_access_one_enabled_two_disabled(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet-1",
max_user_duration_in_seconds=900,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
fleet2 = Fleet(
arn="arn",
name="test-fleet-2",
max_user_duration_in_seconds=900,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=False,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
appstream_client.fleets.append(fleet2)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_default_internet_access_disabled.appstream_fleet_default_internet_access_disabled import (
appstream_fleet_default_internet_access_disabled,
)
check = appstream_fleet_default_internet_access_disabled()
result = check.execute()
assert len(result) == 2
for res in result:
if res.resource_id == fleet1.name:
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has default internet access enabled"
)
if res.resource_id == fleet2.name:
assert result[1].resource_arn == fleet2.arn
assert result[1].region == fleet2.region
assert result[1].resource_id == fleet2.name
assert result[1].status == "PASS"
assert (
result[1].status_extended
== f"Fleet {fleet2.name} has default internet access disabled"
)
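
The appstream checks never reach AWS at all: the service is replaced by a MagicMock whose fleets list is filled with Fleet objects built directly in the test. From the keyword arguments used above, Fleet looks roughly like the sketch below; only the fields the tests touch are listed, and the real class may differ.

from dataclasses import dataclass


@dataclass
class FleetSketch:
    arn: str
    name: str
    max_user_duration_in_seconds: int
    disconnect_timeout_in_seconds: int
    idle_disconnect_timeout_in_seconds: int
    enable_default_internet_access: bool
    region: str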

View File

@@ -0,0 +1,164 @@
from unittest import mock
from prowler.providers.aws.services.appstream.appstream_service import Fleet
# Mock Test Region
AWS_REGION = "eu-west-1"
class Test_appstream_fleet_maximum_session_duration:
def test_no_fleets(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_maximum_session_duration.appstream_fleet_maximum_session_duration import (
appstream_fleet_maximum_session_duration,
)
check = appstream_fleet_maximum_session_duration()
result = check.execute()
assert len(result) == 0
def test_one_fleet_maximum_session_duration_more_than_10_hours(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
# 11 Hours
max_user_duration_in_seconds=11 * 60 * 60,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_maximum_session_duration.appstream_fleet_maximum_session_duration import (
appstream_fleet_maximum_session_duration,
)
check = appstream_fleet_maximum_session_duration()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the maximum session duration configured for more that 10 hours"
)
def test_one_fleet_maximum_session_duration_less_than_10_hours(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
# 9 Hours
max_user_duration_in_seconds=9 * 60 * 60,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_maximum_session_duration.appstream_fleet_maximum_session_duration import (
appstream_fleet_maximum_session_duration,
)
check = appstream_fleet_maximum_session_duration()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the maximum session duration configured for less that 10 hours"
)
    def test_two_fleets_one_maximum_session_duration_less_than_10_hours_one_more_than_10_hours(
self,
):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet-1",
# 1 Hours
max_user_duration_in_seconds=1 * 60 * 60,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
fleet2 = Fleet(
arn="arn",
name="test-fleet-2",
# 24 Hours
max_user_duration_in_seconds=24 * 60 * 60,
disconnect_timeout_in_seconds=900,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=False,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
appstream_client.fleets.append(fleet2)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_maximum_session_duration.appstream_fleet_maximum_session_duration import (
appstream_fleet_maximum_session_duration,
)
check = appstream_fleet_maximum_session_duration()
result = check.execute()
assert len(result) == 2
for res in result:
if res.resource_id == fleet1.name:
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the maximum session duration configured for less that 10 hours"
)
if res.resource_id == fleet2.name:
assert result[1].resource_arn == fleet2.arn
assert result[1].region == fleet2.region
assert result[1].resource_id == fleet2.name
assert result[1].status == "FAIL"
assert (
result[1].status_extended
== f"Fleet {fleet2.name} has the maximum session duration configured for more that 10 hours"
)

View File

@@ -0,0 +1,164 @@
from unittest import mock
from prowler.providers.aws.services.appstream.appstream_service import Fleet
# Mock Test Region
AWS_REGION = "eu-west-1"
class Test_appstream_fleet_session_disconnect_timeout:
def test_no_fleets(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_disconnect_timeout.appstream_fleet_session_disconnect_timeout import (
appstream_fleet_session_disconnect_timeout,
)
check = appstream_fleet_session_disconnect_timeout()
result = check.execute()
assert len(result) == 0
def test_one_fleet_session_disconnect_timeout_more_than_5_minutes(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
max_user_duration_in_seconds=1 * 60 * 60,
# 1 hour
disconnect_timeout_in_seconds=1 * 60 * 60,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_disconnect_timeout.appstream_fleet_session_disconnect_timeout import (
appstream_fleet_session_disconnect_timeout,
)
check = appstream_fleet_session_disconnect_timeout()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the session disconnect timeout set to more than 5 minutes"
)
def test_one_fleet_session_disconnect_timeout_less_than_5_minutes(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
max_user_duration_in_seconds=900,
# 4 minutes
disconnect_timeout_in_seconds=4 * 60,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_disconnect_timeout.appstream_fleet_session_disconnect_timeout import (
appstream_fleet_session_disconnect_timeout,
)
check = appstream_fleet_session_disconnect_timeout()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the session disconnect timeout set to less than 5 minutes"
)
    def test_two_fleets_one_session_disconnect_timeout_less_than_5_minutes_one_more_than_5_minutes(
self,
):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet-1",
max_user_duration_in_seconds=1 * 60 * 60,
# 1 Hours
disconnect_timeout_in_seconds=1 * 60 * 60,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=True,
region=AWS_REGION,
)
fleet2 = Fleet(
arn="arn",
name="test-fleet-2",
max_user_duration_in_seconds=24 * 60 * 60,
# 3 minutes
disconnect_timeout_in_seconds=3 * 60,
idle_disconnect_timeout_in_seconds=900,
enable_default_internet_access=False,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
appstream_client.fleets.append(fleet2)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_disconnect_timeout.appstream_fleet_session_disconnect_timeout import (
appstream_fleet_session_disconnect_timeout,
)
check = appstream_fleet_session_disconnect_timeout()
result = check.execute()
assert len(result) == 2
for res in result:
if res.resource_id == fleet1.name:
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the session disconnect timeout set to more than 5 minutes"
)
if res.resource_id == fleet2.name:
assert result[1].resource_arn == fleet2.arn
assert result[1].region == fleet2.region
assert result[1].resource_id == fleet2.name
assert result[1].status == "PASS"
assert (
result[1].status_extended
== f"Fleet {fleet2.name} has the session disconnect timeout set to less than 5 minutes"
)

View File

@@ -0,0 +1,164 @@
from unittest import mock
from prowler.providers.aws.services.appstream.appstream_service import Fleet
# Mock Test Region
AWS_REGION = "eu-west-1"
class Test_appstream_fleet_session_idle_disconnect_timeout:
def test_no_fleets(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_idle_disconnect_timeout.appstream_fleet_session_idle_disconnect_timeout import (
appstream_fleet_session_idle_disconnect_timeout,
)
check = appstream_fleet_session_idle_disconnect_timeout()
result = check.execute()
assert len(result) == 0
def test_one_fleet_session_idle_disconnect_timeout_more_than_10_minutes(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
max_user_duration_in_seconds=1 * 60 * 60,
disconnect_timeout_in_seconds=1 * 60 * 60,
# 15 minutes
idle_disconnect_timeout_in_seconds=15 * 60,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_idle_disconnect_timeout.appstream_fleet_session_idle_disconnect_timeout import (
appstream_fleet_session_idle_disconnect_timeout,
)
check = appstream_fleet_session_idle_disconnect_timeout()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the session idle disconnect timeout set to more than 10 minutes"
)
def test_one_fleet_session_idle_disconnect_timeout_less_than_10_minutes(self):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet",
max_user_duration_in_seconds=900,
disconnect_timeout_in_seconds=4 * 60,
# 8 minutes
idle_disconnect_timeout_in_seconds=8 * 60,
enable_default_internet_access=True,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_idle_disconnect_timeout.appstream_fleet_session_idle_disconnect_timeout import (
appstream_fleet_session_idle_disconnect_timeout,
)
check = appstream_fleet_session_idle_disconnect_timeout()
result = check.execute()
assert len(result) == 1
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the session idle disconnect timeout set to less than 10 minutes"
)
    def test_two_fleets_one_session_idle_disconnect_timeout_less_than_10_minutes_one_more_than_10_minutes(
self,
):
appstream_client = mock.MagicMock
appstream_client.fleets = []
fleet1 = Fleet(
arn="arn",
name="test-fleet-1",
max_user_duration_in_seconds=1 * 60 * 60,
disconnect_timeout_in_seconds=3 * 60,
# 5 minutes
idle_disconnect_timeout_in_seconds=5 * 60,
enable_default_internet_access=True,
region=AWS_REGION,
)
fleet2 = Fleet(
arn="arn",
name="test-fleet-2",
max_user_duration_in_seconds=24 * 60 * 60,
disconnect_timeout_in_seconds=3 * 60,
# 45 minutes
idle_disconnect_timeout_in_seconds=45 * 60,
enable_default_internet_access=False,
region=AWS_REGION,
)
appstream_client.fleets.append(fleet1)
appstream_client.fleets.append(fleet2)
with mock.patch(
"prowler.providers.aws.services.appstream.appstream_service.AppStream",
new=appstream_client,
):
# Test Check
from prowler.providers.aws.services.appstream.appstream_fleet_session_idle_disconnect_timeout.appstream_fleet_session_idle_disconnect_timeout import (
appstream_fleet_session_idle_disconnect_timeout,
)
check = appstream_fleet_session_idle_disconnect_timeout()
result = check.execute()
assert len(result) == 2
for res in result:
if res.resource_id == fleet1.name:
assert result[0].resource_arn == fleet1.arn
assert result[0].region == fleet1.region
assert result[0].resource_id == fleet1.name
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Fleet {fleet1.name} has the session idle disconnect timeout set to less than 10 minutes"
)
if res.resource_id == fleet2.name:
assert result[1].resource_arn == fleet2.arn
assert result[1].region == fleet2.region
assert result[1].resource_id == fleet2.name
assert result[1].status == "FAIL"
assert (
result[1].status_extended
== f"Fleet {fleet2.name} has the session idle disconnect timeout set to more than 10 minutes"
)

View File

@@ -0,0 +1,104 @@
from unittest.mock import patch
import botocore
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.appstream.appstream_service import AppStream
# Mock Test Region
AWS_REGION = "eu-west-1"
# Mocking AppStream Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
We have to mock every AWS API call using Boto3
    As you can see the operation_name has the describe_fleets snake_case form but
    we are using the DescribeFleets form.
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
"""
if operation_name == "DescribeFleets":
return {
"Fleets": [
{
"Arn": f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-0",
"Name": "test-prowler3-0",
"MaxUserDurationInSeconds": 100,
"DisconnectTimeoutInSeconds": 900,
"IdleDisconnectTimeoutInSeconds": 900,
"EnableDefaultInternetAccess": False,
},
{
"Arn": f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-1",
"Name": "test-prowler3-1",
"MaxUserDurationInSeconds": 57600,
"DisconnectTimeoutInSeconds": 900,
"IdleDisconnectTimeoutInSeconds": 900,
"EnableDefaultInternetAccess": True,
},
]
}
return make_api_call(self, operation_name, kwarg)
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
"prowler.providers.aws.services.appstream.appstream_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_AppStream_Service:
# Test AppStream Client
def test__get_client__(self):
appstream = AppStream(current_audit_info)
assert appstream.regional_clients[AWS_REGION].__class__.__name__ == "AppStream"
# Test AppStream Session
def test__get_session__(self):
appstream = AppStream(current_audit_info)
assert appstream.session.__class__.__name__ == "Session"
# Test AppStream Session
def test__get_service__(self):
appstream = AppStream(current_audit_info)
assert appstream.service == "appstream"
def test__describe_fleets__(self):
# Set partition for the service
current_audit_info.audited_partition = "aws"
appstream = AppStream(current_audit_info)
assert len(appstream.fleets) == 2
assert (
appstream.fleets[0].arn
== f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-0"
)
assert appstream.fleets[0].name == "test-prowler3-0"
assert appstream.fleets[0].max_user_duration_in_seconds == 100
assert appstream.fleets[0].disconnect_timeout_in_seconds == 900
assert appstream.fleets[0].idle_disconnect_timeout_in_seconds == 900
assert appstream.fleets[0].enable_default_internet_access is False
assert appstream.fleets[0].region == AWS_REGION
assert (
appstream.fleets[1].arn
== f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-1"
)
assert appstream.fleets[1].name == "test-prowler3-1"
assert appstream.fleets[1].max_user_duration_in_seconds == 57600
assert appstream.fleets[1].disconnect_timeout_in_seconds == 900
assert appstream.fleets[1].idle_disconnect_timeout_in_seconds == 900
assert appstream.fleets[1].enable_default_internet_access is True
assert appstream.fleets[1].region == AWS_REGION

View File

@@ -0,0 +1,194 @@
from unittest import mock
from boto3 import client
from moto import mock_autoscaling
AWS_REGION = "us-east-1"
class Test_autoscaling_find_secrets_ec2_launch_configuration:
@mock_autoscaling
def test_no_autoscaling(self):
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
autoscaling_client.launch_configurations = []
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
AutoScaling,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_client",
new=AutoScaling(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration import (
autoscaling_find_secrets_ec2_launch_configuration,
)
check = autoscaling_find_secrets_ec2_launch_configuration()
result = check.execute()
assert len(result) == 0
@mock_autoscaling
def test_one_autoscaling_with_no_secrets(self):
# Include launch_configurations to check
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
autoscaling_client.create_launch_configuration(
LaunchConfigurationName="tester",
ImageId="ami-12c6146b",
InstanceType="t1.micro",
KeyName="the_keys",
SecurityGroups=["default", "default2"],
UserData="This is some user_data",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
AutoScaling,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_client",
new=AutoScaling(current_audit_info),
):
from prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration import (
autoscaling_find_secrets_ec2_launch_configuration,
)
check = autoscaling_find_secrets_ec2_launch_configuration()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "No secrets found in autoscaling tester User Data."
)
assert result[0].resource_id == "tester"
@mock_autoscaling
def test_one_autoscaling_with_secrets(self):
# Include launch_configurations to check
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
autoscaling_client.create_launch_configuration(
LaunchConfigurationName="tester",
ImageId="ami-12c6146b",
InstanceType="t1.micro",
KeyName="the_keys",
SecurityGroups=["default", "default2"],
UserData="DB_PASSWORD=foobar123",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
AutoScaling,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_client",
new=AutoScaling(current_audit_info),
):
from prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration import (
autoscaling_find_secrets_ec2_launch_configuration,
)
check = autoscaling_find_secrets_ec2_launch_configuration()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Potential secret found in autoscaling tester User Data."
)
assert result[0].resource_id == "tester"
@mock_autoscaling
def test_one_autoscaling_file_with_secrets(self):
# Include launch_configurations to check
        with open(
            "prowler/providers/aws/services/autoscaling/autoscaling_find_secrets_ec2_launch_configuration/fixtures/fixture",
            "r",
        ) as f:
            secrets = f.read()
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
autoscaling_client.create_launch_configuration(
LaunchConfigurationName="tester",
ImageId="ami-12c6146b",
InstanceType="t1.micro",
KeyName="the_keys",
SecurityGroups=["default", "default2"],
UserData=secrets,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
AutoScaling,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_client",
new=AutoScaling(current_audit_info),
):
from prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration import (
autoscaling_find_secrets_ec2_launch_configuration,
)
check = autoscaling_find_secrets_ec2_launch_configuration()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Potential secret found in autoscaling tester User Data."
)
assert result[0].resource_id == "tester"
@mock_autoscaling
def test_one_launch_configurations_without_user_data(self):
# Include launch_configurations to check
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
autoscaling_client.create_launch_configuration(
LaunchConfigurationName="tester",
ImageId="ami-12c6146b",
InstanceType="t1.micro",
KeyName="the_keys",
SecurityGroups=["default", "default2"],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
AutoScaling,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_client",
new=AutoScaling(current_audit_info),
):
from prowler.providers.aws.services.autoscaling.autoscaling_find_secrets_ec2_launch_configuration.autoscaling_find_secrets_ec2_launch_configuration import (
autoscaling_find_secrets_ec2_launch_configuration,
)
check = autoscaling_find_secrets_ec2_launch_configuration()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "No secrets found in autoscaling tester since User Data is empty."
)
assert result[0].resource_id == "tester"

View File

@@ -0,0 +1,100 @@
from base64 import b64decode
from boto3 import client, session
from moto import mock_autoscaling
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.autoscaling.autoscaling_service import AutoScaling
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
class Test_AutoScaling_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
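    # Every service class takes an AWS_Audit_Info; the helper above builds a minimal
    # one around a plain boto3 Session so the moto decorators can intercept the calls.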
# Test AutoScaling Service
@mock_autoscaling
def test_service(self):
# AutoScaling client for this test class
audit_info = self.set_mocked_audit_info()
autoscaling = AutoScaling(audit_info)
assert autoscaling.service == "autoscaling"
# Test AutoScaling Client
@mock_autoscaling
def test_client(self):
# AutoScaling client for this test class
audit_info = self.set_mocked_audit_info()
autoscaling = AutoScaling(audit_info)
for regional_client in autoscaling.regional_clients.values():
assert regional_client.__class__.__name__ == "AutoScaling"
# Test AutoScaling Session
@mock_autoscaling
def test__get_session__(self):
# AutoScaling client for this test class
audit_info = self.set_mocked_audit_info()
autoscaling = AutoScaling(audit_info)
assert autoscaling.session.__class__.__name__ == "Session"
    # Test AutoScaling Audited Account
@mock_autoscaling
def test_audited_account(self):
# AutoScaling client for this test class
audit_info = self.set_mocked_audit_info()
autoscaling = AutoScaling(audit_info)
assert autoscaling.audited_account == AWS_ACCOUNT_NUMBER
# Test AutoScaling Get APIs
@mock_autoscaling
def test__describe_launch_configurations__(self):
# Generate AutoScaling Client
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
        # Create AutoScaling Launch Configurations
autoscaling_client.create_launch_configuration(
LaunchConfigurationName="tester1",
ImageId="ami-12c6146b",
InstanceType="t1.micro",
KeyName="the_keys",
SecurityGroups=["default", "default2"],
UserData="DB_PASSWORD=foobar123",
)
autoscaling_client.create_launch_configuration(
LaunchConfigurationName="tester2",
ImageId="ami-12c6146b",
InstanceType="t1.micro",
KeyName="the_keys",
SecurityGroups=["default", "default2"],
)
# AutoScaling client for this test class
audit_info = self.set_mocked_audit_info()
autoscaling = AutoScaling(audit_info)
assert len(autoscaling.launch_configurations) == 2
assert autoscaling.launch_configurations[0].name == "tester1"
assert (
b64decode(autoscaling.launch_configurations[0].user_data).decode("utf-8")
== "DB_PASSWORD=foobar123"
)
assert autoscaling.launch_configurations[0].image_id == "ami-12c6146b"
assert autoscaling.launch_configurations[1].image_id == "ami-12c6146b"
assert autoscaling.launch_configurations[1].name == "tester2"

View File

@@ -0,0 +1,217 @@
from unittest import mock
from boto3 import client, session
from mock import patch
from moto import mock_cloudtrail, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info
from prowler.providers.aws.services.awslambda.awslambda_service import Function
AWS_REGION = "us-east-1"
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
@patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_awslambda_function_invoke_api_operations_cloudtrail_logging_enabled:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=None,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
@mock_cloudtrail
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
), mock.patch(
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
self.set_mocked_audit_info(),
), mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.cloudtrail_client",
new=Cloudtrail(self.set_mocked_audit_info()),
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled import (
awslambda_function_invoke_api_operations_cloudtrail_logging_enabled,
)
check = (
awslambda_function_invoke_api_operations_cloudtrail_logging_enabled()
)
result = check.execute()
assert len(result) == 0
@mock_cloudtrail
@mock_s3
def test_lambda_not_recorded_by_cloudtrail(self):
# Lambda Client
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "python3.9"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
function_name: Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
)
}
# CloudTrail Client
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
trail_name = "test-trail"
bucket_name = "test-bucket"
s3_client.create_bucket(Bucket=bucket_name)
cloudtrail_client.create_trail(
Name=trail_name, S3BucketName=bucket_name, IsMultiRegionTrail=False
)
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
), mock.patch(
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
self.set_mocked_audit_info(),
), mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.cloudtrail_client",
new=Cloudtrail(self.set_mocked_audit_info()),
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled import (
awslambda_function_invoke_api_operations_cloudtrail_logging_enabled,
)
check = (
awslambda_function_invoke_api_operations_cloudtrail_logging_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} is not recorded by CloudTrail"
)
@mock_cloudtrail
@mock_s3
def test_lambda_recorded_by_cloudtrail(self):
# Lambda Client
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "python3.9"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
function_name: Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
)
}
# CloudTrail Client
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
trail_name = "test-trail"
bucket_name = "test-bucket"
s3_client.create_bucket(Bucket=bucket_name)
cloudtrail_client.create_trail(
Name=trail_name, S3BucketName=bucket_name, IsMultiRegionTrail=False
)
_ = cloudtrail_client.put_event_selectors(
TrailName=trail_name,
EventSelectors=[
{
"ReadWriteType": "All",
"IncludeManagementEvents": True,
"DataResources": [
{"Type": "AWS::Lambda::Function", "Values": [function_arn]}
],
}
],
)
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
), mock.patch(
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
self.set_mocked_audit_info(),
), mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.cloudtrail_client",
new=Cloudtrail(self.set_mocked_audit_info()),
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled.awslambda_function_invoke_api_operations_cloudtrail_logging_enabled import (
awslambda_function_invoke_api_operations_cloudtrail_logging_enabled,
)
check = (
awslambda_function_invoke_api_operations_cloudtrail_logging_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Lambda function {function_name} is recorded by CloudTrail {trail_name}"
)

View File

@@ -0,0 +1,127 @@
import zipfile
from unittest import mock
from awslambda_service_test import create_zip_file
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.awslambda.awslambda_service import (
Function,
LambdaCode,
)
AWS_REGION = "us-east-1"
class Test_awslambda_function_no_secrets_in_code:
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_code.awslambda_function_no_secrets_in_code import (
awslambda_function_no_secrets_in_code,
)
check = awslambda_function_no_secrets_in_code()
result = check.execute()
assert len(result) == 0
def test_function_code_with_secrets(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
code_with_secrets = """
def lambda_handler(event, context):
db_password = "test-password"
print("custom log event")
return event
"""
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
code=LambdaCode(
location="",
code_zip=zipfile.ZipFile(create_zip_file(code_with_secrets)),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_code.awslambda_function_no_secrets_in_code import (
awslambda_function_no_secrets_in_code,
)
check = awslambda_function_no_secrets_in_code()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in Lambda function {function_name} code"
)
def test_function_code_without_secrets(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
        code_without_secrets = """
def lambda_handler(event, context):
print("custom log event")
return event
"""
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
code=LambdaCode(
location="",
                    code_zip=zipfile.ZipFile(create_zip_file(code_without_secrets)),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_code.awslambda_function_no_secrets_in_code import (
awslambda_function_no_secrets_in_code,
)
check = awslambda_function_no_secrets_in_code()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"No secrets found in Lambda function {function_name} code"
)

View File

@@ -0,0 +1,146 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.awslambda.awslambda_service import Function
AWS_REGION = "us-east-1"
class Test_awslambda_function_no_secrets_in_variables:
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_variables.awslambda_function_no_secrets_in_variables import (
awslambda_function_no_secrets_in_variables,
)
check = awslambda_function_no_secrets_in_variables()
result = check.execute()
assert len(result) == 0
def test_function_no_variables(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_variables.awslambda_function_no_secrets_in_variables import (
awslambda_function_no_secrets_in_variables,
)
check = awslambda_function_no_secrets_in_variables()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"No secrets found in Lambda function {function_name} variables"
)
def test_function_secrets_in_variables(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
environment={"db_password": "test-password"},
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_variables.awslambda_function_no_secrets_in_variables import (
awslambda_function_no_secrets_in_variables,
)
check = awslambda_function_no_secrets_in_variables()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in Lambda function {function_name} variables"
)
def test_function_no_secrets_in_variables(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
environment={"db_username": "test-user"},
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_no_secrets_in_variables.awslambda_function_no_secrets_in_variables import (
awslambda_function_no_secrets_in_variables,
)
check = awslambda_function_no_secrets_in_variables()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"No secrets found in Lambda function {function_name} variables"
)

View File

@@ -0,0 +1,189 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.awslambda.awslambda_service import Function
AWS_REGION = "us-east-1"
class Test_awslambda_function_not_publicly_accessible:
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (
awslambda_function_not_publicly_accessible,
)
check = awslambda_function_not_publicly_accessible()
result = check.execute()
assert len(result) == 0
def test_function_public(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_policy = {
"Version": "2012-10-17",
"Statement": [
{
"Sid": "public-access",
"Principal": {"AWS": ["*", DEFAULT_ACCOUNT_ID]},
"Effect": "Allow",
"Action": [
"lambda:InvokeFunction",
],
"Resource": [function_arn],
}
],
}
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
policy=lambda_policy,
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (
awslambda_function_not_publicly_accessible,
)
check = awslambda_function_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} has a policy resource-based policy with public access"
)
def test_function_not_public(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_policy = {
"Version": "2012-10-17",
"Statement": [
{
"Sid": "public-access",
"Principal": {"AWS": [DEFAULT_ACCOUNT_ID]},
"Effect": "Allow",
"Action": [
"lambda:InvokeFunction",
],
"Resource": [function_arn],
}
],
}
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
policy=lambda_policy,
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (
awslambda_function_not_publicly_accessible,
)
check = awslambda_function_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Lambda function {function_name} has a policy resource-based policy not public"
)
def test_function_public_with_canonical(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_policy = {
"Version": "2012-10-17",
"Statement": [
{
"Sid": "public-access",
"Principal": {"CanonicalUser": ["*"]},
"Effect": "Allow",
"Action": [
"lambda:InvokeFunction",
],
"Resource": [function_arn],
}
],
}
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
policy=lambda_policy,
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (
awslambda_function_not_publicly_accessible,
)
check = awslambda_function_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} has a policy resource-based policy with public access"
)

View File

@@ -0,0 +1,163 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.awslambda.awslambda_service import (
AuthType,
Function,
URLConfig,
URLConfigCORS,
)
AWS_REGION = "us-east-1"
class Test_awslambda_function_url_cors_policy:
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_cors_policy.awslambda_function_url_cors_policy import (
awslambda_function_url_cors_policy,
)
check = awslambda_function_url_cors_policy()
result = check.execute()
assert len(result) == 0
def test_function_cors_asterisk(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
url_config=URLConfig(
auth_type=AuthType.NONE,
url="",
cors_config=URLConfigCORS(allow_origins=["*"]),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_cors_policy.awslambda_function_url_cors_policy import (
awslambda_function_url_cors_policy,
)
check = awslambda_function_url_cors_policy()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} URL has a wide CORS configuration"
)
def test_function_cors_not_wide(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "python3.9"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
url_config=URLConfig(
auth_type=AuthType.AWS_IAM,
url="",
cors_config=URLConfigCORS(allow_origins=["https://example.com"]),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_cors_policy.awslambda_function_url_cors_policy import (
awslambda_function_url_cors_policy,
)
check = awslambda_function_url_cors_policy()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Lambda function {function_name} has not a wide CORS configuration"
)
def test_function_cors_wide_with_two_origins(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "python3.9"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
url_config=URLConfig(
auth_type=AuthType.AWS_IAM,
url="",
cors_config=URLConfigCORS(
allow_origins=["https://example.com", "*"]
),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_cors_policy.awslambda_function_url_cors_policy import (
awslambda_function_url_cors_policy,
)
check = awslambda_function_url_cors_policy()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} URL has a wide CORS configuration"
)

View File

@@ -0,0 +1,118 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.awslambda.awslambda_service import (
AuthType,
Function,
URLConfig,
URLConfigCORS,
)
AWS_REGION = "us-east-1"
class Test_awslambda_function_url_public:
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_public.awslambda_function_url_public import (
awslambda_function_url_public,
)
check = awslambda_function_url_public()
result = check.execute()
assert len(result) == 0
def test_function_public_url(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
url_config=URLConfig(
auth_type=AuthType.NONE,
url="",
cors_config=URLConfigCORS(allow_origins=[]),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_public.awslambda_function_url_public import (
awslambda_function_url_public,
)
check = awslambda_function_url_public()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} has a publicly accessible function URL"
)
def test_function_private_url(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "python3.9"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
url_config=URLConfig(
auth_type=AuthType.AWS_IAM,
url="",
cors_config=URLConfigCORS(allow_origins=[]),
),
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_url_public.awslambda_function_url_public import (
awslambda_function_url_public,
)
check = awslambda_function_url_public()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Lambda function {function_name} has not a publicly accessible function URL"
)

View File

@@ -0,0 +1,126 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.awslambda.awslambda_service import Function
AWS_REGION = "us-east-1"
def mock_get_config_var(config_var: str):
return [
"python3.6",
"python2.7",
"nodejs4.3",
"nodejs4.3-edge",
"nodejs6.10",
"nodejs",
"nodejs8.10",
"nodejs10.x",
"dotnetcore1.0",
"dotnetcore2.0",
"dotnetcore2.1",
"ruby2.5",
]
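# The check reads the list of runtimes considered obsolete from the Prowler config via
# get_config_var; the stub above pins that list so the test does not depend on the
# shipped configuration file.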
class Test_awslambda_function_using_supported_runtimes:
def test_no_functions(self):
lambda_client = mock.MagicMock
lambda_client.functions = {}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_using_supported_runtimes.awslambda_function_using_supported_runtimes import (
awslambda_function_using_supported_runtimes,
)
check = awslambda_function_using_supported_runtimes()
result = check.execute()
assert len(result) == 0
def test_function_obsolete_runtime(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "nodejs4.3"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
), mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_function_using_supported_runtimes.awslambda_function_using_supported_runtimes.get_config_var",
new=mock_get_config_var,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_using_supported_runtimes.awslambda_function_using_supported_runtimes import (
awslambda_function_using_supported_runtimes,
)
check = awslambda_function_using_supported_runtimes()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Lambda function {function_name} is using {function_runtime} which is obsolete"
)
def test_function_supported_runtime(self):
lambda_client = mock.MagicMock
function_name = "test-lambda"
function_runtime = "python3.9"
function_arn = (
f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function/{function_name}"
)
lambda_client.functions = {
"function_name": Function(
name=function_name,
arn=function_arn,
region=AWS_REGION,
runtime=function_runtime,
)
}
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.Lambda",
new=lambda_client,
), mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_function_using_supported_runtimes.awslambda_function_using_supported_runtimes.get_config_var",
new=mock_get_config_var,
):
# Test Check
from prowler.providers.aws.services.awslambda.awslambda_function_using_supported_runtimes.awslambda_function_using_supported_runtimes import (
awslambda_function_using_supported_runtimes,
)
check = awslambda_function_using_supported_runtimes()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == function_name
assert result[0].resource_arn == function_arn
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Lambda function {function_name} is using {function_runtime} which is supported"
)

View File

@@ -0,0 +1,227 @@
import io
import os
import tempfile
import zipfile
from re import search
from unittest.mock import patch
import mock
from boto3 import client, resource, session
from moto import mock_iam, mock_lambda, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.awslambda.awslambda_service import AuthType, Lambda
# Mock Test Region
AWS_REGION = "eu-west-1"
def create_zip_file(code: str = "") -> io.BytesIO:
zip_output = io.BytesIO()
zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED)
if not code:
zip_file.writestr(
"lambda_function.py",
"""
def lambda_handler(event, context):
print("custom log event")
return event
""",
)
else:
zip_file.writestr("lambda_function.py", code)
zip_file.close()
zip_output.seek(0)
return zip_output
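# The in-memory zip built above is what create_function() expects under
# Code={"ZipFile": ...} and is also what mock_request_get() serves back below.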
def mock_request_get(_):
"""Mock requests.get() to get the Lambda Code in Zip Format"""
mock_resp = mock.MagicMock
mock_resp.status_code = 200
mock_resp.content = create_zip_file().read()
return mock_resp
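# The Lambda service fetches each function's code package over HTTP with requests.get
# from the location returned by the API; patching requests.get with the stub above
# short-circuits that download and returns the in-memory zip instead.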
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
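# Prowler normally opens one regional client per audited region; collapsing
# generate_regional_clients to a single AWS_REGION client keeps the moto-backed
# tests scoped to one region.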
@patch(
"prowler.providers.aws.services.awslambda.awslambda_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_Lambda_Service:
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=DEFAULT_ACCOUNT_ID,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test Lambda Client
def test__get_client__(self):
awslambda = Lambda(current_audit_info)
assert awslambda.regional_clients[AWS_REGION].__class__.__name__ == "Lambda"
# Test Lambda Session
def test__get_session__(self):
awslambda = Lambda(current_audit_info)
assert awslambda.session.__class__.__name__ == "Session"
# Test Lambda Service
def test__get_service__(self):
awslambda = Lambda(current_audit_info)
assert awslambda.service == "lambda"
@mock_lambda
@mock_iam
@mock_s3
def test__list_functions__(self):
# Create IAM Lambda Role
iam_client = client("iam", region_name=AWS_REGION)
iam_role = iam_client.create_role(
RoleName="test-lambda-role",
AssumeRolePolicyDocument="test-policy",
Path="/",
)["Role"]["Arn"]
# Create S3 Bucket
s3_client = resource("s3", region_name=AWS_REGION)
s3_client.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": AWS_REGION},
)
# Create Test Lambda
lambda_client = client("lambda", region_name=AWS_REGION)
lambda_name = "test-lambda"
resp = lambda_client.create_function(
FunctionName=lambda_name,
Runtime="python3.7",
Role=iam_role,
Handler="lambda_function.lambda_handler",
Code={"ZipFile": create_zip_file().read()},
Description="test lambda function",
Timeout=3,
MemorySize=128,
PackageType="ZIP",
Publish=True,
VpcConfig={
"SecurityGroupIds": ["sg-123abc"],
"SubnetIds": ["subnet-123abc"],
},
Environment={"Variables": {"db-password": "test-password"}},
)
# Update Lambda Policy
lambda_policy = {
"Version": "2012-10-17",
"Id": "default",
"Statement": [
{
"Action": "lambda:GetFunction",
"Principal": "*",
"Effect": "Allow",
"Resource": f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function:{lambda_name}",
"Sid": "test",
}
],
}
_ = lambda_client.add_permission(
FunctionName=lambda_name,
StatementId="test",
Action="lambda:GetFunction",
Principal="*",
)
# Create Function URL Config
_ = lambda_client.create_function_url_config(
FunctionName=lambda_name,
AuthType=AuthType.AWS_IAM.value,
Cors={
"AllowCredentials": True,
"AllowHeaders": [
"string",
],
"AllowMethods": [
"string",
],
"AllowOrigins": [
"*",
],
"ExposeHeaders": [
"string",
],
"MaxAge": 123,
},
)
lambda_arn = resp["FunctionArn"]
with mock.patch(
"prowler.providers.aws.services.awslambda.awslambda_service.requests.get",
new=mock_request_get,
):
awslambda = Lambda(self.set_mocked_audit_info())
assert awslambda.functions
assert awslambda.functions[lambda_name].name == lambda_name
assert awslambda.functions[lambda_name].arn == lambda_arn
assert awslambda.functions[lambda_name].runtime == "python3.7"
assert awslambda.functions[lambda_name].environment == {
"db-password": "test-password"
}
assert awslambda.functions[lambda_name].region == AWS_REGION
assert awslambda.functions[lambda_name].policy == lambda_policy
assert awslambda.functions[lambda_name].code
assert search(
f"s3://awslambda-{AWS_REGION}-tasks.s3-{AWS_REGION}.amazonaws.com",
awslambda.functions[lambda_name].code.location,
)
assert awslambda.functions[lambda_name].url_config
assert (
awslambda.functions[lambda_name].url_config.auth_type
== AuthType.AWS_IAM
)
assert search(
"lambda-url.eu-west-1.on.aws",
awslambda.functions[lambda_name].url_config.url,
)
assert awslambda.functions[lambda_name].url_config.cors_config
assert awslambda.functions[
lambda_name
].url_config.cors_config.allow_origins == ["*"]
# Pending ZipFile tests
with tempfile.TemporaryDirectory() as tmp_dir_name:
awslambda.functions[lambda_name].code.code_zip.extractall(tmp_dir_name)
files_in_zip = next(os.walk(tmp_dir_name))[2]
assert len(files_in_zip) == 1
assert files_in_zip[0] == "lambda_function.py"
with open(f"{tmp_dir_name}/{files_in_zip[0]}", "r") as lambda_code_file:
_ = lambda_code_file
# assert (
# lambda_code_file.read()
# == """
# def lambda_handler(event, context):
# print("custom log event")
# return event
# """
# )

View File

@@ -0,0 +1,133 @@
from unittest import mock
from prowler.providers.aws.services.cloudformation.cloudformation_service import Stack
# Mock Test Region
AWS_REGION = "eu-west-1"
class Test_cloudformation_outputs_find_secrets:
def test_no_stacks(self):
cloudformation_client = mock.MagicMock
cloudformation_client.stacks = []
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
new=cloudformation_client,
):
# Test Check
from prowler.providers.aws.services.cloudformation.cloudformation_outputs_find_secrets.cloudformation_outputs_find_secrets import (
cloudformation_outputs_find_secrets,
)
check = cloudformation_outputs_find_secrets()
result = check.execute()
assert len(result) == 0
def test_stack_secret_in_outputs(self):
cloudformation_client = mock.MagicMock
stack_name = "Test-Stack"
cloudformation_client.stacks = [
Stack(
arn="arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
name=stack_name,
outputs=["DB_PASSWORD:foobar123", "ENV:DEV"],
region=AWS_REGION,
)
]
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
cloudformation_client,
):
from prowler.providers.aws.services.cloudformation.cloudformation_outputs_find_secrets.cloudformation_outputs_find_secrets import (
cloudformation_outputs_find_secrets,
)
check = cloudformation_outputs_find_secrets()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in Stack {stack_name} Outputs."
)
assert result[0].resource_id == "Test-Stack"
assert (
result[0].resource_arn
== "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
)
assert result[0].region == AWS_REGION
def test_stack_no_secret_in_outputs(self):
cloudformation_client = mock.MagicMock
stack_name = "Test-Stack"
cloudformation_client.stacks = [
Stack(
arn="arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
name=stack_name,
outputs=["ENV:DEV"],
region=AWS_REGION,
)
]
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
cloudformation_client,
):
from prowler.providers.aws.services.cloudformation.cloudformation_outputs_find_secrets.cloudformation_outputs_find_secrets import (
cloudformation_outputs_find_secrets,
)
check = cloudformation_outputs_find_secrets()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"No secrets found in Stack {stack_name} Outputs."
)
assert result[0].resource_id == "Test-Stack"
assert (
result[0].resource_arn
== "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
)
assert result[0].region == AWS_REGION
def test_stack_no_outputs(self):
cloudformation_client = mock.MagicMock
stack_name = "Test-Stack"
cloudformation_client.stacks = [
Stack(
arn="arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
name=stack_name,
outputs=[],
region=AWS_REGION,
)
]
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
cloudformation_client,
):
from prowler.providers.aws.services.cloudformation.cloudformation_outputs_find_secrets.cloudformation_outputs_find_secrets import (
cloudformation_outputs_find_secrets,
)
check = cloudformation_outputs_find_secrets()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFormation {stack_name} has no Outputs."
)
assert result[0].resource_id == "Test-Stack"
assert (
result[0].resource_arn
== "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
)
assert result[0].region == AWS_REGION

View File

@@ -0,0 +1,210 @@
import datetime
import json
from unittest.mock import patch
import boto3
import botocore
from boto3 import session
from dateutil.tz import tzutc
from moto import mock_cloudformation
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info
from prowler.providers.aws.services.cloudformation.cloudformation_service import (
CloudFormation,
)
# Mock Test Region
AWS_REGION = "eu-west-1"
# Dummy CloudFormation Template
dummy_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "Stack 1",
"Resources": {
"EC2Instance1": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "EXAMPLE_AMI_ID",
"KeyName": "dummy",
"InstanceType": "t2.micro",
"Tags": [
{"Key": "Description", "Value": "Test tag"},
{"Key": "Name", "Value": "Name tag for tests"},
],
},
}
},
}
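# The template body only needs to be syntactically valid: both CreateStack and
# DescribeStacks are answered by the mocked _make_api_call below.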
# Mocking CloudFormation Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
    The operation_name uses the PascalCase form (e.g. DescribeStacks) rather than the
    snake_case client method name (describe_stacks).
    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
    We have to mock every AWS API call made through Boto3.
"""
if operation_name == "CreateStack":
return {
"StackId": "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
}
if operation_name == "DescribeStacks":
if "StackName" in kwarg:
return {
"Stacks": [
{
"StackId": "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
"StackName": "Test-Stack",
"Description": "Stack 1",
"Parameters": [],
"CreationTime": datetime.datetime(
2022, 11, 7, 9, 33, 51, tzinfo=tzutc()
),
"StackStatus": "CREATE_COMPLETE",
"DisableRollback": False,
"NotificationARNs": [],
"Outputs": [
{
"OutputKey": "TestOutput1",
"OutputValue": "TestValue1",
"Description": "Test Output Description.",
}
],
"RoleARN": "arn:aws:iam::123456789012:role/moto",
"EnableTerminationProtection": True,
"Tags": [
{"Key": "Tag1", "Value": "Value1"},
{"Key": "Tag2", "Value": "Value2"},
],
}
]
}
# Return all Stacks
else:
return {
"Stacks": [
{
"StackId": "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
"StackName": "Test-Stack",
"Description": "Stack 1",
"Parameters": [],
"CreationTime": datetime.datetime(
2022, 11, 7, 9, 33, 51, tzinfo=tzutc()
),
"StackStatus": "CREATE_COMPLETE",
"DisableRollback": False,
"NotificationARNs": [],
"Outputs": [
{
"OutputKey": "TestOutput1",
"OutputValue": "TestValue1",
"Description": "Test Output Description.",
}
],
"RoleARN": "arn:aws:iam::123456789012:role/moto",
"Tags": [
{"Key": "Tag1", "Value": "Value1"},
{"Key": "Tag2", "Value": "Value2"},
],
}
]
}
return make_api_call(self, operation_name, kwarg)
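# Any operation not intercepted above falls through to the original
# botocore _make_api_call captured earlier.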
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_CloudFormation_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=None,
audited_user_id=None,
audited_partition=None,
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test CloudFormation Client
@mock_cloudformation
def test__get_client__(self):
cloudformation = CloudFormation(self.set_mocked_audit_info())
assert (
cloudformation.regional_clients[AWS_REGION].__class__.__name__
== "CloudFormation"
)
# Test CloudFormation Service
@mock_cloudformation
def test__get_service__(self):
cloudformation = CloudFormation(self.set_mocked_audit_info())
        assert cloudformation.service == "cloudformation"
# Test CloudFormation Session
@mock_cloudformation
def test__get_session__(self):
cloudformation = CloudFormation(self.set_mocked_audit_info())
assert cloudformation.session.__class__.__name__ == "Session"
@mock_cloudformation
def test__describe_stacks__(self):
cloudformation_client = boto3.client("cloudformation", region_name=AWS_REGION)
stack_arn = cloudformation_client.create_stack(
StackName="Test-Stack",
TemplateBody=json.dumps(dummy_template),
RoleARN=f"arn:aws:iam::{DEFAULT_ACCOUNT_ID}:role/moto",
Tags=[
{"Key": "Tag1", "Value": "Value1"},
{"Key": "Tag2", "Value": "Value2"},
],
EnableTerminationProtection=True,
Outputs=[
{
"OutputKey": "TestOutput1",
"OutputValue": "TestValue1",
"Description": "Test Output Description.",
}
],
)
cloudformation = CloudFormation(self.set_mocked_audit_info())
assert len(cloudformation.stacks) == 1
assert cloudformation.stacks[0].arn == stack_arn["StackId"]
assert cloudformation.stacks[0].name == "Test-Stack"
assert cloudformation.stacks[0].outputs == ["TestOutput1:TestValue1"]
assert cloudformation.stacks[0].enable_termination_protection is True
assert cloudformation.stacks[0].is_nested_stack is False
assert cloudformation.stacks[0].root_nested_stack == ""
assert cloudformation.stacks[0].region == AWS_REGION

View File

@@ -0,0 +1,99 @@
from unittest import mock
from prowler.providers.aws.services.cloudformation.cloudformation_service import Stack
# Mock Test Region
AWS_REGION = "eu-west-1"
class Test_cloudformation_stacks_termination_protection_enabled:
def test_no_stacks(self):
cloudformation_client = mock.MagicMock
cloudformation_client.stacks = []
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
new=cloudformation_client,
):
# Test Check
from prowler.providers.aws.services.cloudformation.cloudformation_stacks_termination_protection_enabled.cloudformation_stacks_termination_protection_enabled import (
cloudformation_stacks_termination_protection_enabled,
)
check = cloudformation_stacks_termination_protection_enabled()
result = check.execute()
assert len(result) == 0
def test_stack_termination_protection_enabled(self):
cloudformation_client = mock.MagicMock
stack_name = "Test-Stack"
cloudformation_client.stacks = [
Stack(
arn="arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
name=stack_name,
outputs="",
region=AWS_REGION,
)
]
cloudformation_client.stacks[0].enable_termination_protection = True
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
cloudformation_client,
):
from prowler.providers.aws.services.cloudformation.cloudformation_stacks_termination_protection_enabled.cloudformation_stacks_termination_protection_enabled import (
cloudformation_stacks_termination_protection_enabled,
)
check = cloudformation_stacks_termination_protection_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFormation {stack_name} has termination protection enabled"
)
assert result[0].resource_id == "Test-Stack"
assert (
result[0].resource_arn
== "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
)
assert result[0].region == AWS_REGION
def test_stack_termination_protection_disabled(self):
cloudformation_client = mock.MagicMock
stack_name = "Test-Stack"
cloudformation_client.stacks = [
Stack(
arn="arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60",
name=stack_name,
outputs="",
region=AWS_REGION,
)
]
cloudformation_client.stacks[0].enable_termination_protection = False
with mock.patch(
"prowler.providers.aws.services.cloudformation.cloudformation_service.CloudFormation",
cloudformation_client,
):
from prowler.providers.aws.services.cloudformation.cloudformation_stacks_termination_protection_enabled.cloudformation_stacks_termination_protection_enabled import (
cloudformation_stacks_termination_protection_enabled,
)
check = cloudformation_stacks_termination_protection_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFormation {stack_name} has termination protection disabled"
)
assert result[0].resource_id == "Test-Stack"
assert (
result[0].resource_arn
== "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
)
assert result[0].region == AWS_REGION

View File

@@ -0,0 +1,110 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.cloudfront.cloudfront_service import (
DefaultCacheConfigBehaviour,
Distribution,
ViewerProtocolPolicy,
)
DISTRIBUTION_ID = "E27LVI50CSW06W"
DISTRIBUTION_ARN = (
f"arn:aws:cloudfront::{DEFAULT_ACCOUNT_ID}:distribution/{DISTRIBUTION_ID}"
)
REGION = "eu-west-1"
class Test_cloudfront_distributions_field_level_encryption_enabled:
def test_no_distributions(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_field_level_encryption_enabled.cloudfront_distributions_field_level_encryption_enabled import (
cloudfront_distributions_field_level_encryption_enabled,
)
check = cloudfront_distributions_field_level_encryption_enabled()
result = check.execute()
assert len(result) == 0
def test_one_distribution_field_level_encryption_enabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn="",
viewer_protocol_policy=ViewerProtocolPolicy.https_only,
field_level_encryption_id="AAAAAAAA",
),
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_field_level_encryption_enabled.cloudfront_distributions_field_level_encryption_enabled import (
cloudfront_distributions_field_level_encryption_enabled,
)
check = cloudfront_distributions_field_level_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has Field Level Encryption enabled"
)
def test_one_distribution_field_level_encryption_disabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn="",
viewer_protocol_policy=ViewerProtocolPolicy.https_only,
field_level_encryption_id="",
),
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_field_level_encryption_enabled.cloudfront_distributions_field_level_encryption_enabled import (
cloudfront_distributions_field_level_encryption_enabled,
)
check = cloudfront_distributions_field_level_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has Field Level Encryption disabled"
)

View File

@@ -0,0 +1,135 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.cloudfront.cloudfront_service import (
Distribution,
GeoRestrictionType,
)
DISTRIBUTION_ID = "E27LVI50CSW06W"
DISTRIBUTION_ARN = (
f"arn:aws:cloudfront::{DEFAULT_ACCOUNT_ID}:distribution/{DISTRIBUTION_ID}"
)
REGION = "eu-west-1"
class Test_cloudfront_distributions_geo_restrictions_enabled:
def test_no_distributions(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_geo_restrictions_enabled.cloudfront_distributions_geo_restrictions_enabled import (
cloudfront_distributions_geo_restrictions_enabled,
)
check = cloudfront_distributions_geo_restrictions_enabled()
result = check.execute()
assert len(result) == 0
def test_one_distribution_geo_restriction_disabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
geo_restriction_type=GeoRestrictionType.none,
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_geo_restrictions_enabled.cloudfront_distributions_geo_restrictions_enabled import (
cloudfront_distributions_geo_restrictions_enabled,
)
check = cloudfront_distributions_geo_restrictions_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has Geo restrictions disabled"
)
def test_one_distribution_geo_restriction_enabled_whitelist(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
geo_restriction_type=GeoRestrictionType.whitelist,
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_geo_restrictions_enabled.cloudfront_distributions_geo_restrictions_enabled import (
cloudfront_distributions_geo_restrictions_enabled,
)
check = cloudfront_distributions_geo_restrictions_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has Geo restrictions enabled"
)
def test_one_distribution_geo_restriction_enabled_blacklist(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
geo_restriction_type=GeoRestrictionType.blacklist,
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_geo_restrictions_enabled.cloudfront_distributions_geo_restrictions_enabled import (
cloudfront_distributions_geo_restrictions_enabled,
)
check = cloudfront_distributions_geo_restrictions_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has Geo restrictions enabled"
)

View File

@@ -0,0 +1,148 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.cloudfront.cloudfront_service import (
DefaultCacheConfigBehaviour,
Distribution,
ViewerProtocolPolicy,
)
DISTRIBUTION_ID = "E27LVI50CSW06W"
DISTRIBUTION_ARN = (
f"arn:aws:cloudfront::{DEFAULT_ACCOUNT_ID}:distribution/{DISTRIBUTION_ID}"
)
REGION = "eu-west-1"
class Test_cloudfront_distributions_https_enabled:
def test_no_distributions(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_https_enabled.cloudfront_distributions_https_enabled import (
cloudfront_distributions_https_enabled,
)
check = cloudfront_distributions_https_enabled()
result = check.execute()
assert len(result) == 0
def test_one_distribution_https_disabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn="",
viewer_protocol_policy=ViewerProtocolPolicy.allow_all,
field_level_encryption_id="",
),
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_https_enabled.cloudfront_distributions_https_enabled import (
cloudfront_distributions_https_enabled,
)
check = cloudfront_distributions_https_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} viewers can use HTTP or HTTPS"
)
def test_one_distribution_https_redirect(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn="",
viewer_protocol_policy=ViewerProtocolPolicy.redirect_to_https,
field_level_encryption_id="",
),
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_https_enabled.cloudfront_distributions_https_enabled import (
cloudfront_distributions_https_enabled,
)
check = cloudfront_distributions_https_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has redirect to HTTPS"
)
def test_one_distribution_https_only(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn="",
viewer_protocol_policy=ViewerProtocolPolicy.https_only,
field_level_encryption_id="",
),
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_https_enabled.cloudfront_distributions_https_enabled import (
cloudfront_distributions_https_enabled,
)
check = cloudfront_distributions_https_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has HTTPS only"
)

View File

@@ -0,0 +1,185 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.cloudfront.cloudfront_service import (
DefaultCacheConfigBehaviour,
Distribution,
ViewerProtocolPolicy,
)
DISTRIBUTION_ID = "E27LVI50CSW06W"
DISTRIBUTION_ARN = (
f"arn:aws:cloudfront::{DEFAULT_ACCOUNT_ID}:distribution/{DISTRIBUTION_ID}"
)
REGION = "eu-west-1"
class Test_cloudfront_distributions_logging_enabled:
def test_no_distributions(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_logging_enabled.cloudfront_distributions_logging_enabled import (
cloudfront_distributions_logging_enabled,
)
check = cloudfront_distributions_logging_enabled()
result = check.execute()
assert len(result) == 0
def test_one_distribution_logging_enabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
logging_enabled=True,
origins=[],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_logging_enabled.cloudfront_distributions_logging_enabled import (
cloudfront_distributions_logging_enabled,
)
check = cloudfront_distributions_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has logging enabled"
)
def test_one_distribution_logging_disabled_realtime_disabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
logging_enabled=False,
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn="",
viewer_protocol_policy=ViewerProtocolPolicy.https_only,
field_level_encryption_id="",
),
origins=[],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_logging_enabled.cloudfront_distributions_logging_enabled import (
cloudfront_distributions_logging_enabled,
)
check = cloudfront_distributions_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has logging disabled"
)
def test_one_distribution_logging_disabled_realtime_enabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
logging_enabled=False,
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn=DISTRIBUTION_ARN,
viewer_protocol_policy=ViewerProtocolPolicy.https_only,
field_level_encryption_id="",
),
origins=[],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_logging_enabled.cloudfront_distributions_logging_enabled import (
cloudfront_distributions_logging_enabled,
)
check = cloudfront_distributions_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has logging enabled"
)
def test_one_distribution_logging_enabled_realtime_enabled(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
logging_enabled=True,
default_cache_config=DefaultCacheConfigBehaviour(
realtime_log_config_arn=DISTRIBUTION_ARN,
viewer_protocol_policy=ViewerProtocolPolicy.https_only,
field_level_encryption_id="",
),
origins=[],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_logging_enabled.cloudfront_distributions_logging_enabled import (
cloudfront_distributions_logging_enabled,
)
check = cloudfront_distributions_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} has logging enabled"
)

View File

@@ -0,0 +1,306 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.cloudfront.cloudfront_service import Distribution
DISTRIBUTION_ID = "E27LVI50CSW06W"
DISTRIBUTION_ARN = (
f"arn:aws:cloudfront::{DEFAULT_ACCOUNT_ID}:distribution/{DISTRIBUTION_ID}"
)
REGION = "eu-west-1"
class Test_cloudfront_distributions_using_deprecated_ssl_protocols:
def test_no_distributions(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_deprecated_ssl_protocols.cloudfront_distributions_using_deprecated_ssl_protocols import (
cloudfront_distributions_using_deprecated_ssl_protocols,
)
check = cloudfront_distributions_using_deprecated_ssl_protocols()
result = check.execute()
assert len(result) == 0
def test_one_distribution_using_deprecated_ssl_protocols(self):
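# The origin below mirrors the CustomOriginConfig shape returned by the CloudFront
# API, with SSLv3 listed in OriginSslProtocols so the check reports a FAIL.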
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[
{
"Id": "string",
"DomainName": "string",
"OriginPath": "string",
"CustomHeaders": {
"Quantity": 123,
"Items": [
{
"HeaderName": "string",
"HeaderValue": "string",
},
],
},
"S3OriginConfig": {"OriginAccessIdentity": "string"},
"CustomOriginConfig": {
"HTTPPort": 123,
"HTTPSPort": 123,
"OriginProtocolPolicy": "https-only",
"OriginSslProtocols": {
"Quantity": 123,
"Items": [
"SSLv3",
],
},
"OriginReadTimeout": 123,
"OriginKeepaliveTimeout": 123,
},
"ConnectionAttempts": 123,
"ConnectionTimeout": 123,
"OriginShield": {
"Enabled": False,
"OriginShieldRegion": "string",
},
"OriginAccessControlId": "string",
},
],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_deprecated_ssl_protocols.cloudfront_distributions_using_deprecated_ssl_protocols import (
cloudfront_distributions_using_deprecated_ssl_protocols,
)
check = cloudfront_distributions_using_deprecated_ssl_protocols()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} is using a deprecated SSL protocol"
)
def test_one_distribution_using_SSL_and_TLS(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[
{
"Id": "string",
"DomainName": "string",
"OriginPath": "string",
"CustomHeaders": {
"Quantity": 123,
"Items": [
{
"HeaderName": "string",
"HeaderValue": "string",
},
],
},
"S3OriginConfig": {"OriginAccessIdentity": "string"},
"CustomOriginConfig": {
"HTTPPort": 123,
"HTTPSPort": 123,
"OriginProtocolPolicy": "https-only",
"OriginSslProtocols": {
"Quantity": 123,
"Items": [
"SSLv3",
"TLSv1.2",
],
},
"OriginReadTimeout": 123,
"OriginKeepaliveTimeout": 123,
},
"ConnectionAttempts": 123,
"ConnectionTimeout": 123,
"OriginShield": {
"Enabled": False,
"OriginShieldRegion": "string",
},
"OriginAccessControlId": "string",
},
],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_deprecated_ssl_protocols.cloudfront_distributions_using_deprecated_ssl_protocols import (
cloudfront_distributions_using_deprecated_ssl_protocols,
)
check = cloudfront_distributions_using_deprecated_ssl_protocols()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} is using a deprecated SSL protocol"
)
def test_one_distribution_using_SSL_and_bad_TLS(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[
{
"Id": "string",
"DomainName": "string",
"OriginPath": "string",
"CustomHeaders": {
"Quantity": 123,
"Items": [
{
"HeaderName": "string",
"HeaderValue": "string",
},
],
},
"S3OriginConfig": {"OriginAccessIdentity": "string"},
"CustomOriginConfig": {
"HTTPPort": 123,
"HTTPSPort": 123,
"OriginProtocolPolicy": "https-only",
"OriginSslProtocols": {
"Quantity": 123,
"Items": [
"SSLv3",
"TLSv1.1",
],
},
"OriginReadTimeout": 123,
"OriginKeepaliveTimeout": 123,
},
"ConnectionAttempts": 123,
"ConnectionTimeout": 123,
"OriginShield": {
"Enabled": False,
"OriginShieldRegion": "string",
},
"OriginAccessControlId": "string",
},
],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_deprecated_ssl_protocols.cloudfront_distributions_using_deprecated_ssl_protocols import (
cloudfront_distributions_using_deprecated_ssl_protocols,
)
check = cloudfront_distributions_using_deprecated_ssl_protocols()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} is using a deprecated SSL protocol"
)
def test_one_distribution_not_using_deprecated_ssl_protocols(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[
{
"Id": "string",
"DomainName": "string",
"OriginPath": "string",
"CustomHeaders": {
"Quantity": 123,
"Items": [
{
"HeaderName": "string",
"HeaderValue": "string",
},
],
},
"S3OriginConfig": {"OriginAccessIdentity": "string"},
"CustomOriginConfig": {
"HTTPPort": 123,
"HTTPSPort": 123,
"OriginProtocolPolicy": "https-only",
"OriginSslProtocols": {
"Quantity": 123,
"Items": ["TLSv1.2"],
},
"OriginReadTimeout": 123,
"OriginKeepaliveTimeout": 123,
},
"ConnectionAttempts": 123,
"ConnectionTimeout": 123,
"OriginShield": {
"Enabled": False,
"OriginShieldRegion": "string",
},
"OriginAccessControlId": "string",
},
],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_deprecated_ssl_protocols.cloudfront_distributions_using_deprecated_ssl_protocols import (
cloudfront_distributions_using_deprecated_ssl_protocols,
)
check = cloudfront_distributions_using_deprecated_ssl_protocols()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} is not using a deprecated SSL protocol"
)

View File

@@ -0,0 +1,98 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.cloudfront.cloudfront_service import Distribution
DISTRIBUTION_ID = "E27LVI50CSW06W"
DISTRIBUTION_ARN = (
f"arn:aws:cloudfront::{DEFAULT_ACCOUNT_ID}:distribution/{DISTRIBUTION_ID}"
)
REGION = "eu-west-1"
class Test_cloudfront_distributions_using_waf:
def test_no_distributions(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_waf.cloudfront_distributions_using_waf import (
cloudfront_distributions_using_waf,
)
check = cloudfront_distributions_using_waf()
result = check.execute()
assert len(result) == 0
def test_one_distribution_waf(self):
web_acl_id = "TEST-WAF-ACL"
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
web_acl_id=web_acl_id,
origins=[],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_waf.cloudfront_distributions_using_waf import (
cloudfront_distributions_using_waf,
)
check = cloudfront_distributions_using_waf()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} is using AWS WAF web ACL {wef_acl_id}"
)
def test_one_distribution_no_waf(self):
cloudfront_client = mock.MagicMock
cloudfront_client.distributions = {
"DISTRIBUTION_ID": Distribution(
arn=DISTRIBUTION_ARN,
id=DISTRIBUTION_ID,
region=REGION,
origins=[],
)
}
with mock.patch(
"prowler.providers.aws.services.cloudfront.cloudfront_service.CloudFront",
new=cloudfront_client,
):
# Test Check
from prowler.providers.aws.services.cloudfront.cloudfront_distributions_using_waf.cloudfront_distributions_using_waf import (
cloudfront_distributions_using_waf,
)
check = cloudfront_distributions_using_waf()
result = check.execute()
assert len(result) == 1
assert result[0].region == REGION
assert result[0].resource_arn == DISTRIBUTION_ARN
assert result[0].resource_id == DISTRIBUTION_ID
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"CloudFront Distribution {DISTRIBUTION_ID} is not using AWS WAF web ACL"
)

View File

@@ -0,0 +1,247 @@
from unittest.mock import patch
import botocore
from boto3 import client, session
from moto import mock_cloudfront
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.cloudfront.cloudfront_service import CloudFront
# Mock Test Region
AWS_REGION = "eu-west-1"
def example_distribution_config(ref):
"""Return a basic example distribution config for use in tests."""
return {
"CallerReference": ref,
"Origins": {
"Quantity": 1,
"Items": [
{
"Id": "origin1",
"DomainName": "asdf.s3.us-east-1.amazonaws.com",
"S3OriginConfig": {"OriginAccessIdentity": ""},
}
],
},
"DefaultCacheBehavior": {
"TargetOriginId": "origin1",
"ViewerProtocolPolicy": "allow-all",
"MinTTL": 10,
"ForwardedValues": {
"QueryString": False,
"Cookies": {"Forward": "none"},
},
},
"Comment": "an optional comment that's not actually optional",
"Enabled": False,
}
# Mocking CloudFront Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""
As you can see, the Boto3 client exposes this operation as get_distribution_config
(snake_case) while _make_api_call receives the GetDistributionConfig form used below.
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
We have to mock this AWS API call with Boto3 because Moto does not return these parameters yet.
"""
if operation_name == "GetDistributionConfig":
if kwarg["Id"]:
return {
"DistributionConfig": {
"Origins": {"Quantity": 123, "Items": []},
"OriginGroups": {"Quantity": 123, "Items": []},
"DefaultCacheBehavior": {
"TargetOriginId": "",
"TrustedSigners": {
"Enabled": False,
"Quantity": 123,
"Items": [
"",
],
},
"TrustedKeyGroups": {
"Enabled": False,
"Quantity": 123,
"Items": [
"",
],
},
"ViewerProtocolPolicy": "https-only",
"AllowedMethods": {
"Quantity": 123,
"Items": [
"GET",
],
"CachedMethods": {
"Quantity": 123,
"Items": [
"GET",
],
},
},
"SmoothStreaming": False,
"Compress": False,
"LambdaFunctionAssociations": {},
"FunctionAssociations": {},
"FieldLevelEncryptionId": "enabled",
"RealtimeLogConfigArn": "test-log-arn",
"CachePolicyId": "",
"OriginRequestPolicyId": "",
"ResponseHeadersPolicyId": "",
"ForwardedValues": {
"QueryString": False,
"Cookies": {},
"Headers": {},
"QueryStringCacheKeys": {},
},
"MinTTL": 123,
"DefaultTTL": 123,
"MaxTTL": 123,
},
"CacheBehaviors": {},
"CustomErrorResponses": {},
"Comment": "",
"Logging": {
"Enabled": True,
"IncludeCookies": False,
"Bucket": "",
"Prefix": "",
},
"PriceClass": "PriceClass_All",
"Enabled": False,
"ViewerCertificate": {},
"Restrictions": {
"GeoRestriction": {
"RestrictionType": "blacklist",
"Quantity": 123,
"Items": [
"",
],
}
},
"WebACLId": "test-web-acl",
"HttpVersion": "http2and3",
"IsIPV6Enabled": False,
},
"ETag": "",
}
return make_api_call(self, operation_name, kwarg)
# PENDING PR TO GET THE PARAMETERS USING MOTO
# Patch every AWS call using Boto3
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
class Test_CloudFront_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
region_name=AWS_REGION,
),
audited_account=DEFAULT_ACCOUNT_ID,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=AWS_REGION,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test CloudFront Client
@mock_cloudfront
def test__get_client__(self):
cloudfront = CloudFront(self.set_mocked_audit_info())
assert cloudfront.client.__class__.__name__ == "CloudFront"
# Test CloudFront Session
@mock_cloudfront
def test__get_session__(self):
cloudfront = CloudFront(self.set_mocked_audit_info())
assert cloudfront.session.__class__.__name__ == "Session"
# Test CloudFront Service
@mock_cloudfront
def test__get_service__(self):
cloudfront = CloudFront(self.set_mocked_audit_info())
assert cloudfront.service == "cloudfront"
@mock_cloudfront
def test__list_distributions__zero(self):
cloudfront = CloudFront(self.set_mocked_audit_info())
assert len(cloudfront.distributions) == 0
@mock_cloudfront
def test__list_distributions__complete(self):
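# CreateDistribution is served by Moto, while GetDistributionConfig is answered by
# the patched mock_make_api_call above, which provides the logging, geo restriction,
# WAF and default cache behaviour values asserted below.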
cloudfront_client = client("cloudfront")
config = example_distribution_config("ref")
response = cloudfront_client.create_distribution(DistributionConfig=config)
cloudfront_distribution_id = response["Distribution"]["Id"]
cloudfront_distribution_arn = response["Distribution"]["ARN"]
cloudfront = CloudFront(self.set_mocked_audit_info())
assert len(cloudfront.distributions) == 1
assert (
cloudfront.distributions[cloudfront_distribution_id].arn
== cloudfront_distribution_arn
)
assert (
cloudfront.distributions[cloudfront_distribution_id].id
== cloudfront_distribution_id
)
assert (
cloudfront.distributions[cloudfront_distribution_id].region
== self.set_mocked_audit_info().audit_session.region_name
)
assert (
cloudfront.distributions[cloudfront_distribution_id].logging_enabled is True
)
assert (
cloudfront.distributions[cloudfront_distribution_id].origins
== cloudfront_client.get_distribution(Id=cloudfront_distribution_id)[
"Distribution"
]["DistributionConfig"]["Origins"]["Items"]
)
assert (
cloudfront.distributions[cloudfront_distribution_id].geo_restriction_type
== "blacklist"
)
assert (
cloudfront.distributions[cloudfront_distribution_id].web_acl_id
== "test-web-acl"
)
assert (
cloudfront.distributions[
cloudfront_distribution_id
].default_cache_config.realtime_log_config_arn
== "test-log-arn"
)
assert (
cloudfront.distributions[
cloudfront_distribution_id
].default_cache_config.viewer_protocol_policy
== "https-only"
)
assert (
cloudfront.distributions[
cloudfront_distribution_id
].default_cache_config.field_level_encryption_id
== "enabled"
)

View File

@@ -0,0 +1,231 @@
from datetime import datetime, timedelta, timezone
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_cloudwatch_logging_enabled:
@mock_cloudtrail
@mock_s3
def test_trails_sending_logs_during_and_not_last_day(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
trail_eu = cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_cloudwatch_logging_enabled.cloudtrail_cloudwatch_logging_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_cloudwatch_logging_enabled.cloudtrail_cloudwatch_logging_enabled import (
cloudtrail_cloudwatch_logging_enabled,
)
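# The delivery times are set by hand: the US trail delivered logs recently, while
# the EU trail's last delivery is older than 24h.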
for trail in service_client.trails:
if trail.name == trail_name_us:
trail.latest_cloudwatch_delivery_time = datetime.now().replace(
tzinfo=timezone.utc
)
elif trail.name == trail_name_eu:
trail.latest_cloudwatch_delivery_time = (
datetime.now() - timedelta(days=2)
).replace(tzinfo=timezone.utc)
regions = []
for region in service_client.regional_clients.keys():
regions.append(region)
check = cloudtrail_cloudwatch_logging_enabled()
result = check.execute()
# len of result has to be 2 since we only have 2 single-region trails
assert len(result) == 2
for report in result:
if report.resource_id == trail_name_us:
assert report.resource_id == trail_name_us
assert report.resource_arn == trail_us["TrailARN"]
assert report.status == "PASS"
assert search(
f"Single region trail {trail_name_us} has been logging the last 24h",
report.status_extended,
)
if report.resource_id == trail_name_eu:
assert report.resource_id == trail_name_eu
assert report.resource_arn == trail_eu["TrailARN"]
assert report.status == "FAIL"
assert search(
f"Single region trail {trail_name_eu} is not logging in the last 24h",
report.status_extended,
)
@mock_cloudtrail
@mock_s3
def test_multi_region_and_single_region_logging_and_not(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=True
)
trail_eu = cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_cloudwatch_logging_enabled.cloudtrail_cloudwatch_logging_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_cloudwatch_logging_enabled.cloudtrail_cloudwatch_logging_enabled import (
cloudtrail_cloudwatch_logging_enabled,
)
for trail in service_client.trails:
if trail.name == trail_name_us:
trail.latest_cloudwatch_delivery_time = datetime.now().replace(
tzinfo=timezone.utc
)
elif trail.name == trail_name_eu:
trail.latest_cloudwatch_delivery_time = (
datetime.now() - timedelta(days=2)
).replace(tzinfo=timezone.utc)
regions = []
for region in service_client.regional_clients.keys():
regions.append(region)
check = cloudtrail_cloudwatch_logging_enabled()
result = check.execute()
# len of result has to be 26 -> (1 multi-region trail entry per region + 1 entry for the single-region trail)
assert len(result) == 26
for report in result:
if report.resource_id == trail_name_us:
assert report.resource_id == trail_name_us
assert report.resource_arn == trail_us["TrailARN"]
assert report.status == "PASS"
assert search(
f"Multiregion trail {trail_name_us} has been logging the last 24h",
report.status_extended,
)
if report.resource_id == trail_name_eu and report.region == "eu-west-1":
assert report.resource_id == trail_name_eu
assert report.resource_arn == trail_eu["TrailARN"]
assert report.status == "FAIL"
assert search(
f"Single region trail {trail_name_eu} is not logging in the last 24h",
report.status_extended,
)
@mock_cloudtrail
@mock_s3
def test_trails_sending_and_not_sending_logs(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
trail_eu = cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_cloudwatch_logging_enabled.cloudtrail_cloudwatch_logging_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_cloudwatch_logging_enabled.cloudtrail_cloudwatch_logging_enabled import (
cloudtrail_cloudwatch_logging_enabled,
)
for trail in service_client.trails:
if trail.name == trail_name_us:
trail.latest_cloudwatch_delivery_time = datetime.now().replace(
tzinfo=timezone.utc
)
elif trail.name == trail_name_eu:
trail.latest_cloudwatch_delivery_time = None
regions = []
for region in service_client.regional_clients.keys():
regions.append(region)
check = cloudtrail_cloudwatch_logging_enabled()
result = check.execute()
# len of result has to be 2 since we only have 2 single-region trails
assert len(result) == 2
for report in result:
if report.resource_id == trail_name_us:
assert report.resource_id == trail_name_us
assert report.resource_arn == trail_us["TrailARN"]
assert report.status == "PASS"
assert search(
f"Single region trail {trail_name_us} has been logging the last 24h",
report.status_extended,
)
if report.resource_id == trail_name_eu:
assert report.resource_id == trail_name_eu
assert report.resource_arn == trail_eu["TrailARN"]
assert report.status == "FAIL"
assert search(
f"Single region trail {trail_name_eu} is not configured to deliver logs",
report.status_extended,
)

View File

@@ -0,0 +1,93 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_kms, mock_s3
class Test_cloudtrail_kms_encryption_enabled:
@mock_cloudtrail
@mock_s3
def test_trail_no_kms(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_kms_encryption_enabled.cloudtrail_kms_encryption_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_kms_encryption_enabled.cloudtrail_kms_encryption_enabled import (
cloudtrail_kms_encryption_enabled,
)
check = cloudtrail_kms_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"has encryption disabled",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
@mock_cloudtrail
@mock_s3
@mock_kms
def test_trail_kms(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
kms_client = client("kms", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
key_arn = kms_client.create_key()["KeyMetadata"]["Arn"]
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us,
S3BucketName=bucket_name_us,
IsMultiRegionTrail=False,
KmsKeyId=key_arn,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_kms_encryption_enabled.cloudtrail_kms_encryption_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_kms_encryption_enabled.cloudtrail_kms_encryption_enabled import (
cloudtrail_kms_encryption_enabled,
)
check = cloudtrail_kms_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"has encryption enabled",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]

View File

@@ -0,0 +1,106 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_log_file_validation_enabled:
@mock_cloudtrail
@mock_s3
def test_no_logging_validation(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_log_file_validation_enabled.cloudtrail_log_file_validation_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_log_file_validation_enabled.cloudtrail_log_file_validation_enabled import (
cloudtrail_log_file_validation_enabled,
)
check = cloudtrail_log_file_validation_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search("log file validation disabled", result[0].status_extended)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
@mock_cloudtrail
@mock_s3
def test_various_trails_with_and_without_logging_validation(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us,
S3BucketName=bucket_name_us,
IsMultiRegionTrail=False,
EnableLogFileValidation=True,
)
trail_eu = cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_log_file_validation_enabled.cloudtrail_log_file_validation_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_log_file_validation_enabled.cloudtrail_log_file_validation_enabled import (
cloudtrail_log_file_validation_enabled,
)
regions = []
for region in service_client.regional_clients.keys():
regions.append(region)
check = cloudtrail_log_file_validation_enabled()
result = check.execute()
assert len(result) == 2
for report in result:
if report.resource_id == trail_name_us:
assert report.status == "PASS"
assert search("log file validation enabled", report.status_extended)
assert report.resource_id == trail_name_us
assert report.resource_arn == trail_us["TrailARN"]
elif report.resource_id == trail_name_eu:
assert report.status == "FAIL"
assert search(
"log file validation disabled", report.status_extended
)
assert report.resource_id == trail_name_eu
assert report.resource_arn == trail_eu["TrailARN"]

View File

@@ -0,0 +1,117 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_logs_s3_bucket_access_logging_enabled:
@mock_cloudtrail
@mock_s3
def test_bucket_not_logging(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
new=S3(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled import (
cloudtrail_logs_s3_bucket_access_logging_enabled,
)
check = cloudtrail_logs_s3_bucket_access_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"S3 bucket access logging is not enabled for bucket",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
@mock_cloudtrail
@mock_s3
def test_bucket_logging(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
logging_bucket = "logging"
s3_client_us_east_1.create_bucket(
Bucket=bucket_name_us,
)
s3_client_us_east_1.create_bucket(
Bucket=logging_bucket,
)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
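# Grant the S3 Log Delivery group access to the target bucket, then enable server
# access logging on the trail bucket pointing at that target.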
s3_client_us_east_1.put_bucket_acl(
Bucket=logging_bucket,
GrantWrite="uri=http://acs.amazonaws.com/groups/s3/LogDelivery",
GrantReadACP="uri=http://acs.amazonaws.com/groups/s3/LogDelivery",
)
s3_client_us_east_1.put_bucket_logging(
Bucket=bucket_name_us,
BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": logging_bucket,
"TargetPrefix": logging_bucket,
}
},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
new=S3(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled import (
cloudtrail_logs_s3_bucket_access_logging_enabled,
)
check = cloudtrail_logs_s3_bucket_access_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"S3 bucket access logging is enabled for bucket",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]

View File

@@ -0,0 +1,173 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
@mock_cloudtrail
@mock_s3
def test_trail_bucket_no_acl(self):
cloudtrail_client = client("cloudtrail", region_name="us-east-1")
s3_client = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
new=S3(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
)
check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
assert search(
f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is not publicly accessible",
result[0].status_extended,
)
@mock_cloudtrail
@mock_s3
def test_trail_bucket_public_acl(self):
s3_client = client("s3", region_name="us-east-1")
bucket_name_us = "bucket_test_us"
s3_client.create_bucket(Bucket=bucket_name_us)
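# A READ grant to the AllUsers group makes the trail bucket publicly accessible,
# which the check should flag as FAIL.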
s3_client.put_bucket_acl(
AccessControlPolicy={
"Grants": [
{
"Grantee": {
"DisplayName": "test",
"EmailAddress": "",
"ID": "test_ID",
"Type": "Group",
"URI": "http://acs.amazonaws.com/groups/global/AllUsers",
},
"Permission": "READ",
},
],
"Owner": {"DisplayName": "test", "ID": "test_id"},
},
Bucket=bucket_name_us,
)
trail_name_us = "trail_test_us"
cloudtrail_client = client("cloudtrail", region_name="us-east-1")
trail_us = cloudtrail_client.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.s3_client",
new=S3(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
)
check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
assert search(
f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is publicly accessible",
result[0].status_extended,
)
@mock_cloudtrail
@mock_s3
def test_trail_bucket_not_public_acl(self):
cloudtrail_client = client("cloudtrail", region_name="us-east-1")
s3_client = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
s3_client.put_bucket_acl(
AccessControlPolicy={
"Grants": [
{
"Grantee": {
"DisplayName": "test",
"EmailAddress": "",
"ID": "test_ID",
"Type": "CanonicalUser",
"URI": "http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
},
"Permission": "READ",
},
],
"Owner": {"DisplayName": "test", "ID": "test_id"},
},
Bucket=bucket_name_us,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
)
check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
assert search(
f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is not publicly accessible",
result[0].status_extended,
)

View File

@@ -0,0 +1,161 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_multi_region_enabled:
@mock_cloudtrail
def test_no_trails(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_multi_region_enabled.cloudtrail_multi_region_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_multi_region_enabled.cloudtrail_multi_region_enabled import (
cloudtrail_multi_region_enabled,
)
regions = []
for region in service_client.regional_clients.keys():
regions.append(region)
check = cloudtrail_multi_region_enabled()
result = check.execute()
assert len(result) == len(regions)
for report in result:
assert report.status == "FAIL"
assert search(
"No CloudTrail trails enabled and logging were found",
report.status_extended,
)
assert report.resource_id == "No trails"
assert report.resource_arn == "No trails"
@mock_cloudtrail
@mock_s3
def test_various_trails_no_logging(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
_ = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
_ = cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_multi_region_enabled.cloudtrail_multi_region_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_multi_region_enabled.cloudtrail_multi_region_enabled import (
cloudtrail_multi_region_enabled,
)
regions = []
for region in service_client.regional_clients.keys():
regions.append(region)
check = cloudtrail_multi_region_enabled()
result = check.execute()
assert len(result) == len(regions)
for report in result:
assert report.status == "FAIL"
assert search(
"No CloudTrail trails enabled and logging were found",
report.status_extended,
)
assert report.resource_id == "No trails"
assert report.resource_arn == "No trails"
@mock_cloudtrail
@mock_s3
def test_various_trails_with_and_without_logging(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
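# Only the us-east-1 trail starts logging; the eu-west-1 trail is created but never enabled.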
_ = cloudtrail_client_us_east_1.start_logging(Name=trail_name_us)
_ = cloudtrail_client_us_east_1.get_trail_status(Name=trail_name_us)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_multi_region_enabled.cloudtrail_multi_region_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
) as service_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_multi_region_enabled.cloudtrail_multi_region_enabled import (
cloudtrail_multi_region_enabled,
)
regions = list(service_client.regional_clients.keys())
check = cloudtrail_multi_region_enabled()
result = check.execute()
assert len(result) == len(regions)
for report in result:
if report.resource_id == trail_name_us:
assert report.status == "PASS"
assert search(
"is not multiregion and it is logging", report.status_extended
)
assert report.resource_id == trail_name_us
assert report.resource_arn == trail_us["TrailARN"]
else:
assert report.status == "FAIL"
assert search(
"No CloudTrail trails enabled and logging were found",
report.status_extended,
)
assert report.resource_id == "No trails"
assert report.resource_arn == "No trails"


@@ -0,0 +1,149 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_s3_dataevents_read_enabled:
@mock_cloudtrail
@mock_s3
def test_trail_without_data_events(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_read_enabled.cloudtrail_s3_dataevents_read_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_read_enabled.cloudtrail_s3_dataevents_read_enabled import (
cloudtrail_s3_dataevents_read_enabled,
)
check = cloudtrail_s3_dataevents_read_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"No CloudTrail trails have a data event to record all S3 object-level API operations.",
result[0].status_extended,
)
assert result[0].resource_id == "No trails"
assert result[0].resource_arn == "No trails"
@mock_cloudtrail
@mock_s3
def test_trail_without_s3_data_events(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
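# Configure a data event selector for Lambda functions only, so no S3 object-level events are recorded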
_ = cloudtrail_client_us_east_1.put_event_selectors(
TrailName=trail_name_us,
EventSelectors=[
{
"ReadWriteType": "All",
"IncludeManagementEvents": True,
"DataResources": [
{"Type": "AWS::Lambda::Function", "Values": ["arn:aws:lambda"]}
],
}
],
)["EventSelectors"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_read_enabled.cloudtrail_s3_dataevents_read_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_read_enabled.cloudtrail_s3_dataevents_read_enabled import (
cloudtrail_s3_dataevents_read_enabled,
)
check = cloudtrail_s3_dataevents_read_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"No CloudTrail trails have a data event to record all S3 object-level API operations.",
result[0].status_extended,
)
assert result[0].resource_id == "No trails"
assert result[0].resource_arn == "No trails"
@mock_cloudtrail
@mock_s3
def test_trail_with_s3_data_events(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
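# Configure a data event selector covering all S3 object-level operations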
_ = cloudtrail_client_us_east_1.put_event_selectors(
TrailName=trail_name_us,
EventSelectors=[
{
"ReadWriteType": "All",
"IncludeManagementEvents": True,
"DataResources": [
{"Type": "AWS::S3::Object", "Values": ["arn:aws:s3:::*/*"]}
],
}
],
)["EventSelectors"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_read_enabled.cloudtrail_s3_dataevents_read_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_read_enabled.cloudtrail_s3_dataevents_read_enabled import (
cloudtrail_s3_dataevents_read_enabled,
)
check = cloudtrail_s3_dataevents_read_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"have a data event to record all S3 object-level API operations.",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]


@@ -0,0 +1,149 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_s3
class Test_cloudtrail_s3_dataevents_write_enabled:
@mock_cloudtrail
@mock_s3
def test_trail_without_data_events(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_write_enabled.cloudtrail_s3_dataevents_write_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_write_enabled.cloudtrail_s3_dataevents_write_enabled import (
cloudtrail_s3_dataevents_write_enabled,
)
check = cloudtrail_s3_dataevents_write_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"No CloudTrail trails have a data event to record all S3 object-level API operations.",
result[0].status_extended,
)
assert result[0].resource_id == "No trails"
assert result[0].resource_arn == "No trails"
@mock_cloudtrail
@mock_s3
def test_trail_without_s3_data_events(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
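# Lambda-only data events: the trail still records no S3 object-level (write) operations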
_ = cloudtrail_client_us_east_1.put_event_selectors(
TrailName=trail_name_us,
EventSelectors=[
{
"ReadWriteType": "All",
"IncludeManagementEvents": True,
"DataResources": [
{"Type": "AWS::Lambda::Function", "Values": ["arn:aws:lambda"]}
],
}
],
)["EventSelectors"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_write_enabled.cloudtrail_s3_dataevents_write_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_write_enabled.cloudtrail_s3_dataevents_write_enabled import (
cloudtrail_s3_dataevents_write_enabled,
)
check = cloudtrail_s3_dataevents_write_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"No CloudTrail trails have a data event to record all S3 object-level API operations.",
result[0].status_extended,
)
assert result[0].resource_id == "No trails"
assert result[0].resource_arn == "No trails"
@mock_cloudtrail
@mock_s3
def test_trail_with_s3_data_events(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
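# Record all S3 object-level operations so the write data events check can pass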
_ = cloudtrail_client_us_east_1.put_event_selectors(
TrailName=trail_name_us,
EventSelectors=[
{
"ReadWriteType": "All",
"IncludeManagementEvents": True,
"DataResources": [
{"Type": "AWS::S3::Object", "Values": ["arn:aws:s3:::*/*"]}
],
}
],
)["EventSelectors"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_write_enabled.cloudtrail_s3_dataevents_write_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_s3_dataevents_write_enabled.cloudtrail_s3_dataevents_write_enabled import (
cloudtrail_s3_dataevents_write_enabled,
)
check = cloudtrail_s3_dataevents_write_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"have a data event to record all S3 object-level API operations.",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]


@@ -0,0 +1,186 @@
from boto3 import client, session
from moto import mock_cloudtrail, mock_s3
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import Cloudtrail
AWS_ACCOUNT_NUMBER = 123456789012
class Test_Cloudtrail_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test Cloudtrail Service
@mock_cloudtrail
def test_service(self):
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
assert cloudtrail.service == "cloudtrail"
# Test Cloudtrail client
@mock_cloudtrail
def test_client(self):
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
for regional_client in cloudtrail.regional_clients.values():
assert regional_client.__class__.__name__ == "CloudTrail"
# Test Cloudtrail session
@mock_cloudtrail
def test__get_session__(self):
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
assert cloudtrail.session.__class__.__name__ == "Session"
# Test Cloudtrail audited account
@mock_cloudtrail
def test_audited_account(self):
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
assert cloudtrail.audited_account == AWS_ACCOUNT_NUMBER
@mock_cloudtrail
@mock_s3
def test_describe_trails(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
# One entry per audited region: a None placeholder where no trail exists, plus the 2 created trails
assert len(cloudtrail.trails) == 25
for trail in cloudtrail.trails:
if trail.name:
assert trail.name == trail_name_us or trail.name == trail_name_eu
assert not trail.is_multiregion
assert (
trail.home_region == "us-east-1" or trail.home_region == "eu-west-1"
)
assert trail.region == "us-east-1" or trail.region == "eu-west-1"
assert not trail.is_logging
assert not trail.log_file_validation_enabled
assert not trail.latest_cloudwatch_delivery_time
assert (
trail.s3_bucket == bucket_name_eu
or trail.s3_bucket == bucket_name_us
)
@mock_cloudtrail
@mock_s3
def test_status_trails(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
cloudtrail_client_eu_west_1 = client("cloudtrail", region_name="eu-west-1")
s3_client_eu_west_1 = client("s3", region_name="eu-west-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
trail_name_eu = "trail_test_eu"
bucket_name_eu = "bucket_test_eu"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
s3_client_eu_west_1.create_bucket(
Bucket=bucket_name_eu,
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
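# Only the us-east-1 trail enables log file validation and starts logging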
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us,
S3BucketName=bucket_name_us,
IsMultiRegionTrail=False,
EnableLogFileValidation=True,
)
cloudtrail_client_us_east_1.start_logging(Name=trail_name_us)
cloudtrail_client_eu_west_1.create_trail(
Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
)
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
# One entry per audited region: a None placeholder where no trail exists, plus the 2 created trails
assert len(cloudtrail.trails) == 25
for trail in cloudtrail.trails:
if trail.name:
if trail.name == trail_name_us:
assert not trail.is_multiregion
assert trail.home_region == "us-east-1"
assert trail.region == "us-east-1"
assert trail.is_logging
assert trail.log_file_validation_enabled
assert not trail.latest_cloudwatch_delivery_time
assert trail.s3_bucket == bucket_name_us
@mock_cloudtrail
@mock_s3
def test_get_event_selectors(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us,
S3BucketName=bucket_name_us,
IsMultiRegionTrail=False,
EnableLogFileValidation=True,
)
cloudtrail_client_us_east_1.start_logging(Name=trail_name_us)
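# Attach an S3 object-level data event selector; the service should expose it in trail.data_events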
data_events_response = cloudtrail_client_us_east_1.put_event_selectors(
TrailName=trail_name_us,
EventSelectors=[
{
"ReadWriteType": "All",
"IncludeManagementEvents": True,
"DataResources": [
{"Type": "AWS::S3::Object", "Values": ["arn:aws:s3:::*/*"]}
],
}
],
)["EventSelectors"]
audit_info = self.set_mocked_audit_info()
cloudtrail = Cloudtrail(audit_info)
# One entry per audited region: a None placeholder where no trail exists, plus the 1 created trail
assert len(cloudtrail.trails) == 25
for trail in cloudtrail.trails:
if trail.name:
if trail.name == trail_name_us:
assert not trail.is_multiregion
assert trail.home_region == "us-east-1"
assert trail.region == "us-east-1"
assert trail.is_logging
assert trail.log_file_validation_enabled
assert not trail.latest_cloudwatch_delivery_time
assert trail.s3_bucket == bucket_name_us
assert trail.data_events == data_events_response


@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_changes_to_network_acls_alarm_configured:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured import (
cloudwatch_changes_to_network_acls_alarm_configured,
)
check = cloudwatch_changes_to_network_acls_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
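# The trail has no CloudWatch Logs log group attached, so no log group with metric filters exists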
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured import (
cloudwatch_changes_to_network_acls_alarm_configured,
)
check = cloudwatch_changes_to_network_acls_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured import (
cloudwatch_changes_to_network_acls_alarm_configured,
)
check = cloudwatch_changes_to_network_acls_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventName = CreateNetworkAcl) || ($.eventName = CreateNetworkAclEntry) || ($.eventName = DeleteNetworkAcl) || ($.eventName = DeleteNetworkAclEntry) || ($.eventName = ReplaceNetworkAclEntry) || ($.eventName = ReplaceNetworkAclAssociation) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
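# A matching metric filter exists but no alarm references its metric, so the check should still fail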
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured import (
cloudwatch_changes_to_network_acls_alarm_configured,
)
check = cloudwatch_changes_to_network_acls_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventName = CreateNetworkAcl) || ($.eventName = CreateNetworkAclEntry) || ($.eventName = DeleteNetworkAcl) || ($.eventName = DeleteNetworkAclEntry) || ($.eventName = ReplaceNetworkAclEntry) || ($.eventName = ReplaceNetworkAclAssociation) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
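# An alarm on the filter's metric completes the log group -> filter -> alarm chain, so the check should pass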
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured import (
cloudwatch_changes_to_network_acls_alarm_configured,
)
check = cloudwatch_changes_to_network_acls_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"


@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_changes_to_network_gateways_alarm_configured:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured import (
cloudwatch_changes_to_network_gateways_alarm_configured,
)
check = cloudwatch_changes_to_network_gateways_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured import (
cloudwatch_changes_to_network_gateways_alarm_configured,
)
check = cloudwatch_changes_to_network_gateways_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured import (
cloudwatch_changes_to_network_gateways_alarm_configured,
)
check = cloudwatch_changes_to_network_gateways_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = CreateCustomerGateway) || ($.eventName = DeleteCustomerGateway) || ($.eventName = AttachInternetGateway) || ($.eventName = CreateInternetGateway) || ($.eventName = DeleteInternetGateway) || ($.eventName = DetachInternetGateway) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
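# Filter matches network gateway change events but no alarm is configured yet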
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured import (
cloudwatch_changes_to_network_gateways_alarm_configured,
)
check = cloudwatch_changes_to_network_gateways_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = CreateCustomerGateway) || ($.eventName = DeleteCustomerGateway) || ($.eventName = AttachInternetGateway) || ($.eventName = CreateInternetGateway) || ($.eventName = DeleteInternetGateway) || ($.eventName = DetachInternetGateway) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured import (
cloudwatch_changes_to_network_gateways_alarm_configured,
)
check = cloudwatch_changes_to_network_gateways_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"


@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_changes_to_network_route_tables_alarm_configured:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured import (
cloudwatch_changes_to_network_route_tables_alarm_configured,
)
check = cloudwatch_changes_to_network_route_tables_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured import (
cloudwatch_changes_to_network_route_tables_alarm_configured,
)
check = cloudwatch_changes_to_network_route_tables_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured import (
cloudwatch_changes_to_network_route_tables_alarm_configured,
)
check = cloudwatch_changes_to_network_route_tables_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = CreateRoute) || ($.eventName = CreateRouteTable) || ($.eventName = ReplaceRoute) || ($.eventName = ReplaceRouteTableAssociation)|| ($.eventName = DeleteRouteTable) || ($.eventName = DeleteRoute) || ($.eventName = DisassociateRouteTable) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
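# Filter matches route table change events but no alarm is configured yet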
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured import (
cloudwatch_changes_to_network_route_tables_alarm_configured,
)
check = cloudwatch_changes_to_network_route_tables_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = CreateRoute) || ($.eventName = CreateRouteTable) || ($.eventName = ReplaceRoute) || ($.eventName = ReplaceRouteTableAssociation)|| ($.eventName = DeleteRouteTable) || ($.eventName = DeleteRoute) || ($.eventName = DisassociateRouteTable) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured import (
cloudwatch_changes_to_network_route_tables_alarm_configured,
)
check = cloudwatch_changes_to_network_route_tables_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"


@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_changes_to_vpcs_alarm_configured:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured import (
cloudwatch_changes_to_vpcs_alarm_configured,
)
check = cloudwatch_changes_to_vpcs_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured import (
cloudwatch_changes_to_vpcs_alarm_configured,
)
check = cloudwatch_changes_to_vpcs_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured import (
cloudwatch_changes_to_vpcs_alarm_configured,
)
check = cloudwatch_changes_to_vpcs_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
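        # Add a metric filter matching VPC change events; no alarm is attached yet, so the check should FAIL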
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventName = CreateVpc) || ($.eventName = DeleteVpc) || ($.eventName = ModifyVpcAttribute) || ($.eventName = AcceptVpcPeeringConnection) || ($.eventName = CreateVpcPeeringConnection) || ($.eventName = DeleteVpcPeeringConnection) || ($.eventName = RejectVpcPeeringConnection) || ($.eventName = AttachClassicLinkVpc) || ($.eventName = DetachClassicLinkVpc) || ($.eventName = DisableVpcClassicLink) || ($.eventName = EnableVpcClassicLink) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured import (
cloudwatch_changes_to_vpcs_alarm_configured,
)
check = cloudwatch_changes_to_vpcs_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventName = CreateVpc) || ($.eventName = DeleteVpc) || ($.eventName = ModifyVpcAttribute) || ($.eventName = AcceptVpcPeeringConnection) || ($.eventName = CreateVpcPeeringConnection) || ($.eventName = DeleteVpcPeeringConnection) || ($.eventName = RejectVpcPeeringConnection) || ($.eventName = AttachClassicLinkVpc) || ($.eventName = DetachClassicLinkVpc) || ($.eventName = DisableVpcClassicLink) || ($.eventName = EnableVpcClassicLink) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
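        # Attach an alarm to the filter's metric so the check finds both a metric filter and an alarm (PASS)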
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured import (
cloudwatch_changes_to_vpcs_alarm_configured,
)
check = cloudwatch_changes_to_vpcs_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,68 @@
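# Tests for the cloudwatch_cross_account_sharing_disabled check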
from unittest import mock
from boto3 import client
from moto import mock_iam
AWS_REGION = "us-east-1"
class Test_cloudwatch_cross_account_sharing_disabled:
@mock_iam
def test_cloudwatch_without_cross_account_role(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.iam.iam_service import IAM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_cross_account_sharing_disabled.cloudwatch_cross_account_sharing_disabled.iam_client",
new=IAM(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_cross_account_sharing_disabled.cloudwatch_cross_account_sharing_disabled import (
cloudwatch_cross_account_sharing_disabled,
)
check = cloudwatch_cross_account_sharing_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch doesn't allows cross-account sharing"
)
assert result[0].resource_id == "CloudWatch-CrossAccountSharingRole"
@mock_iam
def test_cloudwatch_log_group_with_cross_account_role(self):
        # Generate IAM client
        iam_client = client("iam", region_name=AWS_REGION)
        # Create the cross-account sharing role the check looks for
iam_client.create_role(
RoleName="CloudWatch-CrossAccountSharingRole", AssumeRolePolicyDocument="{}"
)
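        # A role named CloudWatch-CrossAccountSharingRole is exactly what the check flags, so it should FAIL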
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.iam.iam_service import IAM
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_cross_account_sharing_disabled.cloudwatch_cross_account_sharing_disabled.iam_client",
new=IAM(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_cross_account_sharing_disabled.cloudwatch_cross_account_sharing_disabled import (
cloudwatch_cross_account_sharing_disabled,
)
check = cloudwatch_cross_account_sharing_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch has allowed cross-account sharing."
)
assert result[0].resource_id == "CloudWatch-CrossAccountSharingRole"

View File

@@ -0,0 +1,92 @@
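# Tests for the cloudwatch_log_group_kms_encryption_enabled check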
from unittest import mock
from boto3 import client
from moto import mock_logs
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_group_kms_encryption_enabled:
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_kms_encryption_enabled.cloudwatch_log_group_kms_encryption_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_kms_encryption_enabled.cloudwatch_log_group_kms_encryption_enabled import (
cloudwatch_log_group_kms_encryption_enabled,
)
check = cloudwatch_log_group_kms_encryption_enabled()
result = check.execute()
assert len(result) == 0
@mock_logs
def test_cloudwatch_log_group_without_kms_key(self):
# Generate Logs Client
logs_client = client("logs", region_name=AWS_REGION)
        # Create the log group (no KMS key associated)
logs_client.create_log_group(
logGroupName="test",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_kms_encryption_enabled.cloudwatch_log_group_kms_encryption_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_kms_encryption_enabled.cloudwatch_log_group_kms_encryption_enabled import (
cloudwatch_log_group_kms_encryption_enabled,
)
check = cloudwatch_log_group_kms_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Log Group test does not have AWS KMS keys associated."
)
assert result[0].resource_id == "test"
@mock_logs
def test_cloudwatch_log_group_with_kms_key(self):
# Generate Logs Client
logs_client = client("logs", region_name=AWS_REGION)
        # Create the log group with a KMS key associated
logs_client.create_log_group(logGroupName="test", kmsKeyId="test_kms_id")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_kms_encryption_enabled.cloudwatch_log_group_kms_encryption_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_kms_encryption_enabled.cloudwatch_log_group_kms_encryption_enabled import (
cloudwatch_log_group_kms_encryption_enabled,
)
check = cloudwatch_log_group_kms_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Log Group test does have AWS KMS key test_kms_id associated."
)
assert result[0].resource_id == "test"

View File

@@ -0,0 +1,129 @@
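# Tests for the cloudwatch_log_group_retention_policy_specific_days_enabled check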
from unittest import mock
from boto3 import client
from moto import mock_logs
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_group_retention_policy_specific_days_enabled:
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled import (
cloudwatch_log_group_retention_policy_specific_days_enabled,
)
check = cloudwatch_log_group_retention_policy_specific_days_enabled()
result = check.execute()
assert len(result) == 0
@mock_logs
def test_cloudwatch_log_group_without_retention_days(self):
# Generate Logs Client
logs_client = client("logs", region_name=AWS_REGION)
        # Create the log group (no retention policy set)
logs_client.create_log_group(
logGroupName="test",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled import (
cloudwatch_log_group_retention_policy_specific_days_enabled,
)
check = cloudwatch_log_group_retention_policy_specific_days_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Log Group test has less than 365 days retention period (0 days)."
)
assert result[0].resource_id == "test"
@mock_logs
def test_cloudwatch_log_group_with_compliant_retention_days(self):
# Generate Logs Client
logs_client = client("logs", region_name=AWS_REGION)
        # Create the log group
logs_client.create_log_group(
logGroupName="test",
)
logs_client.put_retention_policy(logGroupName="test", retentionInDays=400)
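        # 400-day retention exceeds the 365-day requirement, so the check should PASS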
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled import (
cloudwatch_log_group_retention_policy_specific_days_enabled,
)
check = cloudwatch_log_group_retention_policy_specific_days_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Log Group test comply with 365 days retention period since it has 400 days."
)
assert result[0].resource_id == "test"
@mock_logs
def test_cloudwatch_log_group_with_no_compliant_retention_days(self):
# Generate Logs Client
logs_client = client("logs", region_name=AWS_REGION)
        # Create the log group
logs_client.create_log_group(
logGroupName="test",
)
logs_client.put_retention_policy(logGroupName="test", retentionInDays=7)
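        # 7-day retention is below the 365-day requirement, so the check should FAIL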
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled.logs_client",
new=Logs(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_group_retention_policy_specific_days_enabled.cloudwatch_log_group_retention_policy_specific_days_enabled import (
cloudwatch_log_group_retention_policy_specific_days_enabled,
)
check = cloudwatch_log_group_retention_policy_specific_days_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Log Group test has less than 365 days retention period (7 days)."
)
assert result[0].resource_id == "test"

View File

@@ -0,0 +1,302 @@
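# Tests for the cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled check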
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventSource = config.amazonaws.com) && (($.eventName=StopConfigurationRecorder)||($.eventName=DeleteDeliveryChannel)|| ($.eventName=PutDeliveryChannel)||($.eventName=PutConfigurationRecorder))}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventSource = config.amazonaws.com) && (($.eventName=StopConfigurationRecorder)||($.eventName=DeleteDeliveryChannel)|| ($.eventName=PutDeliveryChannel)||($.eventName=PutConfigurationRecorder))}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,302 @@
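# Tests for the cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled check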
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = CreateTrail) || ($.eventName = UpdateTrail) || ($.eventName = DeleteTrail) || ($.eventName = StartLogging) || ($.eventName = StopLogging)}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = CreateTrail) || ($.eventName = UpdateTrail) || ($.eventName = DeleteTrail) || ($.eventName = StartLogging) || ($.eventName = StopLogging)}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled import (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled,
)
check = (
cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
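# Tests for the cloudwatch_log_metric_filter_authentication_failures check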
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_authentication_failures:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures import (
cloudwatch_log_metric_filter_authentication_failures,
)
check = cloudwatch_log_metric_filter_authentication_failures()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures import (
cloudwatch_log_metric_filter_authentication_failures,
)
check = cloudwatch_log_metric_filter_authentication_failures()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures import (
cloudwatch_log_metric_filter_authentication_failures,
)
check = cloudwatch_log_metric_filter_authentication_failures()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = ConsoleLogin) && ($.errorMessage = Failed authentication)}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures import (
cloudwatch_log_metric_filter_authentication_failures,
)
check = cloudwatch_log_metric_filter_authentication_failures()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = ConsoleLogin) && ($.errorMessage = Failed authentication)}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures import (
cloudwatch_log_metric_filter_authentication_failures,
)
check = cloudwatch_log_metric_filter_authentication_failures()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
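# Tests for the cloudwatch_log_metric_filter_aws_organizations_changes check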
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_aws_organizations_changes:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes import (
cloudwatch_log_metric_filter_aws_organizations_changes,
)
check = cloudwatch_log_metric_filter_aws_organizations_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes import (
cloudwatch_log_metric_filter_aws_organizations_changes,
)
check = cloudwatch_log_metric_filter_aws_organizations_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes import (
cloudwatch_log_metric_filter_aws_organizations_changes,
)
check = cloudwatch_log_metric_filter_aws_organizations_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
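# Metric filter whose pattern matches the AWS Organizations API events this check expects a filter and alarm for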
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventSource = organizations.amazonaws.com) && ($.eventName = AcceptHandshake) || ($.eventName = AttachPolicy) || ($.eventName = CancelHandshake) || ($.eventName = CreateAccount) || ($.eventName = CreateOrganization) || ($.eventName = CreateOrganizationalUnit) || ($.eventName = CreatePolicy) || ($.eventName = DeclineHandshake) || ($.eventName = DeleteOrganization) || ($.eventName = DeleteOrganizationalUnit) || ($.eventName = DeletePolicy) || ($.eventName = EnableAllFeatures) || ($.eventName = EnablePolicyType) || ($.eventName = InviteAccountToOrganization) || ($.eventName = LeaveOrganization) || ($.eventName = DetachPolicy) || ($.eventName = DisablePolicyType) || ($.eventName = MoveAccount) || ($.eventName = RemoveAccountFromOrganization) || ($.eventName = UpdateOrganizationalUnit) || ($.eventName = UpdatePolicy) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes import (
cloudwatch_log_metric_filter_aws_organizations_changes,
)
check = cloudwatch_log_metric_filter_aws_organizations_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventSource = organizations.amazonaws.com) && ($.eventName = AcceptHandshake) || ($.eventName = AttachPolicy) || ($.eventName = CancelHandshake) || ($.eventName = CreateAccount) || ($.eventName = CreateOrganization) || ($.eventName = CreateOrganizationalUnit) || ($.eventName = CreatePolicy) || ($.eventName = DeclineHandshake) || ($.eventName = DeleteOrganization) || ($.eventName = DeleteOrganizationalUnit) || ($.eventName = DeletePolicy) || ($.eventName = EnableAllFeatures) || ($.eventName = EnablePolicyType) || ($.eventName = InviteAccountToOrganization) || ($.eventName = LeaveOrganization) || ($.eventName = DetachPolicy) || ($.eventName = DisablePolicyType) || ($.eventName = MoveAccount) || ($.eventName = RemoveAccountFromOrganization) || ($.eventName = UpdateOrganizationalUnit) || ($.eventName = UpdatePolicy) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
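# Alarm on the filter's metric (my-namespace/my-metric) so the check finds both filter and alarm and reports PASS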
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes import (
cloudwatch_log_metric_filter_aws_organizations_changes,
)
check = cloudwatch_log_metric_filter_aws_organizations_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,302 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk import (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk,
)
check = (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk import (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk,
)
check = (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk import (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk,
)
check = (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
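# Metric filter matching KMS DisableKey/ScheduleKeyDeletion events, the activity this check expects a filter and alarm for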
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventSource = kms.amazonaws.com) &&(($.eventName=DisableKey)||($.eventName=ScheduleKeyDeletion)) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk import (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk,
)
check = (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventSource = kms.amazonaws.com) &&(($.eventName=DisableKey)||($.eventName=ScheduleKeyDeletion)) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
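# Alarm on the filter's metric so the check reports PASS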
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk import (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk,
)
check = (
cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_for_s3_bucket_policy_changes:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes import (
cloudwatch_log_metric_filter_for_s3_bucket_policy_changes,
)
check = cloudwatch_log_metric_filter_for_s3_bucket_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes import (
cloudwatch_log_metric_filter_for_s3_bucket_policy_changes,
)
check = cloudwatch_log_metric_filter_for_s3_bucket_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes import (
cloudwatch_log_metric_filter_for_s3_bucket_policy_changes,
)
check = cloudwatch_log_metric_filter_for_s3_bucket_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
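# Metric filter matching S3 bucket policy/ACL/configuration change events, the activity this check expects a filter and alarm for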
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventSource = s3.amazonaws.com) && (($.eventName = PutBucketAcl) || ($.eventName = PutBucketPolicy) || ($.eventName = PutBucketCors) || ($.eventName = PutBucketLifecycle) || ($.eventName = PutBucketReplication) || ($.eventName = DeleteBucketPolicy) || ($.eventName = DeleteBucketCors) || ($.eventName = DeleteBucketLifecycle) || ($.eventName = DeleteBucketReplication)) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes import (
cloudwatch_log_metric_filter_for_s3_bucket_policy_changes,
)
check = cloudwatch_log_metric_filter_for_s3_bucket_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventSource = s3.amazonaws.com) && (($.eventName = PutBucketAcl) || ($.eventName = PutBucketPolicy) || ($.eventName = PutBucketCors) || ($.eventName = PutBucketLifecycle) || ($.eventName = PutBucketReplication) || ($.eventName = DeleteBucketPolicy) || ($.eventName = DeleteBucketCors) || ($.eventName = DeleteBucketLifecycle) || ($.eventName = DeleteBucketReplication)) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
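# Alarm on the filter's metric so the check reports PASS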
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes import (
cloudwatch_log_metric_filter_for_s3_bucket_policy_changes,
)
check = cloudwatch_log_metric_filter_for_s3_bucket_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_policy_changes:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes import (
cloudwatch_log_metric_filter_policy_changes,
)
check = cloudwatch_log_metric_filter_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes import (
cloudwatch_log_metric_filter_policy_changes,
)
check = cloudwatch_log_metric_filter_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes import (
cloudwatch_log_metric_filter_policy_changes,
)
check = cloudwatch_log_metric_filter_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
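# Metric filter matching IAM policy change events, the activity this check expects a filter and alarm for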
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName=DeleteGroupPolicy)||($.eventName=DeleteRolePolicy)||($.eventName=DeleteUserPolicy)||($.eventName=PutGroupPolicy)||($.eventName=PutRolePolicy)||($.eventName=PutUserPolicy)||($.eventName=CreatePolicy)||($.eventName=DeletePolicy)||($.eventName=CreatePolicyVersion)||($.eventName=DeletePolicyVersion)||($.eventName=AttachRolePolicy)||($.eventName=DetachRolePolicy)||($.eventName=AttachUserPolicy)||($.eventName=DetachUserPolicy)||($.eventName=AttachGroupPolicy)||($.eventName=DetachGroupPolicy)}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes import (
cloudwatch_log_metric_filter_policy_changes,
)
check = cloudwatch_log_metric_filter_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName=DeleteGroupPolicy)||($.eventName=DeleteRolePolicy)||($.eventName=DeleteUserPolicy)||($.eventName=PutGroupPolicy)||($.eventName=PutRolePolicy)||($.eventName=PutUserPolicy)||($.eventName=CreatePolicy)||($.eventName=DeletePolicy)||($.eventName=CreatePolicyVersion)||($.eventName=DeletePolicyVersion)||($.eventName=AttachRolePolicy)||($.eventName=DetachRolePolicy)||($.eventName=AttachUserPolicy)||($.eventName=DetachUserPolicy)||($.eventName=AttachGroupPolicy)||($.eventName=DetachGroupPolicy)}",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
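# Alarm on the filter's metric so the check reports PASS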
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes import (
cloudwatch_log_metric_filter_policy_changes,
)
check = cloudwatch_log_metric_filter_policy_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_root_usage:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage import (
cloudwatch_log_metric_filter_root_usage,
)
check = cloudwatch_log_metric_filter_root_usage()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage import (
cloudwatch_log_metric_filter_root_usage,
)
check = cloudwatch_log_metric_filter_root_usage()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage import (
cloudwatch_log_metric_filter_root_usage,
)
check = cloudwatch_log_metric_filter_root_usage()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
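# Metric filter matching root account usage events, the activity this check expects a filter and alarm for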
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ $.userIdentity.type = Root && $.userIdentity.invokedBy NOT EXISTS && $.eventType != AwsServiceEvent }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage import (
cloudwatch_log_metric_filter_root_usage,
)
check = cloudwatch_log_metric_filter_root_usage()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ $.userIdentity.type = Root && $.userIdentity.invokedBy NOT EXISTS && $.eventType != AwsServiceEvent }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
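# Alarm on the filter's metric so the check reports PASS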
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage import (
cloudwatch_log_metric_filter_root_usage,
)
check = cloudwatch_log_metric_filter_root_usage()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_security_group_changes:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes import (
cloudwatch_log_metric_filter_security_group_changes,
)
check = cloudwatch_log_metric_filter_security_group_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes import (
cloudwatch_log_metric_filter_security_group_changes,
)
check = cloudwatch_log_metric_filter_security_group_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes import (
cloudwatch_log_metric_filter_security_group_changes,
)
check = cloudwatch_log_metric_filter_security_group_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
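# Metric filter whose pattern matches security group change API calls (Authorize/Revoke/Create/DeleteSecurityGroup)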
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = AuthorizeSecurityGroupIngress) || ($.eventName = AuthorizeSecurityGroupEgress) || ($.eventName = RevokeSecurityGroupIngress) || ($.eventName = RevokeSecurityGroupEgress) || ($.eventName = CreateSecurityGroup) || ($.eventName = DeleteSecurityGroup) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes import (
cloudwatch_log_metric_filter_security_group_changes,
)
check = cloudwatch_log_metric_filter_security_group_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{($.eventName = AuthorizeSecurityGroupIngress) || ($.eventName = AuthorizeSecurityGroupEgress) || ($.eventName = RevokeSecurityGroupIngress) || ($.eventName = RevokeSecurityGroupEgress) || ($.eventName = CreateSecurityGroup) || ($.eventName = DeleteSecurityGroup) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
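# Alarm on the metric published by the filter, so the check should report PASS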
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes import (
cloudwatch_log_metric_filter_security_group_changes,
)
check = cloudwatch_log_metric_filter_security_group_changes()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_sign_in_without_mfa:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa import (
cloudwatch_log_metric_filter_sign_in_without_mfa,
)
check = cloudwatch_log_metric_filter_sign_in_without_mfa()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa import (
cloudwatch_log_metric_filter_sign_in_without_mfa,
)
check = cloudwatch_log_metric_filter_sign_in_without_mfa()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa import (
cloudwatch_log_metric_filter_sign_in_without_mfa,
)
check = cloudwatch_log_metric_filter_sign_in_without_mfa()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
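# Metric filter matching console sign-in events performed without MFA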
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventName = ConsoleLogin) && ($.additionalEventData.MFAUsed != Yes) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa import (
cloudwatch_log_metric_filter_sign_in_without_mfa,
)
check = cloudwatch_log_metric_filter_sign_in_without_mfa()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.eventName = ConsoleLogin) && ($.additionalEventData.MFAUsed != Yes) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa import (
cloudwatch_log_metric_filter_sign_in_without_mfa,
)
check = cloudwatch_log_metric_filter_sign_in_without_mfa()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,292 @@
from unittest import mock
from boto3 import client
from moto import mock_cloudtrail, mock_cloudwatch, mock_logs, mock_s3
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
def test_cloudwatch_no_log_groups(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls import (
cloudwatch_log_metric_filter_unauthorized_api_calls,
)
check = cloudwatch_log_metric_filter_unauthorized_api_calls()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_no_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
cloudtrail_client.create_trail(Name="test_trail", S3BucketName="test")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls import (
cloudwatch_log_metric_filter_unauthorized_api_calls,
)
check = cloudwatch_log_metric_filter_unauthorized_api_calls()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls import (
cloudwatch_log_metric_filter_unauthorized_api_calls,
)
check = cloudwatch_log_metric_filter_unauthorized_api_calls()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "No CloudWatch log groups found with metric filters or alarms associated."
)
assert result[0].resource_id == ""
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
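# Metric filter matching unauthorized API calls (UnauthorizedOperation or AccessDenied errors)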
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.errorCode = *UnauthorizedOperation) || ($.errorCode = AccessDenied*) || ($.sourceIPAddress!=delivery.logs.amazonaws.com) || ($.eventName!=HeadBucket) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls import (
cloudwatch_log_metric_filter_unauthorized_api_calls,
)
check = cloudwatch_log_metric_filter_unauthorized_api_calls()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern="{ ($.errorCode = *UnauthorizedOperation) || ($.errorCode = AccessDenied*) || ($.sourceIPAddress!=delivery.logs.amazonaws.com) || ($.eventName!=HeadBucket) }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls import (
cloudwatch_log_metric_filter_unauthorized_api_calls,
)
check = cloudwatch_log_metric_filter_unauthorized_api_calls()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -0,0 +1,64 @@
from boto3 import session
from moto import mock_cloudwatch
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import CloudWatch
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
class Test_CloudWatch_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test CloudWatch Service
@mock_cloudwatch
def test_service(self):
# CloudWatch client for this test class
audit_info = self.set_mocked_audit_info()
cloudwatch = CloudWatch(audit_info)
assert cloudwatch.service == "cloudwatch"
# Test CloudWatch Client
@mock_cloudwatch
def test_client(self):
# CloudWatch client for this test class
audit_info = self.set_mocked_audit_info()
cloudwatch = CloudWatch(audit_info)
for client in cloudwatch.regional_clients.values():
assert client.__class__.__name__ == "CloudWatch"
# Test CloudWatch Session
@mock_cloudwatch
def test__get_session__(self):
# CloudWatch client for this test class
audit_info = self.set_mocked_audit_info()
cloudwatch = CloudWatch(audit_info)
assert cloudwatch.session.__class__.__name__ == "Session"
# Test CloudWatch Audited Account
@mock_cloudwatch
def test_audited_account(self):
# CloudWatch client for this test class
audit_info = self.set_mocked_audit_info()
cloudwatch = CloudWatch(audit_info)
assert cloudwatch.audited_account == AWS_ACCOUNT_NUMBER

View File

@@ -0,0 +1,172 @@
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.codeartifact.codeartifact_service import (
LatestPackageVersion,
LatestPackageVersionStatus,
OriginConfiguration,
OriginInformation,
OriginInformationValues,
Package,
Repository,
Restrictions,
RestrictionValues,
)
AWS_REGION = "eu-west-1"
class Test_codeartifact_packages_external_public_publishing_disabled:
def test_no_repositories(self):
codeartifact_client = mock.MagicMock
codeartifact_client.repositories = {}
with mock.patch(
"prowler.providers.aws.services.codeartifact.codeartifact_service.CodeArtifact",
new=codeartifact_client,
):
# Test Check
from prowler.providers.aws.services.codeartifact.codeartifact_packages_external_public_publishing_disabled.codeartifact_packages_external_public_publishing_disabled import (
codeartifact_packages_external_public_publishing_disabled,
)
check = codeartifact_packages_external_public_publishing_disabled()
result = check.execute()
assert len(result) == 0
def test_repository_without_packages(self):
codeartifact_client = mock.MagicMock
codeartifact_client.repositories = {
"test-repository": Repository(
name="test-repository",
arn="",
domain_name="",
domain_owner="",
region=AWS_REGION,
packages=[],
)
}
with mock.patch(
"prowler.providers.aws.services.codeartifact.codeartifact_service.CodeArtifact",
new=codeartifact_client,
):
# Test Check
from prowler.providers.aws.services.codeartifact.codeartifact_packages_external_public_publishing_disabled.codeartifact_packages_external_public_publishing_disabled import (
codeartifact_packages_external_public_publishing_disabled,
)
check = codeartifact_packages_external_public_publishing_disabled()
result = check.execute()
assert len(result) == 0
def test_repository_package_public_publishing_origin_internal(self):
codeartifact_client = mock.MagicMock
package_name = "test-package"
package_namespace = "test-namespace"
repository_arn = f"arn:aws:codebuild:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:repository/test-repository"
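# Internal package whose publish restriction is ALLOW, so it is flagged as vulnerable to dependency confusion (FAIL)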
codeartifact_client.repositories = {
"test-repository": Repository(
name="test-repository",
arn=repository_arn,
domain_name="",
domain_owner="",
region=AWS_REGION,
packages=[
Package(
name=package_name,
namespace=package_namespace,
format="pypi",
origin_configuration=OriginConfiguration(
restrictions=Restrictions(
publish=RestrictionValues.ALLOW,
upstream=RestrictionValues.ALLOW,
)
),
latest_version=LatestPackageVersion(
version="latest",
status=LatestPackageVersionStatus.Published,
origin=OriginInformation(
origin_type=OriginInformationValues.INTERNAL
),
),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.codeartifact.codeartifact_service.CodeArtifact",
new=codeartifact_client,
):
# Test Check
from prowler.providers.aws.services.codeartifact.codeartifact_packages_external_public_publishing_disabled.codeartifact_packages_external_public_publishing_disabled import (
codeartifact_packages_external_public_publishing_disabled,
)
check = codeartifact_packages_external_public_publishing_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == "test-package"
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Internal package {package_namespace} {package_name} is vulnerable to dependency confusion in repository {repository_arn}"
)
def test_repository_package_private_publishing_origin_internal(self):
codeartifact_client = mock.MagicMock
package_name = "test-package"
package_namespace = "test-namespace"
repository_arn = f"arn:aws:codebuild:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:repository/test-repository"
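# Internal package whose publish restriction is BLOCK, so it is not vulnerable to dependency confusion (PASS)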
codeartifact_client.repositories = {
"test-repository": Repository(
name="test-repository",
arn=repository_arn,
domain_name="",
domain_owner="",
region=AWS_REGION,
packages=[
Package(
name=package_name,
namespace=package_namespace,
format="pypi",
origin_configuration=OriginConfiguration(
restrictions=Restrictions(
publish=RestrictionValues.BLOCK,
upstream=RestrictionValues.BLOCK,
)
),
latest_version=LatestPackageVersion(
version="latest",
status=LatestPackageVersionStatus.Published,
origin=OriginInformation(
origin_type=OriginInformationValues.INTERNAL
),
),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.codeartifact.codeartifact_service.CodeArtifact",
new=codeartifact_client,
):
# Test Check
from prowler.providers.aws.services.codeartifact.codeartifact_packages_external_public_publishing_disabled.codeartifact_packages_external_public_publishing_disabled import (
codeartifact_packages_external_public_publishing_disabled,
)
check = codeartifact_packages_external_public_publishing_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].region == AWS_REGION
assert result[0].resource_id == "test-package"
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Internal package {package_namespace} {package_name} is not vulnerable to dependency confusion in repository {repository_arn}"
)

View File

@@ -0,0 +1,174 @@
from unittest.mock import patch
import botocore
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.codeartifact.codeartifact_service import (
CodeArtifact,
LatestPackageVersionStatus,
OriginInformationValues,
RestrictionValues,
)
# Mock Test Region
AWS_REGION = "eu-west-1"
# Mocking CodeArtifact Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""We have to mock every AWS API call using Boto3"""
if operation_name == "ListRepositories":
return {
"repositories": [
{
"name": "test-repository",
"administratorAccount": DEFAULT_ACCOUNT_ID,
"domainName": "test-domain",
"domainOwner": DEFAULT_ACCOUNT_ID,
"arn": f"arn:aws:codebuild:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:repository/test-repository",
"description": "test description",
},
]
}
if operation_name == "ListPackages":
return {
"packages": [
{
"format": "pypi",
"namespace": "test-namespace",
"package": "test-package",
"originConfiguration": {
"restrictions": {
"publish": "ALLOW",
"upstream": "ALLOW",
}
},
},
],
}
if operation_name == "ListPackageVersions":
return {
"defaultDisplayVersion": "latest",
"format": "pypi",
"namespace": "test-namespace",
"package": "test-package",
"versions": [
{
"version": "latest",
"revision": "lates",
"status": "Published",
"origin": {
"domainEntryPoint": {
"repositoryName": "test-repository",
"externalConnectionName": "",
},
"originType": "INTERNAL",
},
},
],
}
return make_api_call(self, operation_name, kwarg)
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
# Patch every AWS API call made through Boto3 and patch generate_regional_clients to return a single regional client
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
"prowler.providers.aws.services.codeartifact.codeartifact_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_CodeArtifact_Service:
# Test CodeArtifact Client
def test__get_client__(self):
codeartifact = CodeArtifact(current_audit_info)
assert (
codeartifact.regional_clients[AWS_REGION].__class__.__name__
== "CodeArtifact"
)
# Test CodeArtifact Session
def test__get_session__(self):
codeartifact = CodeArtifact(current_audit_info)
assert codeartifact.session.__class__.__name__ == "Session"
# Test CodeArtifact Service
def test__get_service__(self):
codeartifact = CodeArtifact(current_audit_info)
assert codeartifact.service == "codeartifact"
def test__list_repositories__(self):
# Set partition for the service
current_audit_info.audited_partition = "aws"
codeartifact = CodeArtifact(current_audit_info)
assert len(codeartifact.repositories) == 1
assert codeartifact.repositories
assert codeartifact.repositories["test-repository"]
assert codeartifact.repositories["test-repository"].name == "test-repository"
assert (
codeartifact.repositories["test-repository"].arn
== f"arn:aws:codebuild:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:repository/test-repository"
)
assert codeartifact.repositories["test-repository"].domain_name == "test-domain"
assert (
codeartifact.repositories["test-repository"].domain_owner
== DEFAULT_ACCOUNT_ID
)
assert codeartifact.repositories["test-repository"].region == AWS_REGION
assert codeartifact.repositories["test-repository"].packages
assert len(codeartifact.repositories["test-repository"].packages) == 1
assert (
codeartifact.repositories["test-repository"].packages[0].name
== "test-package"
)
assert (
codeartifact.repositories["test-repository"].packages[0].namespace
== "test-namespace"
)
assert codeartifact.repositories["test-repository"].packages[0].format == "pypi"
assert (
codeartifact.repositories["test-repository"]
.packages[0]
.origin_configuration.restrictions.publish
== RestrictionValues.ALLOW
)
assert (
codeartifact.repositories["test-repository"]
.packages[0]
.origin_configuration.restrictions.upstream
== RestrictionValues.ALLOW
)
assert (
codeartifact.repositories["test-repository"]
.packages[0]
.latest_version.version
== "latest"
)
assert (
codeartifact.repositories["test-repository"]
.packages[0]
.latest_version.status
== LatestPackageVersionStatus.Published
)
assert (
codeartifact.repositories["test-repository"]
.packages[0]
.latest_version.origin.origin_type
== OriginInformationValues.INTERNAL
)

View File

@@ -0,0 +1,89 @@
from datetime import datetime, timedelta, timezone
from re import search
from unittest import mock
from prowler.providers.aws.services.codebuild.codebuild_service import CodebuildProject
class Test_codebuild_project_older_90_days:
def test_project_not_built_in_last_90_days(self):
codebuild_client = mock.MagicMock
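# Project whose last build finished 100 days ago, outside the 90-day window, so the check should FAIL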
codebuild_client.projects = [
CodebuildProject(
name="test",
region="eu-west-1",
last_invoked_time=datetime.now(timezone.utc) - timedelta(days=100),
buildspec=None,
)
]
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_older_90_days.codebuild_project_older_90_days import (
codebuild_project_older_90_days,
)
check = codebuild_project_older_90_days()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"has not been invoked in the last 90 days", result[0].status_extended
)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""
def test_project_not_built(self):
codebuild_client = mock.MagicMock
codebuild_client.projects = [
CodebuildProject(
name="test", region="eu-west-1", last_invoked_time=None, buildspec=None
)
]
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_older_90_days.codebuild_project_older_90_days import (
codebuild_project_older_90_days,
)
check = codebuild_project_older_90_days()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search("has never been built", result[0].status_extended)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""
def test_project_built_in_last_90_days(self):
codebuild_client = mock.MagicMock
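# Project built 10 days ago, within the 90-day window, so the check should PASS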
codebuild_client.projects = [
CodebuildProject(
name="test",
region="eu-west-1",
last_invoked_time=datetime.now(timezone.utc) - timedelta(days=10),
buildspec=None,
)
]
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_older_90_days.codebuild_project_older_90_days import (
codebuild_project_older_90_days,
)
check = codebuild_project_older_90_days()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"has been invoked in the last 90 days", result[0].status_extended
)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""

View File

@@ -0,0 +1,123 @@
from re import search
from unittest import mock
from prowler.providers.aws.services.codebuild.codebuild_service import CodebuildProject
class Test_codebuild_project_user_controlled_buildspec:
def test_project_not_buildspec(self):
codebuild_client = mock.MagicMock
codebuild_client.projects = [
CodebuildProject(
name="test",
region="eu-west-1",
last_invoked_time=None,
buildspec=None,
)
]
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_user_controlled_buildspec.codebuild_project_user_controlled_buildspec import (
codebuild_project_user_controlled_buildspec,
)
check = codebuild_project_user_controlled_buildspec()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"does not use a user controlled buildspec",
result[0].status_extended,
)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""
def test_project_buildspec_not_yaml(self):
codebuild_client = mock.MagicMock
codebuild_client.projects = [
CodebuildProject(
name="test",
region="eu-west-1",
last_invoked_time=None,
buildspec="arn:aws:s3:::my-codebuild-sample2/buildspec.out",
)
]
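# Buildspec reference without a .yaml/.yml extension is not considered user controlled, so the check should FAIL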
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_user_controlled_buildspec.codebuild_project_user_controlled_buildspec import (
codebuild_project_user_controlled_buildspec,
)
check = codebuild_project_user_controlled_buildspec()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"does not use a user controlled buildspec",
result[0].status_extended,
)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""
def test_project_valid_buildspec(self):
codebuild_client = mock.MagicMock
codebuild_client.projects = [
CodebuildProject(
name="test",
region="eu-west-1",
last_invoked_time=None,
buildspec="arn:aws:s3:::my-codebuild-sample2/buildspec.yaml",
)
]
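# Buildspec pointing to a .yaml file counts as user controlled, so the check should PASS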
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_user_controlled_buildspec.codebuild_project_user_controlled_buildspec import (
codebuild_project_user_controlled_buildspec,
)
check = codebuild_project_user_controlled_buildspec()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search("uses a user controlled buildspec", result[0].status_extended)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""
def test_project_invalid_buildspec_without_extension(self):
codebuild_client = mock.MagicMock
codebuild_client.projects = [
CodebuildProject(
name="test",
region="eu-west-1",
last_invoked_time=None,
buildspec="arn:aws:s3:::my-codebuild-sample2/buildspecyaml",
)
]
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_user_controlled_buildspec.codebuild_project_user_controlled_buildspec import (
codebuild_project_user_controlled_buildspec,
)
check = codebuild_project_user_controlled_buildspec()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"does not use a user controlled buildspec",
result[0].status_extended,
)
assert result[0].resource_id == "test"
assert result[0].resource_arn == ""

View File

@@ -0,0 +1,101 @@
from datetime import datetime, timedelta
from unittest.mock import patch
import botocore
from boto3 import session
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.codebuild.codebuild_service import Codebuild
# Mock Test Region
AWS_REGION = "eu-west-1"
AWS_ACCOUNT_NUMBER = 123456789012
# Last invoked time for the mocked build
last_invoked_time = datetime.now() - timedelta(days=2)
# Mocking Codebuild API calls (ListProjects, ListBuildsForProject, BatchGetBuilds, BatchGetProjects)
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
if operation_name == "ListProjects":
return {"projects": ["test"]}
if operation_name == "ListBuildsForProject":
return {"ids": ["test:93f838a7-cd20-48ae-90e5-c10fbbc78ca6"]}
if operation_name == "BatchGetBuilds":
return {"builds": [{"endTime": last_invoked_time}]}
if operation_name == "BatchGetProjects":
return {
"projects": [
{
"source": {
"buildspec": "arn:aws:s3:::my-codebuild-sample2/buildspec.yml"
}
}
]
}
return make_api_call(self, operation_name, kwarg)
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
"prowler.providers.aws.services.codebuild.codebuild_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_Codebuild_Service:
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test Codebuild Session
def test__get_session__(self):
codebuild = Codebuild(current_audit_info)
assert codebuild.session.__class__.__name__ == "Session"
# Test Codebuild Service
def test__get_service__(self):
codebuild = Codebuild(current_audit_info)
assert codebuild.service == "codebuild"
def test__list_projects__(self):
codebuild = Codebuild(current_audit_info)
assert len(codebuild.projects) == 1
assert codebuild.projects[0].name == "test"
assert codebuild.projects[0].region == AWS_REGION
def test__list_builds_for_project__(self):
codebuild = Codebuild(current_audit_info)
assert len(codebuild.projects) == 1
assert codebuild.projects[0].name == "test"
assert codebuild.projects[0].region == AWS_REGION
assert codebuild.projects[0].last_invoked_time == last_invoked_time
assert (
codebuild.projects[0].buildspec
== "arn:aws:s3:::my-codebuild-sample2/buildspec.yml"
)

View File

@@ -0,0 +1,107 @@
from unittest import mock
from boto3 import client
from moto import mock_config
AWS_REGION = "us-east-1"
class Test_config_recorder_all_regions_enabled:
@mock_config
def test_config_no_recorders(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.config.config_service import Config
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.config.config_recorder_all_regions_enabled.config_recorder_all_regions_enabled.config_client",
new=Config(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.config.config_recorder_all_regions_enabled.config_recorder_all_regions_enabled import (
config_recorder_all_regions_enabled,
)
check = config_recorder_all_regions_enabled()
result = check.execute()
assert (
len(result) == 25
) # One fail result per region, since there are no recorders
assert result[0].status == "FAIL"
@mock_config
def test_config_one_recorder_disabled(self):
# Create Config Mocked Resources
config_client = client("config", region_name=AWS_REGION)
# Create Config Recorder
config_client.put_configuration_recorder(
ConfigurationRecorder={"name": "default", "roleARN": "somearn"}
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.config.config_service import Config
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.config.config_recorder_all_regions_enabled.config_recorder_all_regions_enabled.config_client",
new=Config(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.config.config_recorder_all_regions_enabled.config_recorder_all_regions_enabled import (
config_recorder_all_regions_enabled,
)
check = config_recorder_all_regions_enabled()
result = check.execute()
assert len(result) == 25
# Search for the recorder just created
for recorder in result:
if recorder.resource_id:
assert recorder.status == "FAIL"
assert (
recorder.status_extended
== "AWS Config recorder default is disabled."
)
assert recorder.resource_id == "default"
@mock_config
def test_config_one_recorder_enabled(self):
# Create Config Mocked Resources
config_client = client("config", region_name=AWS_REGION)
# Create Config Recorder and start it
config_client.put_configuration_recorder(
ConfigurationRecorder={"name": "default", "roleARN": "somearn"}
)
# Make the delivery channel
config_client.put_delivery_channel(
DeliveryChannel={"name": "testchannel", "s3BucketName": "somebucket"}
)
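# AWS Config requires a delivery channel to exist before the recorder can be started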
config_client.start_configuration_recorder(ConfigurationRecorderName="default")
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.config.config_service import Config
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.config.config_recorder_all_regions_enabled.config_recorder_all_regions_enabled.config_client",
new=Config(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.config.config_recorder_all_regions_enabled.config_recorder_all_regions_enabled import (
config_recorder_all_regions_enabled,
)
check = config_recorder_all_regions_enabled()
result = check.execute()
assert len(result) == 25
# Search for the recorder just created
for recorder in result:
if recorder.resource_id:
assert recorder.status == "PASS"
assert (
recorder.status_extended
== "AWS Config recorder default is enabled."
)
assert recorder.resource_id == "default"

View File

@@ -0,0 +1,89 @@
from boto3 import client, session
from moto import mock_config
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.config.config_service import Config
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
class Test_Config_Service:
# Mocked Audit Info
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test Config Service
@mock_config
def test_service(self):
# Config client for this test class
audit_info = self.set_mocked_audit_info()
config = Config(audit_info)
assert config.service == "config"
# Test Config Client
@mock_config
def test_client(self):
# Config client for this test class
audit_info = self.set_mocked_audit_info()
config = Config(audit_info)
for regional_client in config.regional_clients.values():
assert regional_client.__class__.__name__ == "ConfigService"
# Test Config Session
@mock_config
def test__get_session__(self):
# Config client for this test class
audit_info = self.set_mocked_audit_info()
config = Config(audit_info)
assert config.session.__class__.__name__ == "Session"
# Test Config Audited Account
@mock_config
def test_audited_account(self):
# Config client for this test class
audit_info = self.set_mocked_audit_info()
config = Config(audit_info)
assert config.audited_account == AWS_ACCOUNT_NUMBER
# Test Config Describe Configuration Recorder Status
@mock_config
def test__describe_configuration_recorder_status__(self):
# Generate Config Client
config_client = client("config", region_name=AWS_REGION)
# Create Config Recorder and start it
config_client.put_configuration_recorder(
ConfigurationRecorder={"name": "default", "roleARN": "somearn"}
)
# Make the delivery channel
config_client.put_delivery_channel(
DeliveryChannel={"name": "testchannel", "s3BucketName": "somebucket"}
)
config_client.start_configuration_recorder(ConfigurationRecorderName="default")
# Config client for this test class
audit_info = self.set_mocked_audit_info()
config = Config(audit_info)
# One recorder per region
assert len(config.recorders) == 25
# Check the active one
# Search for the recorder just created
for recorder in config.recorders:
if recorder.name == "default":
assert recorder.recording is True

View File

@@ -0,0 +1,103 @@
from datetime import datetime
from unittest import mock
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
Directory,
DirectoryType,
LogSubscriptions,
)
AWS_REGION = "eu-west-1"
class Test_directoryservice_directory_log_forwarding_enabled:
def test_no_directories(self):
directoryservice_client = mock.MagicMock
directoryservice_client.directories = {}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_log_forwarding_enabled.directoryservice_directory_log_forwarding_enabled import (
directoryservice_directory_log_forwarding_enabled,
)
check = directoryservice_directory_log_forwarding_enabled()
result = check.execute()
assert len(result) == 0
def test_one_directory_logging_disabled(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
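# Directory with no CloudWatch log subscriptions, so log forwarding is disabled and the check should FAIL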
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
log_subscriptions=[],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_log_forwarding_enabled.directoryservice_directory_log_forwarding_enabled import (
directoryservice_directory_log_forwarding_enabled,
)
check = directoryservice_directory_log_forwarding_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory Service {directory_id} have log forwarding to CloudWatch disabled"
)
def test_one_directory_logging_enabled(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
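# Directory with a CloudWatch Logs subscription, so log forwarding is enabled and the check should PASS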
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
log_subscriptions=[
LogSubscriptions(
log_group_name="test-log-group",
created_date_time=datetime(2022, 1, 1),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_log_forwarding_enabled.directoryservice_directory_log_forwarding_enabled import (
directoryservice_directory_log_forwarding_enabled,
)
check = directoryservice_directory_log_forwarding_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Directory Service {directory_id} have log forwarding to CloudWatch enabled"
)

View File

@@ -0,0 +1,107 @@
from datetime import datetime
from unittest import mock
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
Directory,
DirectoryType,
EventTopics,
EventTopicStatus,
)
AWS_REGION = "eu-west-1"
class Test_directoryservice_directory_monitor_notifications:
def test_no_directories(self):
directoryservice_client = mock.MagicMock
directoryservice_client.directories = {}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_monitor_notifications.directoryservice_directory_monitor_notifications import (
directoryservice_directory_monitor_notifications,
)
check = directoryservice_directory_monitor_notifications()
result = check.execute()
assert len(result) == 0
def test_one_directory_logging_disabled(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
id=directory_id,
type=DirectoryType.MicrosoftAD,
name=directory_name,
region=AWS_REGION,
event_topics=[],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_monitor_notifications.directoryservice_directory_monitor_notifications import (
directoryservice_directory_monitor_notifications,
)
check = directoryservice_directory_monitor_notifications()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory Service {directory_id} have SNS messaging disabled"
)
def test_one_directory_logging_enabled(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
event_topics=[
EventTopics(
topic_arn=f"arn:aws:sns:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:test-topic",
topic_name="test-topic",
status=EventTopicStatus.Registered,
created_date_time=datetime(2022, 1, 1),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_monitor_notifications.directoryservice_directory_monitor_notifications import (
directoryservice_directory_monitor_notifications,
)
check = directoryservice_directory_monitor_notifications()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Directory Service {directory_id} have SNS messaging enabled"
)

View File

@@ -0,0 +1,192 @@
from unittest import mock
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
Directory,
DirectoryType,
SnapshotLimit,
)
AWS_REGION = "eu-west-1"
class Test_directoryservice_directory_snapshots_limit:
def test_no_directories(self):
directoryservice_client = mock.MagicMock
directoryservice_client.directories = {}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_snapshots_limit.directoryservice_directory_snapshots_limit import (
directoryservice_directory_snapshots_limit,
)
check = directoryservice_directory_snapshots_limit()
result = check.execute()
assert len(result) == 0
def test_one_directory_snapshots_limit_reached(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
manual_snapshots_current_count = 5
manual_snapshots_limit = 5
manual_snapshots_limit_reached = True
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
snapshots_limits=SnapshotLimit(
manual_snapshots_current_count=manual_snapshots_current_count,
manual_snapshots_limit=manual_snapshots_limit,
manual_snapshots_limit_reached=manual_snapshots_limit_reached,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_snapshots_limit.directoryservice_directory_snapshots_limit import (
directoryservice_directory_snapshots_limit,
)
check = directoryservice_directory_snapshots_limit()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory Service {directory_id} reached {manual_snapshots_limit} Snapshots limit"
)
def test_one_directory_snapshots_limit_over_threshold(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
manual_snapshots_current_count = 4
manual_snapshots_limit = 5
manual_snapshots_limit_reached = False
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
snapshots_limits=SnapshotLimit(
manual_snapshots_current_count=manual_snapshots_current_count,
manual_snapshots_limit=manual_snapshots_limit,
manual_snapshots_limit_reached=manual_snapshots_limit_reached,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_snapshots_limit.directoryservice_directory_snapshots_limit import (
directoryservice_directory_snapshots_limit,
)
check = directoryservice_directory_snapshots_limit()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory Service {directory_id} is about to reach {manual_snapshots_limit} Snapshots which is the limit"
)
def test_one_directory_snapshots_limit_equal_threshold(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
manual_snapshots_current_count = 3
manual_snapshots_limit = 5
manual_snapshots_limit_reached = False
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
snapshots_limits=SnapshotLimit(
manual_snapshots_current_count=manual_snapshots_current_count,
manual_snapshots_limit=manual_snapshots_limit,
manual_snapshots_limit_reached=manual_snapshots_limit_reached,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_snapshots_limit.directoryservice_directory_snapshots_limit import (
directoryservice_directory_snapshots_limit,
)
check = directoryservice_directory_snapshots_limit()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory Service {directory_id} is about to reach {manual_snapshots_limit} Snapshots which is the limit"
)
def test_one_directory_snapshots_limit_more_threshold(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
manual_snapshots_current_count = 1
manual_snapshots_limit = 5
manual_snapshots_limit_reached = False
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
snapshots_limits=SnapshotLimit(
manual_snapshots_current_count=manual_snapshots_current_count,
manual_snapshots_limit=manual_snapshots_limit,
manual_snapshots_limit_reached=manual_snapshots_limit_reached,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_directory_snapshots_limit.directoryservice_directory_snapshots_limit import (
directoryservice_directory_snapshots_limit,
)
check = directoryservice_directory_snapshots_limit()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Directory Service {directory_id} is using {manual_snapshots_current_count} out of {manual_snapshots_limit} from the Snapshots Limit"
)

View File

@@ -0,0 +1,200 @@
from datetime import datetime
from unittest import mock
from freezegun import freeze_time
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
Certificate,
CertificateState,
CertificateType,
Directory,
DirectoryType,
)
AWS_REGION = "eu-west-1"
# Always use a mocked date to test the certificates' expiration
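# freeze_time pins datetime.now() to 2023-01-01 so the remaining-days-to-expire
# assertions below stay deterministic.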
@freeze_time("2023-01-01")
class Test_directoryservice_ldap_certificate_expiration:
def test_no_directories(self):
directoryservice_client = mock.MagicMock
directoryservice_client.directories = {}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_ldap_certificate_expiration.directoryservice_ldap_certificate_expiration import (
directoryservice_ldap_certificate_expiration,
)
check = directoryservice_ldap_certificate_expiration()
result = check.execute()
assert len(result) == 0
def test_directory_no_certificate(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
id=directory_id,
type=DirectoryType.MicrosoftAD,
name=directory_name,
region=AWS_REGION,
certificates=[],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_ldap_certificate_expiration.directoryservice_ldap_certificate_expiration import (
directoryservice_ldap_certificate_expiration,
)
check = directoryservice_ldap_certificate_expiration()
result = check.execute()
assert len(result) == 0
def test_directory_certificate_expires_in_365_days(self):
remaining_days_to_expire = 365
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
certificate_id = "test-certificate"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
certificates=[
Certificate(
id=certificate_id,
common_name=certificate_id,
state=CertificateState.Registered,
type=CertificateType.ClientLDAPS,
expiry_date_time=datetime(2024, 1, 1),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_ldap_certificate_expiration.directoryservice_ldap_certificate_expiration import (
directoryservice_ldap_certificate_expiration,
)
check = directoryservice_ldap_certificate_expiration()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == certificate_id
assert result[0].region == AWS_REGION
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"LDAP Certificate {certificate_id} configured at {directory_id} expires in {remaining_days_to_expire} days"
)
def test_directory_certificate_expires_in_90_days(self):
remaining_days_to_expire = 90
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
certificate_id = "test-certificate"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
certificates=[
Certificate(
id=certificate_id,
common_name=certificate_id,
state=CertificateState.Registered,
type=CertificateType.ClientLDAPS,
expiry_date_time=datetime(2023, 4, 1),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_ldap_certificate_expiration.directoryservice_ldap_certificate_expiration import (
directoryservice_ldap_certificate_expiration,
)
check = directoryservice_ldap_certificate_expiration()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == certificate_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"LDAP Certificate {certificate_id} configured at {directory_id} is about to expire in {remaining_days_to_expire} days"
)
def test_directory_certificate_expires_in_31_days(self):
remaining_days_to_expire = 31
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
certificate_id = "test-certificate"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
certificates=[
Certificate(
id=certificate_id,
common_name=certificate_id,
state=CertificateState.Registered,
type=CertificateType.ClientLDAPS,
expiry_date_time=datetime(2023, 2, 1),
)
],
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_ldap_certificate_expiration.directoryservice_ldap_certificate_expiration import (
directoryservice_ldap_certificate_expiration,
)
check = directoryservice_ldap_certificate_expiration()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == certificate_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"LDAP Certificate {certificate_id} configured at {directory_id} is about to expire in {remaining_days_to_expire} days"
)

View File

@@ -0,0 +1,131 @@
from unittest import mock
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
AuthenticationProtocol,
Directory,
DirectoryType,
RadiusSettings,
RadiusStatus,
)
AWS_REGION = "eu-west-1"
class Test_directoryservice_radius_server_security_protocol:
def test_no_directories(self):
directoryservice_client = mock.MagicMock
directoryservice_client.directories = {}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_radius_server_security_protocol.directoryservice_radius_server_security_protocol import (
directoryservice_radius_server_security_protocol,
)
check = directoryservice_radius_server_security_protocol()
result = check.execute()
assert len(result) == 0
def test_directory_no_radius_server(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=None,
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_radius_server_security_protocol.directoryservice_radius_server_security_protocol import (
directoryservice_radius_server_security_protocol,
)
check = directoryservice_radius_server_security_protocol()
result = check.execute()
assert len(result) == 0
def test_directory_radius_server_bad_auth_protocol(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=RadiusSettings(
authentication_protocol=AuthenticationProtocol.MS_CHAPv1,
status=RadiusStatus.Completed,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_radius_server_security_protocol.directoryservice_radius_server_security_protocol import (
directoryservice_radius_server_security_protocol,
)
check = directoryservice_radius_server_security_protocol()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Radius server of Directory {directory_id} does not have recommended security protocol for the Radius server"
)
def test_directory_radius_server_secure_auth_protocol(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=RadiusSettings(
authentication_protocol=AuthenticationProtocol.MS_CHAPv2,
status=RadiusStatus.Completed,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_radius_server_security_protocol.directoryservice_radius_server_security_protocol import (
directoryservice_radius_server_security_protocol,
)
check = directoryservice_radius_server_security_protocol()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Radius server of Directory {directory_id} have recommended security protocol for the Radius server"
)

View File

@@ -0,0 +1,230 @@
from datetime import datetime
from unittest.mock import patch
import botocore
from moto import mock_ds
from moto.core import DEFAULT_ACCOUNT_ID
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
AuthenticationProtocol,
CertificateState,
CertificateType,
DirectoryService,
DirectoryType,
EventTopicStatus,
RadiusStatus,
)
# Mock Test Region
AWS_REGION = "eu-west-1"
# Mocking Directory Service Calls
make_api_call = botocore.client.BaseClient._make_api_call
def mock_make_api_call(self, operation_name, kwarg):
"""We have to mock every AWS API call using Boto3"""
if operation_name == "DescribeDirectories":
return {
"DirectoryDescriptions": [
{
"DirectoryId": "d-12345a1b2",
"Name": "test-directory",
"Type": "MicrosoftAD",
"ShortName": "test-directory",
"RadiusSettings": {
"RadiusServers": [
"test-server",
],
"RadiusPort": 9999,
"RadiusTimeout": 100,
"RadiusRetries": 100,
"SharedSecret": "test-shared-secret",
"AuthenticationProtocol": "MS-CHAPv2",
"DisplayLabel": "test-directory",
"UseSameUsername": True | False,
},
"RadiusStatus": "Creating",
},
],
}
if operation_name == "ListLogSubscriptions":
return {
"LogSubscriptions": [
{
"DirectoryId": "d-12345a1b2",
"LogGroupName": "test-log-group",
"SubscriptionCreatedDateTime": datetime(2022, 1, 1),
},
],
}
if operation_name == "DescribeEventTopics":
return {
"EventTopics": [
{
"DirectoryId": "d-12345a1b2",
"TopicName": "test-topic",
"TopicArn": f"arn:aws:sns:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:test-topic",
"CreatedDateTime": datetime(2022, 1, 1),
"Status": "Registered",
},
]
}
if operation_name == "ListCertificates":
return {
"CertificatesInfo": [
{
"CertificateId": "test-certificate",
"CommonName": "test-certificate",
"State": "Registered",
"ExpiryDateTime": datetime(2023, 1, 1),
"Type": "ClientLDAPS",
},
]
}
if operation_name == "GetSnapshotLimits":
return {
"SnapshotLimits": {
"ManualSnapshotsLimit": 123,
"ManualSnapshotsCurrentCount": 123,
"ManualSnapshotsLimitReached": True,
}
}
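# Any operation not handled above falls back to the original botocore call.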
return make_api_call(self, operation_name, kwarg)
# Mock generate_regional_clients()
def mock_generate_regional_clients(service, audit_info):
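# Return a single client keyed by AWS_REGION so the service under test only
# enumerates one region.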
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
regional_client.region = AWS_REGION
return {AWS_REGION: regional_client}
# Patch every AWS API call made through Boto3 and mock generate_regional_clients so that only one regional client is created
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.generate_regional_clients",
new=mock_generate_regional_clients,
)
class Test_DirectoryService_Service:
# Test DirectoryService Client
@mock_ds
def test__get_client__(self):
directoryservice = DirectoryService(current_audit_info)
assert (
directoryservice.regional_clients[AWS_REGION].__class__.__name__
== "DirectoryService"
)
# Test DirectoryService Session
@mock_ds
def test__get_session__(self):
directoryservice = DirectoryService(current_audit_info)
assert directoryservice.session.__class__.__name__ == "Session"
# Test DirectoryService Service
@mock_ds
def test__get_service__(self):
directoryservice = DirectoryService(current_audit_info)
assert directoryservice.service == "ds"
@mock_ds
def test__describe_directories__(self):
# Set partition for the service
current_audit_info.audited_partition = "aws"
directoryservice = DirectoryService(current_audit_info)
# __describe_directories__
assert directoryservice.directories["d-12345a1b2"].id == "d-12345a1b2"
assert (
directoryservice.directories["d-12345a1b2"].type
== DirectoryType.MicrosoftAD
)
assert directoryservice.directories["d-12345a1b2"].name == "test-directory"
assert directoryservice.directories["d-12345a1b2"].region == AWS_REGION
assert (
directoryservice.directories[
"d-12345a1b2"
].radius_settings.authentication_protocol
== AuthenticationProtocol.MS_CHAPv2
)
assert (
directoryservice.directories["d-12345a1b2"].radius_settings.status
== RadiusStatus.Creating
)
# __list_log_subscriptions__
assert len(directoryservice.directories["d-12345a1b2"].log_subscriptions) == 1
assert (
directoryservice.directories["d-12345a1b2"]
.log_subscriptions[0]
.log_group_name
== "test-log-group"
)
assert directoryservice.directories["d-12345a1b2"].log_subscriptions[
0
].created_date_time == datetime(2022, 1, 1)
# __describe_event_topics__
assert len(directoryservice.directories["d-12345a1b2"].event_topics) == 1
assert (
directoryservice.directories["d-12345a1b2"].event_topics[0].topic_name
== "test-topic"
)
assert (
directoryservice.directories["d-12345a1b2"].event_topics[0].topic_arn
== f"arn:aws:sns:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:test-topic"
)
assert (
directoryservice.directories["d-12345a1b2"].event_topics[0].status
== EventTopicStatus.Registered
)
assert directoryservice.directories["d-12345a1b2"].event_topics[
0
].created_date_time == datetime(2022, 1, 1)
# __list_certificates__
assert len(directoryservice.directories["d-12345a1b2"].certificates) == 1
assert (
directoryservice.directories["d-12345a1b2"].certificates[0].id
== "test-certificate"
)
assert (
directoryservice.directories["d-12345a1b2"].certificates[0].common_name
== "test-certificate"
)
assert (
directoryservice.directories["d-12345a1b2"].certificates[0].state
== CertificateState.Registered
)
assert directoryservice.directories["d-12345a1b2"].certificates[
0
].expiry_date_time == datetime(2023, 1, 1)
assert (
directoryservice.directories["d-12345a1b2"].certificates[0].type
== CertificateType.ClientLDAPS
)
# __get_snapshot_limits__
assert directoryservice.directories["d-12345a1b2"].snapshots_limits
assert (
directoryservice.directories[
"d-12345a1b2"
].snapshots_limits.manual_snapshots_limit
== 123
)
assert (
directoryservice.directories[
"d-12345a1b2"
].snapshots_limits.manual_snapshots_current_count
== 123
)
assert (
directoryservice.directories[
"d-12345a1b2"
].snapshots_limits.manual_snapshots_limit_reached
is True
)

View File

@@ -0,0 +1,168 @@
from unittest import mock
from prowler.providers.aws.services.directoryservice.directoryservice_service import (
AuthenticationProtocol,
Directory,
DirectoryType,
RadiusSettings,
RadiusStatus,
)
AWS_REGION = "eu-west-1"
class Test_directoryservice_supported_mfa_radius_enabled:
def test_no_directories(self):
directoryservice_client = mock.MagicMock
directoryservice_client.directories = {}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_supported_mfa_radius_enabled.directoryservice_supported_mfa_radius_enabled import (
directoryservice_supported_mfa_radius_enabled,
)
check = directoryservice_supported_mfa_radius_enabled()
result = check.execute()
assert len(result) == 0
def test_directory_no_radius_server(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=None,
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_supported_mfa_radius_enabled.directoryservice_supported_mfa_radius_enabled import (
directoryservice_supported_mfa_radius_enabled,
)
check = directoryservice_supported_mfa_radius_enabled()
result = check.execute()
assert len(result) == 0
def test_directory_radius_server_status_failed(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=RadiusSettings(
authentication_protocol=AuthenticationProtocol.MS_CHAPv1,
status=RadiusStatus.Failed,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_supported_mfa_radius_enabled.directoryservice_supported_mfa_radius_enabled import (
directoryservice_supported_mfa_radius_enabled,
)
check = directoryservice_supported_mfa_radius_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory {directory_id} does not have Radius MFA enabled"
)
def test_directory_radius_server_status_creating(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=RadiusSettings(
authentication_protocol=AuthenticationProtocol.MS_CHAPv2,
status=RadiusStatus.Creating,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_supported_mfa_radius_enabled.directoryservice_supported_mfa_radius_enabled import (
directoryservice_supported_mfa_radius_enabled,
)
check = directoryservice_supported_mfa_radius_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Directory {directory_id} does not have Radius MFA enabled"
)
def test_directory_radius_server_status_completed(self):
directoryservice_client = mock.MagicMock
directory_name = "test-directory"
directory_id = "d-12345a1b2"
directoryservice_client.directories = {
directory_name: Directory(
name=directory_name,
id=directory_id,
type=DirectoryType.MicrosoftAD,
region=AWS_REGION,
radius_settings=RadiusSettings(
authentication_protocol=AuthenticationProtocol.MS_CHAPv2,
status=RadiusStatus.Completed,
),
)
}
with mock.patch(
"prowler.providers.aws.services.directoryservice.directoryservice_service.DirectoryService",
new=directoryservice_client,
):
# Test Check
from prowler.providers.aws.services.directoryservice.directoryservice_supported_mfa_radius_enabled.directoryservice_supported_mfa_radius_enabled import (
directoryservice_supported_mfa_radius_enabled,
)
check = directoryservice_supported_mfa_radius_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].resource_id == directory_id
assert result[0].region == AWS_REGION
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Directory {directory_id} have Radius MFA enabled"
)

View File

@@ -0,0 +1,101 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_dax
from moto.core import DEFAULT_ACCOUNT_ID
AWS_REGION = "us-east-1"
class Test_dynamodb_accelerator_cluster_encryption_enabled:
@mock_dax
def test_dax_no_clusters(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DAX
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_accelerator_cluster_encryption_enabled.dynamodb_accelerator_cluster_encryption_enabled.dax_client",
new=DAX(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_accelerator_cluster_encryption_enabled.dynamodb_accelerator_cluster_encryption_enabled import (
dynamodb_accelerator_cluster_encryption_enabled,
)
check = dynamodb_accelerator_cluster_encryption_enabled()
result = check.execute()
assert len(result) == 0
@mock_dax
def test_dax_cluster_no_encryption(self):
dax_client = client("dax", region_name=AWS_REGION)
iam_role_arn = f"arn:aws:iam::{DEFAULT_ACCOUNT_ID}:role/aws-service-role/dax.amazonaws.com/AWSServiceRoleForDAX"
cluster = dax_client.create_cluster(
ClusterName="daxcluster",
NodeType="dax.t3.small",
ReplicationFactor=3,
IamRoleArn=iam_role_arn,
)["Cluster"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DAX
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_accelerator_cluster_encryption_enabled.dynamodb_accelerator_cluster_encryption_enabled.dax_client",
new=DAX(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_accelerator_cluster_encryption_enabled.dynamodb_accelerator_cluster_encryption_enabled import (
dynamodb_accelerator_cluster_encryption_enabled,
)
check = dynamodb_accelerator_cluster_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"does not have encryption at rest enabled",
result[0].status_extended,
)
assert result[0].resource_id == cluster["ClusterName"]
assert result[0].resource_arn == cluster["ClusterArn"]
@mock_dax
def test_dax_cluster_with_encryption(self):
dax_client = client("dax", region_name=AWS_REGION)
iam_role_arn = f"arn:aws:iam::{DEFAULT_ACCOUNT_ID}:role/aws-service-role/dax.amazonaws.com/AWSServiceRoleForDAX"
cluster = dax_client.create_cluster(
ClusterName="daxcluster",
NodeType="dax.t3.small",
ReplicationFactor=3,
IamRoleArn=iam_role_arn,
SSESpecification={"Enabled": True},
)["Cluster"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DAX
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_accelerator_cluster_encryption_enabled.dynamodb_accelerator_cluster_encryption_enabled.dax_client",
new=DAX(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_accelerator_cluster_encryption_enabled.dynamodb_accelerator_cluster_encryption_enabled import (
dynamodb_accelerator_cluster_encryption_enabled,
)
check = dynamodb_accelerator_cluster_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search("has encryption at rest enabled", result[0].status_extended)
assert result[0].resource_id == cluster["ClusterName"]
assert result[0].resource_arn == cluster["ClusterArn"]

View File

@@ -0,0 +1,190 @@
from boto3 import client, session
from moto import mock_dax, mock_dynamodb
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DAX, DynamoDB
AWS_ACCOUNT_NUMBER = 123456789012
AWS_REGION = "us-east-1"
class Test_DynamoDB_Service:
# Mocked Audit Info
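# Minimal AWS_Audit_Info with only the fields the service constructors need;
# the rest is left as None because these tests run against moto, not real AWS.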
def set_mocked_audit_info(self):
audit_info = AWS_Audit_Info(
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=None,
organizations_metadata=None,
)
return audit_info
# Test Dynamo Service
@mock_dynamodb
def test_service(self):
# Dynamo client for this test class
audit_info = self.set_mocked_audit_info()
dynamodb = DynamoDB(audit_info)
assert dynamodb.service == "dynamodb"
# Test Dynamo Client
@mock_dynamodb
def test_client(self):
# Dynamo client for this test class
audit_info = self.set_mocked_audit_info()
dynamodb = DynamoDB(audit_info)
for regional_client in dynamodb.regional_clients.values():
assert regional_client.__class__.__name__ == "DynamoDB"
# Test Dynamo Session
@mock_dynamodb
def test__get_session__(self):
# Dynamo client for this test class
audit_info = self.set_mocked_audit_info()
dynamodb = DynamoDB(audit_info)
assert dynamodb.session.__class__.__name__ == "Session"
# Test DynamoDB Audited Account
@mock_dynamodb
def test_audited_account(self):
# Dynamo client for this test class
audit_info = self.set_mocked_audit_info()
dynamodb = DynamoDB(audit_info)
assert dynamodb.audited_account == AWS_ACCOUNT_NUMBER
# Test DynamoDB List Tables
@mock_dynamodb
def test__list_tables__(self):
# Generate DynamoDB Client
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
# Create DynamoDB Tables
dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)
dynamodb_client.create_table(
TableName="test2",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)
# DynamoDB client for this test class
audit_info = self.set_mocked_audit_info()
dynamo = DynamoDB(audit_info)
assert len(dynamo.tables) == 2
assert dynamo.tables[0].name == "test1"
assert dynamo.tables[1].name == "test2"
assert dynamo.tables[0].region == AWS_REGION
assert dynamo.tables[1].region == AWS_REGION
# Test DynamoDB Describe Table
@mock_dynamodb
def test__describe_table__(self):
# Generate DynamoDB Client
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
# Create DynamoDB Table
table = dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)["TableDescription"]
# DynamoDB client for this test class
audit_info = self.set_mocked_audit_info()
dynamo = DynamoDB(audit_info)
assert len(dynamo.tables) == 1
assert dynamo.tables[0].arn == table["TableArn"]
assert dynamo.tables[0].name == "test1"
assert dynamo.tables[0].region == AWS_REGION
# Test DynamoDB Describe Continuous Backups
@mock_dynamodb
def test__describe_continuous_backups__(self):
# Generate DynamoDB Client
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
# Create DynamoDB Table
table = dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)["TableDescription"]
dynamodb_client.update_continuous_backups(
TableName="test1",
PointInTimeRecoverySpecification={"PointInTimeRecoveryEnabled": True},
)
# DynamoDB client for this test class
audit_info = self.set_mocked_audit_info()
dynamo = DynamoDB(audit_info)
assert len(dynamo.tables) == 1
assert dynamo.tables[0].arn == table["TableArn"]
assert dynamo.tables[0].name == "test1"
assert dynamo.tables[0].pitr
assert dynamo.tables[0].region == AWS_REGION
# Test DAX Describe Clusters
@mock_dax
def test__describe_clusters__(self):
# Generate DAX Client
dax_client = client("dax", region_name=AWS_REGION)
# Create DAX Clusters
iam_role_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:role/aws-service-role/dax.amazonaws.com/AWSServiceRoleForDAX"
dax_client.create_cluster(
ClusterName="daxcluster1",
NodeType="dax.t3.small",
ReplicationFactor=3,
IamRoleArn=iam_role_arn,
SSESpecification={"Enabled": True},
)
dax_client.create_cluster(
ClusterName="daxcluster2",
NodeType="dax.t3.small",
ReplicationFactor=3,
IamRoleArn=iam_role_arn,
SSESpecification={"Enabled": True},
)
# DAX client for this test class
audit_info = self.set_mocked_audit_info()
dax = DAX(audit_info)
assert len(dax.clusters) == 2
assert dax.clusters[0].name == "daxcluster1"
assert dax.clusters[1].name == "daxcluster2"
assert dax.clusters[0].region == AWS_REGION
assert dax.clusters[1].region == AWS_REGION

View File

@@ -0,0 +1,107 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_dynamodb
AWS_REGION = "us-east-1"
class Test_dynamodb_tables_kms_cmk_encryption_enabled:
@mock_dynamodb
def test_dynamodb_no_tables(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_client",
new=DynamoDB(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_tables_kms_cmk_encryption_enabled import (
dynamodb_tables_kms_cmk_encryption_enabled,
)
check = dynamodb_tables_kms_cmk_encryption_enabled()
result = check.execute()
assert len(result) == 0
@mock_dynamodb
def test_dynamodb_table_kms_encryption(self):
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
table = dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
SSESpecification={"Enabled": True, "KMSMasterKeyId": "/custom-kms-key"},
)["TableDescription"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_client",
new=DynamoDB(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_tables_kms_cmk_encryption_enabled import (
dynamodb_tables_kms_cmk_encryption_enabled,
)
check = dynamodb_tables_kms_cmk_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search("KMS encryption enabled", result[0].status_extended)
assert result[0].resource_id == table["TableName"]
assert result[0].resource_arn == table["TableArn"]
@mock_dynamodb
def test_dynamodb_table_default_encryption(self):
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
table = dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)["TableDescription"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_client",
new=DynamoDB(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_tables_kms_cmk_encryption_enabled.dynamodb_tables_kms_cmk_encryption_enabled import (
dynamodb_tables_kms_cmk_encryption_enabled,
)
check = dynamodb_tables_kms_cmk_encryption_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search("DEFAULT encryption enabled", result[0].status_extended)
assert result[0].resource_id == table["TableName"]
assert result[0].resource_arn == table["TableArn"]

View File

@@ -0,0 +1,115 @@
from re import search
from unittest import mock
from boto3 import client
from moto import mock_dynamodb
AWS_REGION = "us-east-1"
class Test_dynamodb_tables_pitr_enabled:
@mock_dynamodb
def test_dynamodb_no_tables(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_tables_pitr_enabled.dynamodb_tables_pitr_enabled.dynamodb_client",
new=DynamoDB(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_tables_pitr_enabled.dynamodb_tables_pitr_enabled import (
dynamodb_tables_pitr_enabled,
)
check = dynamodb_tables_pitr_enabled()
result = check.execute()
assert len(result) == 0
@mock_dynamodb
def test_dynamodb_table_no_pitr(self):
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
table = dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)["TableDescription"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_tables_pitr_enabled.dynamodb_tables_pitr_enabled.dynamodb_client",
new=DynamoDB(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_tables_pitr_enabled.dynamodb_tables_pitr_enabled import (
dynamodb_tables_pitr_enabled,
)
check = dynamodb_tables_pitr_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"does not have point-in-time recovery enabled",
result[0].status_extended,
)
assert result[0].resource_id == table["TableName"]
assert result[0].resource_arn == table["TableArn"]
@mock_dynamodb
def test_dynamodb_table_with_pitr(self):
dynamodb_client = client("dynamodb", region_name=AWS_REGION)
table = dynamodb_client.create_table(
TableName="test1",
AttributeDefinitions=[
{"AttributeName": "client", "AttributeType": "S"},
{"AttributeName": "app", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "client", "KeyType": "HASH"},
{"AttributeName": "app", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
)["TableDescription"]
dynamodb_client.update_continuous_backups(
TableName="test1",
PointInTimeRecoverySpecification={"PointInTimeRecoveryEnabled": True},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.dynamodb.dynamodb_tables_pitr_enabled.dynamodb_tables_pitr_enabled.dynamodb_client",
new=DynamoDB(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.dynamodb.dynamodb_tables_pitr_enabled.dynamodb_tables_pitr_enabled import (
dynamodb_tables_pitr_enabled,
)
check = dynamodb_tables_pitr_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"has point-in-time recovery enabled", result[0].status_extended
)
assert result[0].resource_id == table["TableName"]
assert result[0].resource_arn == table["TableArn"]

View File

@@ -0,0 +1,110 @@
from unittest import mock
from boto3 import client, resource
from moto import mock_ec2
AWS_REGION = "us-east-1"
EXAMPLE_AMI_ID = "ami-12c6146b"
class Test_ec2_ami_public:
@mock_ec2
def test_no_amis(self):
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.ec2.ec2_service import EC2
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.ec2.ec2_ami_public.ec2_ami_public.ec2_client",
new=EC2(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.ec2.ec2_ami_public.ec2_ami_public import (
ec2_ami_public,
)
check = ec2_ami_public()
result = check.execute()
assert len(result) == 0
@mock_ec2
def test_one_private_ami(self):
ec2 = client("ec2", region_name="us-east-1")
reservation = ec2.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
instance = reservation["Instances"][0]
instance_id = instance["InstanceId"]
image_id = ec2.create_image(
InstanceId=instance_id, Name="test-ami", Description="this is a test ami"
)["ImageId"]
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.ec2.ec2_service import EC2
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.ec2.ec2_ami_public.ec2_ami_public.ec2_client",
new=EC2(current_audit_info),
):
from prowler.providers.aws.services.ec2.ec2_ami_public.ec2_ami_public import (
ec2_ami_public,
)
check = ec2_ami_public()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].status_extended == f"EC2 AMI {image_id} is not public."
assert result[0].resource_id == image_id
@mock_ec2
def test_one_public_ami(self):
ec2 = client("ec2", region_name="us-east-1")
reservation = ec2.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
instance = reservation["Instances"][0]
instance_id = instance["InstanceId"]
image_id = ec2.create_image(
InstanceId=instance_id, Name="test-ami", Description="this is a test ami"
)["ImageId"]
image = resource("ec2", region_name="us-east-1").Image(image_id)
ADD_GROUP_ARGS = {
"ImageId": image_id,
"Attribute": "launchPermission",
"OperationType": "add",
"UserGroups": ["all"],
}
image.modify_attribute(**ADD_GROUP_ARGS)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.ec2.ec2_service import EC2
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.ec2.ec2_ami_public.ec2_ami_public.ec2_client",
new=EC2(current_audit_info),
):
from prowler.providers.aws.services.ec2.ec2_ami_public.ec2_ami_public import (
ec2_ami_public,
)
check = ec2_ami_public()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended == f"EC2 AMI {image_id} is currently public."
)
assert result[0].resource_id == image_id

Some files were not shown because too many files have changed in this diff