refactor(load_checks_to_execute): Refactor function and add tests (#3066)

Pepe Fagoaga
2023-11-30 17:41:14 +01:00
committed by GitHub
parent de4166bf0d
commit 6d2b2a9a93
4 changed files with 436 additions and 107 deletions


@@ -0,0 +1,319 @@
from mock import patch

from prowler.lib.check.checks_loader import (
    load_checks_to_execute,
    update_checks_to_execute_with_aliases,
)
from prowler.lib.check.models import (
    Check_Metadata_Model,
    Code,
    Recommendation,
    Remediation,
)

S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME = "s3_bucket_level_public_access_block"
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_CUSTOM_ALIAS = (
    "s3_bucket_level_public_access_block"
)
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY = "medium"
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE = "s3"


class TestCheckLoader:
    provider = "aws"

    def get_custom_check_metadata(self):
        return Check_Metadata_Model(
            Provider="aws",
            CheckID=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME,
            CheckTitle="Check S3 Bucket Level Public Access Block.",
            CheckType=["Data Protection"],
            CheckAliases=[S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_CUSTOM_ALIAS],
            ServiceName=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE,
            SubServiceName="",
            ResourceIdTemplate="arn:partition:s3:::bucket_name",
            Severity=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY,
            ResourceType="AwsS3Bucket",
            Description="Check S3 Bucket Level Public Access Block.",
            Risk="Public access policies may be applied to sensitive data buckets.",
            RelatedUrl="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
            Remediation=Remediation(
                Code=Code(
                    NativeIaC="",
                    Terraform="https://docs.bridgecrew.io/docs/bc_aws_s3_20#terraform",
                    CLI="aws s3api put-public-access-block --region <REGION_NAME> --public-access-block-configuration BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true --bucket <BUCKET_NAME>",
                    Other="https://github.com/cloudmatos/matos/tree/master/remediations/aws/s3/s3/block-public-access",
                ),
                Recommendation=Recommendation(
                    Text="You can enable Public Access Block at the bucket level to prevent the exposure of your data stored in S3.",
                    Url="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
                ),
            ),
            Categories=["internet-exposed"],
            DependsOn=[],
            RelatedTo=[],
            Notes="",
            Compliance=[],
        )
    def test_load_checks_to_execute(self):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = None
        service_list = None
        severities = None
        compliance_frameworks = None
        categories = None

        with patch(
            "prowler.lib.check.checks_loader.recover_checks_from_provider",
            return_value=[
                (
                    f"{S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME}",
                    f"path/to/{S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME}",
                )
            ],
        ):
            assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
                bulk_checks_metatada,
                bulk_compliance_frameworks,
                checks_file,
                check_list,
                service_list,
                severities,
                compliance_frameworks,
                categories,
                self.provider,
            )
    def test_load_checks_to_execute_with_check_list(self):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME]
        service_list = None
        severities = None
        compliance_frameworks = None
        categories = None

        assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
            bulk_checks_metatada,
            bulk_compliance_frameworks,
            checks_file,
            check_list,
            service_list,
            severities,
            compliance_frameworks,
            categories,
            self.provider,
        )

    def test_load_checks_to_execute_with_severities(self):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = []
        service_list = None
        severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
        compliance_frameworks = None
        categories = None

        assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
            bulk_checks_metatada,
            bulk_compliance_frameworks,
            checks_file,
            check_list,
            service_list,
            severities,
            compliance_frameworks,
            categories,
            self.provider,
        )
    def test_load_checks_to_execute_with_severities_and_services(self):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = []
        service_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE]
        severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
        compliance_frameworks = None
        categories = None

        with patch(
            "prowler.lib.check.checks_loader.recover_checks_from_service",
            return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
        ):
            assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
                bulk_checks_metatada,
                bulk_compliance_frameworks,
                checks_file,
                check_list,
                service_list,
                severities,
                compliance_frameworks,
                categories,
                self.provider,
            )

    def test_load_checks_to_execute_with_severities_and_services_not_within_severity(
        self,
    ):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = []
        service_list = ["ec2"]
        severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
        compliance_frameworks = None
        categories = None

        with patch(
            "prowler.lib.check.checks_loader.recover_checks_from_service",
            return_value={"ec2_ami_public"},
        ):
            assert set() == load_checks_to_execute(
                bulk_checks_metatada,
                bulk_compliance_frameworks,
                checks_file,
                check_list,
                service_list,
                severities,
                compliance_frameworks,
                categories,
                self.provider,
            )
    def test_load_checks_to_execute_with_checks_file(
        self,
    ):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = "path/to/test_file"
        check_list = []
        service_list = []
        severities = []
        compliance_frameworks = None
        categories = None

        with patch(
            "prowler.lib.check.checks_loader.parse_checks_from_file",
            return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
        ):
            assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
                bulk_checks_metatada,
                bulk_compliance_frameworks,
                checks_file,
                check_list,
                service_list,
                severities,
                compliance_frameworks,
                categories,
                self.provider,
            )

    def test_load_checks_to_execute_with_service_list(
        self,
    ):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = []
        service_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE]
        severities = []
        compliance_frameworks = None
        categories = None

        with patch(
            "prowler.lib.check.checks_loader.recover_checks_from_service",
            return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
        ):
            assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
                bulk_checks_metatada,
                bulk_compliance_frameworks,
                checks_file,
                check_list,
                service_list,
                severities,
                compliance_frameworks,
                categories,
                self.provider,
            )
    def test_load_checks_to_execute_with_compliance_frameworks(
        self,
    ):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = []
        service_list = []
        severities = []
        compliance_frameworks = ["test-compliance-framework"]
        categories = None

        with patch(
            "prowler.lib.check.checks_loader.parse_checks_from_compliance_framework",
            return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
        ):
            assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
                bulk_checks_metatada,
                bulk_compliance_frameworks,
                checks_file,
                check_list,
                service_list,
                severities,
                compliance_frameworks,
                categories,
                self.provider,
            )

    def test_load_checks_to_execute_with_categories(
        self,
    ):
        bulk_checks_metatada = {
            S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
        }
        bulk_compliance_frameworks = None
        checks_file = None
        check_list = []
        service_list = []
        severities = []
        compliance_frameworks = []
        categories = {"internet-exposed"}

        assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
            bulk_checks_metatada,
            bulk_compliance_frameworks,
            checks_file,
            check_list,
            service_list,
            severities,
            compliance_frameworks,
            categories,
            self.provider,
        )

    def test_update_checks_to_execute_with_aliases(self):
        checks_to_execute = {"renamed_check"}
        check_aliases = {"renamed_check": "check_name"}

        assert {"check_name"} == update_checks_to_execute_with_aliases(
            checks_to_execute, check_aliases
        )
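
Aside: test_update_checks_to_execute_with_aliases pins the alias behaviour down only by example. Below is a minimal, self-contained sketch of logic consistent with that assertion; the helper name resolve_check_aliases and its body are illustrative, not the actual code in prowler.lib.check.checks_loader.

# Illustrative only: one way to map aliased check names onto their canonical
# names so that {"renamed_check"} with {"renamed_check": "check_name"} resolves
# to {"check_name"}, as the test above expects.
def resolve_check_aliases(checks_to_execute: set, check_aliases: dict) -> set:
    resolved = set()
    for check in checks_to_execute:
        # Swap in the canonical name when the requested name is a registered alias.
        resolved.add(check_aliases.get(check, check))
    return resolved


assert resolve_check_aliases({"renamed_check"}, {"renamed_check": "check_name"}) == {
    "check_name"
}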


@@ -3,7 +3,7 @@ import pathlib
 from importlib.machinery import FileFinder
 from pkgutil import ModuleInfo
-from boto3 import client, session
+from boto3 import client
 from fixtures.bulk_checks_metadata import test_bulk_checks_metadata
 from mock import patch
 from moto import mock_s3
@@ -27,8 +27,7 @@ from prowler.providers.aws.aws_provider import (
     get_checks_from_input_arn,
     get_regions_from_audit_resources,
 )
-from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
-from prowler.providers.common.models import Audit_Metadata
+from tests.providers.aws.audit_info_utils import set_mocked_aws_audit_info
 
 AWS_ACCOUNT_NUMBER = "123456789012"
 AWS_REGION = "us-east-1"
@@ -258,36 +257,6 @@ def mock_recover_checks_from_aws_provider_rds_service(*_):
 
 
 class Test_Check:
-    def set_mocked_audit_info(self):
-        audit_info = AWS_Audit_Info(
-            session_config=None,
-            original_session=None,
-            audit_session=session.Session(
-                profile_name=None,
-                botocore_session=None,
-            ),
-            audited_account=AWS_ACCOUNT_NUMBER,
-            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
-            audited_user_id=None,
-            audited_partition="aws",
-            audited_identity_arn=None,
-            profile=None,
-            profile_region=None,
-            credentials=None,
-            assumed_role_info=None,
-            audited_regions=None,
-            organizations_metadata=None,
-            audit_resources=None,
-            mfa_enabled=False,
-            audit_metadata=Audit_Metadata(
-                services_scanned=0,
-                expected_checks=[],
-                completed_checks=0,
-                audit_progress=0,
-            ),
-        )
-        return audit_info
-
     def test_load_check_metadata(self):
         test_cases = [
             {
@@ -363,7 +332,7 @@ class Test_Check:
             provider = test["input"]["provider"]
             assert (
                 parse_checks_from_folder(
-                    self.set_mocked_audit_info(), check_folder, provider
+                    set_mocked_aws_audit_info(), check_folder, provider
                 )
                 == test["expected"]
             )
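
For reference, the inline set_mocked_audit_info method deleted above indicates what the shared set_mocked_aws_audit_info helper imported from tests.providers.aws.audit_info_utils presumably builds. A hedged reconstruction follows, assuming the helper simply wraps the same AWS_Audit_Info object; the real helper may accept parameters or set different defaults.

from boto3 import session

from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.common.models import Audit_Metadata

AWS_ACCOUNT_NUMBER = "123456789012"


# Sketch only: rebuilt from the inline method removed in this commit; not the
# actual contents of tests/providers/aws/audit_info_utils.py.
def set_mocked_aws_audit_info():
    return AWS_Audit_Info(
        session_config=None,
        original_session=None,
        audit_session=session.Session(profile_name=None, botocore_session=None),
        audited_account=AWS_ACCOUNT_NUMBER,
        audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
        audited_user_id=None,
        audited_partition="aws",
        audited_identity_arn=None,
        profile=None,
        profile_region=None,
        credentials=None,
        assumed_role_info=None,
        audited_regions=None,
        organizations_metadata=None,
        audit_resources=None,
        mfa_enabled=False,
        audit_metadata=Audit_Metadata(
            services_scanned=0,
            expected_checks=[],
            completed_checks=0,
            audit_progress=0,
        ),
    )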