mirror of
https://github.com/ghndrx/prowler.git
synced 2026-02-10 14:55:00 +00:00
feat(pip): Prepare for PyPI (#1531)
This commit is contained in:
180
tests/lib/check/check_test.py
Normal file
180
tests/lib/check/check_test.py
Normal file
@@ -0,0 +1,180 @@
|
||||
import os
|
||||
|
||||
from prowler.lib.check.check import (
|
||||
exclude_checks_to_run,
|
||||
exclude_services_to_run,
|
||||
parse_checks_from_file,
|
||||
)
|
||||
from prowler.lib.check.models import load_check_metadata
|
||||
|
||||
|
||||
class Test_Check:
    """Unit tests for the prowler.lib.check.check helpers.

    Fixture files (metadata.json, checklistA.json) live under ./fixtures
    relative to this test module.
    """

    def test_load_check_metadata(self):
        """load_check_metadata parses a metadata JSON file into a model."""
        test_cases = [
            {
                "input": {
                    "metadata_path": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/metadata.json",
                },
                "expected": {
                    "CheckID": "iam_disable_30_days_credentials",
                    "CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
                    "ServiceName": "iam",
                    "Severity": "low",
                },
            }
        ]
        for test in test_cases:
            check_metadata = load_check_metadata(test["input"]["metadata_path"])
            # Compare every expected metadata field so new fields are covered
            # by just extending the "expected" dict.
            for field, value in test["expected"].items():
                assert getattr(check_metadata, field) == value

    def test_parse_checks_from_file(self):
        """parse_checks_from_file returns the set of checks for a provider."""
        test_cases = [
            {
                "input": {
                    "path": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/checklistA.json",
                    "provider": "aws",
                },
                "expected": {"check11", "check12", "check7777"},
            }
        ]
        for test in test_cases:
            check_file = test["input"]["path"]
            provider = test["input"]["provider"]
            assert parse_checks_from_file(check_file, provider) == test["expected"]

    def test_exclude_checks_to_run(self):
        """exclude_checks_to_run removes the excluded checks from the set."""
        test_cases = [
            {
                "input": {
                    "check_list": {"check12", "check11", "extra72", "check13"},
                    "excluded_checks": {"check12", "check13"},
                },
                "expected": {"check11", "extra72"},
            },
            {
                # Excluded checks that are not present must simply be ignored.
                "input": {
                    "check_list": {"check112", "check11", "extra72", "check13"},
                    "excluded_checks": {"check12", "check13", "check14"},
                },
                "expected": {"check112", "check11", "extra72"},
            },
        ]
        for test in test_cases:
            check_list = test["input"]["check_list"]
            excluded_checks = test["input"]["excluded_checks"]
            assert (
                exclude_checks_to_run(check_list, excluded_checks) == test["expected"]
            )

    def test_exclude_services_to_run(self):
        """exclude_services_to_run drops checks belonging to excluded services."""
        test_cases = [
            {
                # Excluding a service with no checks in the set is a no-op.
                "input": {
                    "checks_to_run": {
                        "iam_disable_30_days_credentials",
                        "iam_disable_90_days_credentials",
                    },
                    "excluded_services": {"ec2"},
                    "provider": "aws",
                },
                "expected": {
                    "iam_disable_30_days_credentials",
                    "iam_disable_90_days_credentials",
                },
            },
            {
                # Excluding the service every check belongs to empties the set.
                "input": {
                    "checks_to_run": {
                        "iam_disable_30_days_credentials",
                        "iam_disable_90_days_credentials",
                    },
                    "excluded_services": {"iam"},
                    "provider": "aws",
                },
                "expected": set(),
            },
        ]
        for test in test_cases:
            excluded_services = test["input"]["excluded_services"]
            checks_to_run = test["input"]["checks_to_run"]
            provider = test["input"]["provider"]
            assert (
                exclude_services_to_run(checks_to_run, excluded_services, provider)
                == test["expected"]
            )

    # NOTE(review): three disabled parse_checks_from_compliance_framework tests
    # were removed here. They referenced names this module never imports
    # (mock, bulk_load_compliance_frameworks, parse_checks_from_compliance_framework),
    # so they could not be re-enabled as written. Restore them from version
    # control once the compliance fixtures and imports are in place.
82
tests/lib/check/fixtures/aws/cis_v1.4_aws.json
Normal file
82
tests/lib/check/fixtures/aws/cis_v1.4_aws.json
Normal file
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"Framework": "CIS",
|
||||
"Provider": "AWS",
|
||||
"Version": "1.4",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "1.4",
|
||||
"Description": "Ensure no 'root' user account access key exists (Automated)",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "1. Identity and Access Management (IAM)",
|
||||
"Level": [
|
||||
"level1"
|
||||
],
|
||||
"Rationale": "Removing access keys associated with the 'root' user account limits vectors by which the account can be compromised. Additionally, removing the 'root' access keys encourages the creation and use of role based accounts that are least privileged.",
|
||||
"Guidance": "The 'root' user account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the 'root' user account be removed.",
|
||||
"Additional information": "IAM User account \"root\" for us-gov cloud regions is not enabled by default. However, on request to AWS support enables 'root' access only through access-keys (CLI, API methods) for us-gov cloud region.",
|
||||
"References": [
|
||||
"CCE-78910-7",
|
||||
"https://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html",
|
||||
"https://docs.aws.amazon.com/general/latest/gr/managing-aws-access-keys.html",
|
||||
"https://docs.aws.amazon.com/IAM/latest/APIReference/API_GetAccountSummary.html",
|
||||
"https://aws.amazon.com/blogs/security/an-easier-way-to-determine-the-presence-of-aws-account-access-keys/"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_avoid_root_usage"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "1.10",
|
||||
"Description": "Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Automated)",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "1. Identity and Access Management (IAM)",
|
||||
"Level": [
|
||||
"level1"
|
||||
],
|
||||
"Guidance": "Multi-Factor Authentication (MFA) adds an extra layer of authentication assurance beyond traditional credentials. With MFA enabled, when a user signs in to the AWS Console, they will be prompted for their user name and password as well as for an authentication code from their physical or virtual MFA token. It is recommended that MFA be enabled for all accounts that have a console password.",
|
||||
"Rationale": "Enabling MFA provides increased security for console access as it requires the authenticating principal to possess a device that displays a time-sensitive key and have knowledge of a credential.",
|
||||
"Impact": "AWS will soon end support for SMS multi-factor authentication (MFA). New customers are not allowed to use this feature. We recommend that existing customers switch to one of the following alternative methods of MFA.",
|
||||
"Additional information": "Forced IAM User Self-Service Remediation. Amazon has published a pattern that forces users to self-service setup MFA before they have access to their complete permissions set. Until they complete this step, they cannot access their full permissions. This pattern can be used on new AWS accounts. It can also be used on existing accounts - it is recommended users are given instructions and a grace period to accomplish MFA enrollment before active enforcement on existing AWS accounts.",
|
||||
"References": [
|
||||
"CCE-78901-6",
|
||||
"https://tools.ietf.org/html/rfc6238",
|
||||
"https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa.html",
|
||||
"https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#enable-mfa-for-privileged-users",
|
||||
"https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_enable_virtual.html",
|
||||
"https://blogs.aws.amazon.com/security/post/Tx2SJJYE082KBUK/How-to-Delegate-Management-of-Multi-Factor-Authentication-to-AWS-IAM-Users"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "2.1.1",
|
||||
"Description": "Ensure all S3 buckets employ encryption-at-rest (Automated)",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2. Storage",
|
||||
"Level": [
|
||||
"level2"
|
||||
],
|
||||
"Guidance": "Amazon S3 provides a variety of no, or low, cost encryption options to protect data at rest.",
|
||||
"Rationale": "Encrypting data at rest reduces the likelihood that it is unintentionally exposed and can nullify the impact of disclosure if the encryption remains unbroken.",
|
||||
"Impact": "Amazon S3 buckets with default bucket encryption using SSE-KMS cannot be used as destination buckets for Amazon S3 server access logging. Only SSE-S3 default encryption is supported for server access log destination buckets.",
|
||||
"Additional information": "S3 bucket encryption only applies to objects as they are placed in the bucket. Enabling S3 bucket encryption does not encrypt objects previously stored within the bucket",
|
||||
"References": [
|
||||
"https://docs.aws.amazon.com/AmazonS3/latest/user-guide/default-bucket-encryption.html",
|
||||
"https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html#bucket-encryption-related-resources"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_default_encryption"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
82
tests/lib/check/fixtures/aws/ens_v3_aws.json
Normal file
82
tests/lib/check/fixtures/aws/ens_v3_aws.json
Normal file
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"Framework": "ENS",
|
||||
"Version": "3",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "op.mon.1",
|
||||
"Description": "Detección de intrusión",
|
||||
"Attributes": [
|
||||
{
|
||||
"Marco": "operacional",
|
||||
"Categoria": "monitorización del sistema",
|
||||
"Descripcion_Control": "- En ausencia de otras herramientas de terceros, habilitar Amazon GuarDuty para la detección de amenazas e intrusiones..- Activar el servicio de eventos AWS CloudTrail para todas las regiones..- Activar el servicio VPC FlowLogs..-Deberá habilitarse Amazon GuardDuty para todas las regiones tanto en la cuenta raíz como en las cuentas miembro de un entorno multi-cuenta..-Todas las cuentas miembro deberán estar añadidas para la supervisión bajo la cuenta raíz..-La adminsitración de Amazon GuardDuty quedará delegada exclusivamente a la cuenta de seguridad para garantizar una correcta asignación de los roles para este servicio.",
|
||||
"Nivel": [
|
||||
"bajo",
|
||||
"medio",
|
||||
"alto"
|
||||
],
|
||||
"Dimensiones": [
|
||||
"confidencialidad",
|
||||
"integridad",
|
||||
"trazabilidad",
|
||||
"autenticidad",
|
||||
"disponibilidad"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"vpc_flow_logs_enabled",
|
||||
"guardduty_is_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "op.mon.3",
|
||||
"Description": "Protección de la integridad y de la autenticidad",
|
||||
"Attributes": [
|
||||
{
|
||||
"Marco": "operacional",
|
||||
"Categoria": "protección de las comunicaciones",
|
||||
"Descripcion_Control": "- Habilitar TLS en los balanceadores de carga ELB.- Evitar el uso de protocolos de cifrado inseguros en la conexión TLS entre clientes y balanceadores de carga.- Asegurar que los Buckets de almacenamiento S3 apliquen cifrado para la transferencia de datos empleando TLS.- Asegurar que la distribución entre frontales CloudFront y sus orígenes únicamente emplee tráfico HTTPS.",
|
||||
"Nivel": [
|
||||
"bajo",
|
||||
"medio",
|
||||
"alto"
|
||||
],
|
||||
"Dimensiones": [
|
||||
"integridad",
|
||||
"autenticidad"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_elbv2_insecure_ssl_ciphers",
|
||||
"ec2_elbv2_insecure_ssl_ciphers",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"cloudfront_distributions_https_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "mp.si.2.r2.1",
|
||||
"Description": "Copias de seguridad",
|
||||
"Attributes": [
|
||||
{
|
||||
"Marco": "medidas de protección",
|
||||
"Categoria": "protección de los soportes de información",
|
||||
"Descripcion_Control": "Se deberá asegurar el cifrado de las copias de seguridad de EBS.",
|
||||
"Nivel": [
|
||||
"alto"
|
||||
],
|
||||
"Dimensiones": [
|
||||
"confidencialidad",
|
||||
"integridad"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_snapshot_encryption"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
7
tests/lib/check/fixtures/checklistA.json
Normal file
7
tests/lib/check/fixtures/checklistA.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"aws": [
|
||||
"check11",
|
||||
"check12",
|
||||
"check7777"
|
||||
]
|
||||
}
|
||||
18
tests/lib/check/fixtures/groupsA.json
Normal file
18
tests/lib/check/fixtures/groupsA.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"aws": {
|
||||
"gdpr": {
|
||||
"checks": [
|
||||
"check11",
|
||||
"check12"
|
||||
],
|
||||
"description": "GDPR Readiness"
|
||||
},
|
||||
"iam": {
|
||||
"checks": [
|
||||
"iam_disable_30_days_credentials",
|
||||
"iam_disable_90_days_credentials"
|
||||
],
|
||||
"description": "Identity and Access Management"
|
||||
}
|
||||
}
|
||||
}
|
||||
58
tests/lib/check/fixtures/metadata.json
Normal file
58
tests/lib/check/fixtures/metadata.json
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"Categories": [
|
||||
"cat1",
|
||||
"cat2"
|
||||
],
|
||||
"CheckID": "iam_disable_30_days_credentials",
|
||||
"CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
|
||||
"CheckType": [
|
||||
"Software and Configuration Checks"
|
||||
],
|
||||
"Compliance": [
|
||||
{
|
||||
"Control": [
|
||||
"4.4"
|
||||
],
|
||||
"Framework": "CIS-AWS",
|
||||
"Group": [
|
||||
"level1",
|
||||
"level2"
|
||||
],
|
||||
"Version": "1.4"
|
||||
}
|
||||
],
|
||||
"DependsOn": [
|
||||
"othercheck1",
|
||||
"othercheck2"
|
||||
],
|
||||
"Description": "Ensure credentials unused for 30 days or greater are disabled",
|
||||
"Notes": "additional information",
|
||||
"Provider": "aws",
|
||||
"RelatedTo": [
|
||||
"othercheck3",
|
||||
"othercheck4"
|
||||
],
|
||||
"RelatedUrl": "https://serviceofficialsiteorpageforthissubject",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "cli command or URL to the cli command location.",
|
||||
"NativeIaC": "code or URL to the code location.",
|
||||
"Other": "cli command or URL to the cli command location.",
|
||||
"Terraform": "code or URL to the code location."
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Run sudo yum update and cross your fingers and toes.",
|
||||
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html"
|
||||
}
|
||||
},
|
||||
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
|
||||
"ResourceType": "AwsIamAccessAnalyzer",
|
||||
"Risk": "Risk associated.",
|
||||
"ServiceName": "iam",
|
||||
"Severity": "low",
|
||||
"SubServiceName": "accessanalyzer",
|
||||
"Tags": {
|
||||
"Tag1Key": "value",
|
||||
"Tag2Key": "value"
|
||||
}
|
||||
}
|
||||
58
tests/lib/outputs/fixtures/metadata.json
Normal file
58
tests/lib/outputs/fixtures/metadata.json
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"Categories": [
|
||||
"cat1",
|
||||
"cat2"
|
||||
],
|
||||
"CheckID": "iam_disable_30_days_credentials",
|
||||
"CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
|
||||
"CheckType": [
|
||||
"Software and Configuration Checks"
|
||||
],
|
||||
"Compliance": [
|
||||
{
|
||||
"Control": [
|
||||
"4.4"
|
||||
],
|
||||
"Framework": "CIS-AWS",
|
||||
"Group": [
|
||||
"level1",
|
||||
"level2"
|
||||
],
|
||||
"Version": "1.4"
|
||||
}
|
||||
],
|
||||
"DependsOn": [
|
||||
"othercheck1",
|
||||
"othercheck2"
|
||||
],
|
||||
"Description": "Ensure credentials unused for 30 days or greater are disabled",
|
||||
"Notes": "additional information",
|
||||
"Provider": "aws",
|
||||
"RelatedTo": [
|
||||
"othercheck3",
|
||||
"othercheck4"
|
||||
],
|
||||
"RelatedUrl": "https://serviceofficialsiteorpageforthissubject",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "cli command or URL to the cli command location.",
|
||||
"NativeIaC": "code or URL to the code location.",
|
||||
"Other": "cli command or URL to the cli command location.",
|
||||
"Terraform": "code or URL to the code location."
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Run sudo yum update and cross your fingers and toes.",
|
||||
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html"
|
||||
}
|
||||
},
|
||||
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
|
||||
"ResourceType": "AwsIamAccessAnalyzer",
|
||||
"Risk": "Risk associated.",
|
||||
"ServiceName": "iam",
|
||||
"Severity": "low",
|
||||
"SubServiceName": "accessanalyzer",
|
||||
"Tags": {
|
||||
"Tag1Key": "value",
|
||||
"Tag2Key": "value"
|
||||
}
|
||||
}
|
||||
344
tests/lib/outputs/outputs_test.py
Normal file
344
tests/lib/outputs/outputs_test.py
Normal file
@@ -0,0 +1,344 @@
|
||||
import os
|
||||
from os import path, remove
|
||||
|
||||
import boto3
|
||||
import pytest
|
||||
from colorama import Fore
|
||||
from moto import mock_s3
|
||||
|
||||
from prowler.config.config import (
|
||||
csv_file_suffix,
|
||||
json_asff_file_suffix,
|
||||
json_file_suffix,
|
||||
orange_color,
|
||||
output_file_timestamp,
|
||||
prowler_version,
|
||||
timestamp_iso,
|
||||
timestamp_utc,
|
||||
)
|
||||
from prowler.lib.check.models import Check_Report, load_check_metadata
|
||||
from prowler.lib.outputs.models import (
|
||||
Check_Output_CSV,
|
||||
Check_Output_JSON,
|
||||
Check_Output_JSON_ASFF,
|
||||
Compliance,
|
||||
ProductFields,
|
||||
Resource,
|
||||
Severity,
|
||||
)
|
||||
from prowler.lib.outputs.outputs import (
|
||||
fill_file_descriptors,
|
||||
fill_json,
|
||||
fill_json_asff,
|
||||
generate_csv_fields,
|
||||
send_to_s3_bucket,
|
||||
set_report_color,
|
||||
)
|
||||
from prowler.lib.utils.utils import hash_sha512, open_file
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
|
||||
|
||||
class Test_Outputs:
    """Unit tests for the prowler.lib.outputs helpers."""

    @staticmethod
    def _mock_audit_info(audit_session=None):
        """Build an AWS_Audit_Info filled with static test values."""
        return AWS_Audit_Info(
            original_session=None,
            audit_session=audit_session,
            audited_account="123456789012",
            audited_identity_arn="test-arn",
            audited_user_id="test",
            audited_partition="aws",
            profile="default",
            profile_region="eu-west-1",
            credentials=None,
            assumed_role_info=None,
            audited_regions=["eu-west-2", "eu-west-1"],
            organizations_metadata=None,
        )

    @staticmethod
    def _mock_finding():
        """Build a Check_Report from the metadata fixture with static values."""
        finding = Check_Report(
            load_check_metadata(
                f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
            ).json()
        )
        finding.resource_details = "Test resource details"
        finding.resource_id = "test-resource"
        finding.resource_arn = "test-arn"
        finding.region = "eu-west-1"
        finding.status = "PASS"
        finding.status_extended = "This is a test"
        return finding

    def test_fill_file_descriptors(self):
        """fill_file_descriptors opens one file per requested output mode."""
        audited_account = "123456789012"
        output_directory = os.path.dirname(os.path.realpath(__file__))
        audit_info = self._mock_audit_info()
        output_filename = f"prowler-output-{audited_account}-{output_file_timestamp}"
        suffix_by_mode = {
            "csv": csv_file_suffix,
            "json": json_file_suffix,
            "json-asff": json_asff_file_suffix,
        }
        test_output_modes = [
            ["csv"],
            ["json"],
            ["json-asff"],
            ["csv", "json"],
            ["csv", "json", "json-asff"],
        ]
        # One expected descriptor dict per mode combination, opened in append
        # mode just like fill_file_descriptors does.
        expected = [
            {
                mode: open_file(
                    f"{output_directory}/{output_filename}{suffix_by_mode[mode]}",
                    "a",
                )
                for mode in mode_list
            }
            for mode_list in test_output_modes
        ]

        for index, output_mode_list in enumerate(test_output_modes):
            test_output_file_descriptors = fill_file_descriptors(
                output_mode_list,
                output_directory,
                output_filename,
                audit_info,
            )
            for output_mode in output_mode_list:
                assert (
                    test_output_file_descriptors[output_mode].name
                    == expected[index][output_mode].name
                )
                # Clean up the file created for this mode.
                remove(expected[index][output_mode].name)

    def test_set_report_color(self):
        """Every known status maps to one of the expected colors."""
        valid_colors = [Fore.GREEN, Fore.RED, Fore.BLACK, orange_color]

        for status in ["PASS", "FAIL", "ERROR", "WARNING"]:
            assert set_report_color(status) in valid_colors

    def test_set_report_color_invalid(self):
        """An unknown status raises a descriptive Exception."""
        with pytest.raises(Exception) as exc:
            set_report_color("INVALID")

        assert "Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING" in str(
            exc.value
        )
        assert exc.type == Exception

    def test_generate_csv_fields(self):
        """generate_csv_fields yields the CSV column names in model order."""
        expected = [
            "assessment_start_time",
            "finding_unique_id",
            "provider",
            "profile",
            "account_id",
            "account_name",
            "account_email",
            "account_arn",
            "account_org",
            "account_tags",
            "region",
            "check_id",
            "check_title",
            "check_type",
            "status",
            "status_extended",
            "service_name",
            "subservice_name",
            "severity",
            "resource_id",
            "resource_arn",
            "resource_type",
            "resource_details",
            "resource_tags",
            "description",
            "risk",
            "related_url",
            "remediation_recommendation_text",
            "remediation_recommendation_url",
            "remediation_recommendation_code_nativeiac",
            "remediation_recommendation_code_terraform",
            "remediation_recommendation_code_cli",
            "remediation_recommendation_code_other",
            "categories",
            "depends_on",
            "related_to",
            "notes",
            # "compliance",
        ]

        assert generate_csv_fields(Check_Output_CSV) == expected

    def test_fill_json(self):
        """fill_json copies audit and finding data into a Check_Output_JSON."""
        input_audit_info = self._mock_audit_info()
        finding = self._mock_finding()

        # 'json_output' starts as a bare copy of the check metadata.
        json_output = Check_Output_JSON(**finding.check_metadata.dict())

        expected = Check_Output_JSON(**finding.check_metadata.dict())
        expected.AssessmentStartTime = timestamp_iso
        expected.FindingUniqueId = ""
        expected.Profile = "default"
        expected.AccountId = "123456789012"
        expected.OrganizationsInfo = None
        expected.Region = "eu-west-1"
        expected.Status = "PASS"
        expected.StatusExtended = "This is a test"
        expected.ResourceId = "test-resource"
        expected.ResourceArn = "test-arn"
        expected.ResourceDetails = "Test resource details"

        assert fill_json(json_output, input_audit_info, finding) == expected

    def test_fill_json_asff(self):
        """fill_json_asff populates an ASFF finding from audit and check data."""
        input_audit_info = self._mock_audit_info()
        finding = self._mock_finding()

        asff_output = Check_Output_JSON_ASFF()

        expected = Check_Output_JSON_ASFF()
        expected.Id = f"prowler-{finding.check_metadata.CheckID}-123456789012-eu-west-1-{hash_sha512('test-resource')}"
        expected.ProductArn = "arn:aws:securityhub:eu-west-1::product/prowler/prowler"
        expected.ProductFields = ProductFields(
            ProviderVersion=prowler_version, ProwlerResourceName="test-resource"
        )
        expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
        expected.AwsAccountId = "123456789012"
        expected.Types = finding.check_metadata.CheckType
        # All three timestamps are set to the same formatted UTC instant.
        formatted_timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
        expected.FirstObservedAt = formatted_timestamp
        expected.UpdatedAt = formatted_timestamp
        expected.CreatedAt = formatted_timestamp
        expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
        expected.Title = finding.check_metadata.CheckTitle
        expected.Description = finding.check_metadata.Description
        expected.Resources = [
            Resource(
                Id="test-resource",
                Type=finding.check_metadata.ResourceType,
                Partition="aws",
                Region="eu-west-1",
            )
        ]
        expected.Compliance = Compliance(
            Status="PASSED",
            RelatedRequirements=finding.check_metadata.CheckType,
        )
        expected.Remediation = {
            "Recommendation": finding.check_metadata.Remediation.Recommendation
        }

        assert fill_json_asff(asff_output, input_audit_info, finding) == expected

    @mock_s3
    def test_send_to_s3_bucket(self):
        """send_to_s3_bucket uploads the CSV report to the given bucket."""
        # Mock session and audit info pointing at the moto-backed S3.
        session = boto3.session.Session(
            region_name="us-east-1",
        )
        input_audit_info = self._mock_audit_info(audit_session=session)
        # Create the mock bucket the report will be uploaded to.
        bucket_name = "test_bucket"
        client = boto3.client("s3")
        client.create_bucket(Bucket=bucket_name)
        # Use the fixture CSV as the generated report file.
        output_directory = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures"
        output_mode = "csv"
        filename = f"prowler-output-{input_audit_info.audited_account}"
        # Send the mock csv file to the mock S3 bucket.
        send_to_s3_bucket(
            filename,
            output_directory,
            output_mode,
            bucket_name,
            input_audit_info.audit_session,
        )
        # The upload succeeded if the object exists with the default content type.
        object_key = (
            output_directory + "/" + output_mode + "/" + filename + csv_file_suffix
        )
        assert (
            client.get_object(
                Bucket=bucket_name,
                Key=object_key,
            )["ContentType"]
            == "binary/octet-stream"
        )
Reference in New Issue
Block a user