feat(outputs): Unify classes to generate outputs dynamically based on the provider (#1545)

Co-authored-by: n4ch04 <nachor1992@gmail.com>
Co-authored-by: sergargar <sergio@verica.io>
Pepe Fagoaga
2022-12-19 13:03:04 +01:00
committed by GitHub
parent fdf80ed89d
commit a55649b3e1
278 changed files with 2070 additions and 1071 deletions

View File

@@ -35,7 +35,6 @@ from prowler.providers.aws.lib.security_hub.security_hub import (
resolve_security_hub_previous_findings,
)
from prowler.providers.common.audit_info import set_provider_audit_info
from prowler.providers.common.outputs import set_provider_output_options
@@ -137,7 +136,7 @@ def prowler():
audit_info = set_provider_audit_info(provider, args.__dict__)
# Parse content from Allowlist file and get it, if necessary, from S3
if args.allowlist_file:
if provider == "aws" and args.allowlist_file:
allowlist_file = parse_allowlist_file(audit_info, args.allowlist_file)
else:
allowlist_file = None
@@ -175,7 +174,9 @@ def prowler():
audit_output_options.output_filename, args.output_directory
)
# Send output to S3 if needed (-B / -D)
if args.output_bucket or args.output_bucket_no_assume:
if provider == "aws" and (
args.output_bucket or args.output_bucket_no_assume
):
output_bucket = args.output_bucket
bucket_session = audit_info.audit_session
# Check if -D was input
@@ -191,7 +192,7 @@ def prowler():
)
# Resolve previous fails of Security Hub
if args.security_hub:
if provider == "aws" and args.security_hub:
resolve_security_hub_previous_findings(args.output_directory, audit_info)
# Display summary table

View File

@@ -85,21 +85,46 @@ class Check_Report:
"""Contains the Check's finding information."""
status: str
region: str
status_extended: str
check_metadata: Check_Metadata_Model
resource_id: str
resource_details: str
resource_tags: list
resource_arn: str
def __init__(self, metadata):
self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
self.status_extended = ""
self.resource_details = ""
self.resource_tags = []
@dataclass
class Check_Report_AWS(Check_Report):
"""Contains the AWS Check's finding information."""
resource_id: str
resource_arn: str
region: str
def __init__(self, metadata):
super().__init__(metadata)
self.resource_id = ""
self.resource_arn = ""
self.region = ""
@dataclass
class Check_Report_Azure(Check_Report):
"""Contains the Azure Check's finding information."""
resource_name: str
resource_id: str
subscription: str
def __init__(self, metadata):
super().__init__(metadata)
self.resource_name = ""
self.resource_id = ""
self.subscription = ""
# Testing Pending
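For reference, a minimal stand-alone sketch (plain dataclasses, illustrative stand-ins rather than the real Prowler models) of the split introduced above: shared fields stay on the base report, while each provider subclass carries only its own identifiers.

from dataclasses import dataclass, field

@dataclass
class Report:  # stand-in for Check_Report
    status: str = ""
    status_extended: str = ""
    resource_details: str = ""
    resource_tags: list = field(default_factory=list)

@dataclass
class ReportAWS(Report):  # stand-in for Check_Report_AWS
    resource_id: str = ""
    resource_arn: str = ""
    region: str = ""

@dataclass
class ReportAzure(Report):  # stand-in for Check_Report_Azure
    resource_name: str = ""
    resource_id: str = ""
    subscription: str = ""

# AWS findings carry region/ARN, Azure findings carry subscription/resource name
print(ReportAWS(status="PASS", region="eu-west-1", resource_id="vol-123"))
print(ReportAzure(status="FAIL", subscription="sub-1", resource_name="kv1"))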

View File

@@ -1,8 +1,12 @@
import argparse
from prowler.config.config import default_output_directory, prowler_version
import sys
from prowler.config.config import (
default_output_directory,
get_aws_available_regions,
prowler_version,
)
class ProwlerArgumentParser:
# Set the default parser
@@ -29,7 +33,6 @@ class ProwlerArgumentParser:
dest="provider",
)
self.__init_allowlist_parser__()
self.__init_outputs_parser__()
self.__init_logging_parser__()
self.__init_checks_parser__()
@@ -40,10 +43,14 @@ class ProwlerArgumentParser:
self.__init_aws_parser__()
self.__init_azure_parser__()
def parse(self) -> argparse.Namespace:
def parse(self, args=None) -> argparse.Namespace:
"""
parse is a wrapper to call parse_args() and do some validation
"""
# We can override sys.argv
if args:
sys.argv = args
# Set AWS as the default provider if no provider is supplied
if len(sys.argv) == 1:
sys.argv = self.__set_default_provider__(sys.argv)
@@ -78,17 +85,6 @@ class ProwlerArgumentParser:
# Save the arguments with the default provider included
return default_args
def __init_allowlist_parser__(self):
# Allowlist
allowlist_parser = self.common_providers_parser.add_argument_group("Allowlist")
allowlist_parser.add_argument(
"-w",
"--allowlist-file",
nargs="?",
default=None,
help="Path for allowlist yaml file. See example prowler/config/allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table ARN or S3 URI, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
)
def __init_outputs_parser__(self):
# Outputs
common_outputs_parser = self.common_providers_parser.add_argument_group(
@@ -197,6 +193,7 @@ class ProwlerArgumentParser:
nargs="+",
help="List of categories to be executed.",
default=[],
# Pending validate choices
)
def __init_list_checks_parser__(self):
@@ -246,6 +243,7 @@ class ProwlerArgumentParser:
nargs="?",
default=None,
help="ARN of the role to be assumed",
# Pending ARN validation
)
aws_auth_subparser.add_argument(
"-T",
@@ -254,6 +252,7 @@ class ProwlerArgumentParser:
default=3600,
type=int,
help="Assumed role session duration in seconds, must be between 900 and 43200. Default: 3600",
# Pending session duration validation
)
aws_auth_subparser.add_argument(
"-I",
@@ -270,6 +269,7 @@ class ProwlerArgumentParser:
"--filter-region",
nargs="+",
help="AWS region names to run Prowler against",
choices=get_aws_available_regions(),
)
# AWS Organizations
aws_orgs_subparser = aws_parser.add_argument_group("AWS Organizations")
@@ -322,6 +322,15 @@ class ProwlerArgumentParser:
default=None,
help="Shodan API key used by check ec2_elastic_ip_shodan.",
)
# Allowlist
allowlist_subparser = aws_parser.add_argument_group("Allowlist")
allowlist_subparser.add_argument(
"-w",
"--allowlist-file",
nargs="?",
default=None,
help="Path for allowlist yaml file. See example prowler/config/allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table ARN or S3 URI, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
)
def __init_azure_parser__(self):
"""Init the Azure Provider CLI parser"""

View File

@@ -1,29 +1,263 @@
from dataclasses import asdict, dataclass
from typing import List, Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.config.config import timestamp
from prowler.lib.check.models import Check_Report, Remediation
from prowler.lib.check.models import Remediation
# Check_Report_AWS, Check_Report_Azure
from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info
import importlib
import sys
from typing import Any
from csv import DictWriter
# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
# from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
@dataclass
class Compliance_Framework:
Framework: str
Version: str
Group: list
Control: list
def generate_provider_output_csv(provider: str, finding, audit_info, mode: str, fd):
"""
generate_provider_output_csv dynamically loads the CSV output model for the selected provider, fills it with the finding data and returns the CSV DictWriter together with the finding output.
"""
try:
# Dynamically load the provider's Check_Output model for the CSV format
finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
output_model = getattr(importlib.import_module(__name__), finding_output_model)
# Fill common data among providers
data = fill_common_data_csv(finding)
if provider == "azure":
data["resource_id"] = finding.resource_id
data["resource_name"] = finding.resource_name
data["subscription"] = finding.subscription
data["tenant_domain"] = audit_info.identity.domain
finding_output = output_model(**data)
if provider == "aws":
data["profile"] = audit_info.profile
data["account_id"] = audit_info.audited_account
data["region"] = finding.region
data["resource_id"] = finding.resource_id
data["resource_arn"] = finding.resource_arn
finding_output = output_model(**data)
if audit_info.organizations_metadata:
finding_output.account_name = (
audit_info.organizations_metadata.account_details_name
)
finding_output.account_email = (
audit_info.organizations_metadata.account_details_email
)
finding_output.account_arn = (
audit_info.organizations_metadata.account_details_arn
)
finding_output.account_org = (
audit_info.organizations_metadata.account_details_org
)
finding_output.account_tags = (
audit_info.organizations_metadata.account_details_tags
)
csv_writer = DictWriter(
fd,
fieldnames=generate_csv_fields(output_model),
delimiter=";",
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
return csv_writer, finding_output
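The dynamic lookup above boils down to resolving a class name from the provider and the output mode. A self-contained sketch of that pattern, with illustrative stand-in classes instead of the real output models:

import importlib

class Aws_Check_Output_CSV:  # stand-in
    pass

class Azure_Check_Output_CSV:  # stand-in
    pass

def load_output_model(provider: str, mode: str):
    # e.g. ("aws", "csv") resolves to Aws_Check_Output_CSV defined in this module
    model_name = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
    return getattr(importlib.import_module(__name__), model_name)

print(load_output_model("aws", "csv"))
print(load_output_model("azure", "csv"))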
def fill_common_data_csv(finding: dict) -> dict:
data = {
"assessment_start_time": timestamp.isoformat(),
"finding_unique_id": "",
"provider": finding.check_metadata.Provider,
"check_id": finding.check_metadata.CheckID,
"check_title": finding.check_metadata.CheckTitle,
"check_type": ",".join(finding.check_metadata.CheckType),
"status": finding.status,
"status_extended": finding.status_extended,
"service_name": finding.check_metadata.ServiceName,
"subservice_name": finding.check_metadata.SubServiceName,
"severity": finding.check_metadata.Severity,
"resource_type": finding.check_metadata.ResourceType,
"resource_details": finding.resource_details,
"resource_tags": finding.resource_tags,
"description": finding.check_metadata.Description,
"risk": finding.check_metadata.Risk,
"related_url": finding.check_metadata.RelatedUrl,
"remediation_recommendation_text": (
finding.check_metadata.Remediation.Recommendation.Text
),
"remediation_recommendation_url": (
finding.check_metadata.Remediation.Recommendation.Url
),
"remediation_recommendation_code_nativeiac": (
finding.check_metadata.Remediation.Code.NativeIaC
),
"remediation_recommendation_code_terraform": (
finding.check_metadata.Remediation.Code.Terraform
),
"remediation_recommendation_code_cli": (
finding.check_metadata.Remediation.Code.CLI
),
"remediation_recommendation_code_other": (
finding.check_metadata.Remediation.Code.Other
),
"categories": __unroll_list__(finding.check_metadata.Categories),
"depends_on": __unroll_list__(finding.check_metadata.DependsOn),
"related_to": __unroll_list__(finding.check_metadata.RelatedTo),
"notes": finding.check_metadata.Notes,
}
return data
def __unroll_list__(listed_items: list):
unrolled_items = ""
separator = "|"
for item in listed_items:
if not unrolled_items:
unrolled_items = f"{item}"
else:
unrolled_items = f"{unrolled_items}{separator}{item}"
return unrolled_items
def generate_csv_fields(format: Any) -> list[str]:
"""Generates the CSV headers for the given class"""
csv_fields = []
# __fields__ is always available in Pydantic's BaseModel class
for field in format.__dict__.get("__fields__").keys():
csv_fields.append(field)
return csv_fields
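generate_csv_fields relies on the __fields__ mapping that pydantic v1 (the version in use at the time) attaches to every BaseModel subclass. A tiny demonstration with a hypothetical model:

from pydantic import BaseModel

class Demo_Check_Output_CSV(BaseModel):  # hypothetical model
    provider: str
    check_id: str
    status: str

# Same introspection as generate_csv_fields above
print(list(Demo_Check_Output_CSV.__dict__.get("__fields__").keys()))
# ['provider', 'check_id', 'status']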
class Check_Output_CSV(BaseModel):
"""
Check_Output_CSV generates a finding's output in CSV format.
This is the base CSV output model for every provider.
"""
assessment_start_time: str
finding_unique_id: str
provider: str
check_id: str
check_title: str
check_type: str
status: str
status_extended: str
service_name: str
subservice_name: str
severity: str
resource_type: str
resource_details: str
resource_tags: list
description: str
risk: str
related_url: str
remediation_recommendation_text: str
remediation_recommendation_url: str
remediation_recommendation_code_nativeiac: str
remediation_recommendation_code_terraform: str
remediation_recommendation_code_cli: str
remediation_recommendation_code_other: str
categories: str
depends_on: str
related_to: str
notes: str
class Aws_Check_Output_CSV(Check_Output_CSV):
"""
Aws_Check_Output_CSV generates a finding's output in CSV format for the AWS provider.
"""
profile: str
account_id: int
account_name: Optional[str]
account_email: Optional[str]
account_arn: Optional[str]
account_org: Optional[str]
account_tags: Optional[str]
region: str
resource_id: str
resource_arn: str
class Azure_Check_Output_CSV(Check_Output_CSV):
"""
Azure_Check_Output_CSV generates a finding's output in CSV format for the Azure provider.
"""
tenant_domain: str = ""
subscription: str = ""
resource_id: str = ""
resource_name: str = ""
def generate_provider_output_json(provider: str, finding, audit_info, mode: str, fd):
"""
generate_provider_output_json dynamically loads the JSON output model for the selected provider, fills it with the finding data and returns the Check_Output_JSON object.
"""
try:
# Dynamically load the provider's Check_Output model for the JSON format
finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
output_model = getattr(importlib.import_module(__name__), finding_output_model)
# Instantiate the class for the cloud provider
finding_output = output_model(**finding.check_metadata.dict())
# Fill common fields
finding_output.AssessmentStartTime = timestamp.isoformat()
finding_output.Status = finding.status
finding_output.StatusExtended = finding.status_extended
finding_output.ResourceDetails = finding.resource_details
if provider == "azure":
finding_output.Tenant_Domain = audit_info.identity.domain
finding_output.Subscription = finding.subscription
finding_output.ResourceId = finding.resource_id
finding_output.ResourceName = finding.resource_name
if provider == "aws":
finding_output.Profile = audit_info.profile
finding_output.AccountId = audit_info.audited_account
finding_output.Region = finding.region
finding_output.ResourceId = finding.resource_id
finding_output.ResourceArn = finding.resource_arn
if audit_info.organizations_metadata:
finding_output.OrganizationsInfo = (
audit_info.organizations_metadata.__dict__
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit()
else:
return finding_output
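The JSON path builds the output model straight from the check metadata dict and then fills the runtime fields. A minimal pydantic sketch of that construction pattern, using an illustrative model rather than the real Check_Output_JSON:

from pydantic import BaseModel

class Demo_Check_Output_JSON(BaseModel):  # hypothetical model
    Provider: str
    CheckID: str
    Status: str = ""
    StatusExtended: str = ""

metadata = {"Provider": "aws", "CheckID": "demo_check", "Severity": "low"}
finding_output = Demo_Check_Output_JSON(**metadata)  # extra keys are ignored by default
finding_output.Status = "PASS"
finding_output.StatusExtended = "Demo resource is compliant."
print(finding_output.json(indent=4))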
class Check_Output_JSON(BaseModel):
"""
Check_Output_JSON generates a finding's output in JSON format.
This is the base JSON output model for every provider.
"""
AssessmentStartTime: str = ""
FindingUniqueId: str = ""
Provider: str
Profile: str = ""
AccountId: str = ""
OrganizationsInfo: Optional[AWS_Organizations_Info]
Region: str = ""
CheckID: str
CheckTitle: str
CheckType: List[str]
@@ -32,8 +266,6 @@ class Check_Output_JSON(BaseModel):
Status: str = ""
StatusExtended: str = ""
Severity: str
ResourceId: str = ""
ResourceArn: str = ""
ResourceType: str
ResourceDetails: str = ""
Tags: dict
@@ -45,7 +277,87 @@ class Check_Output_JSON(BaseModel):
DependsOn: List[str]
RelatedTo: List[str]
Notes: str
# Compliance: List[ComplianceItem]
class Aws_Check_Output_JSON(Check_Output_JSON):
"""
Aws_Check_Output_JSON generates a finding's output in JSON format for the AWS provider.
"""
Profile: str = ""
AccountId: str = ""
OrganizationsInfo: Optional[AWS_Organizations_Info]
Region: str = ""
ResourceId: str = ""
ResourceArn: str = ""
def __init__(self, **metadata):
super().__init__(**metadata)
class Azure_Check_Output_JSON(Check_Output_JSON):
"""
Azure_Check_Output_JSON generates a finding's output in JSON format for the Azure provider.
"""
Tenant_Domain: str = ""
Subscription: str = ""
ResourceId: str = ""
ResourceName: str = ""
def __init__(self, **metadata):
super().__init__(**metadata)
class Check_Output_CSV_ENS_RD2022(BaseModel):
"""
Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV ENS RD2022 format.
"""
Provider: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_IdGrupoControl: str
Requirements_Attributes_Marco: str
Requirements_Attributes_Categoria: str
Requirements_Attributes_DescripcionControl: str
Requirements_Attributes_Nivel: str
Requirements_Attributes_Tipo: str
Requirements_Attributes_Dimensiones: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
class Check_Output_CSV_CIS(BaseModel):
"""
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
"""
Provider: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_References: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
# JSON ASFF Output
@@ -72,6 +384,10 @@ class Compliance(BaseModel):
class Check_Output_JSON_ASFF(BaseModel):
"""
Check_Output_JSON_ASFF generates a finding's output in JSON ASFF format.
"""
SchemaVersion: str = "2018-10-08"
Id: str = ""
ProductArn: str = ""
@@ -89,213 +405,3 @@ class Check_Output_JSON_ASFF(BaseModel):
Resources: List[Resource] = None
Compliance: Compliance = None
Remediation: dict = None
class Check_Output_CSV_ENS_RD2022(BaseModel):
Provider: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_IdGrupoControl: str
Requirements_Attributes_Marco: str
Requirements_Attributes_Categoria: str
Requirements_Attributes_DescripcionControl: str
Requirements_Attributes_Nivel: str
Requirements_Attributes_Tipo: str
Requirements_Attributes_Dimensiones: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
class Check_Output_CSV_CIS(BaseModel):
Provider: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_References: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
@dataclass
class Check_Output_CSV:
assessment_start_time: str
finding_unique_id: str
provider: str
profile: str
account_id: int
account_name: str
account_email: str
account_arn: str
account_org: str
account_tags: str
region: str
check_id: str
check_title: str
check_type: str
status: str
status_extended: str
service_name: str
subservice_name: str
severity: str
resource_id: str
resource_arn: str
resource_type: str
resource_details: str
resource_tags: list
description: dict
risk: list
related_url: list
remediation_recommendation_text: str
remediation_recommendation_url: list
remediation_recommendation_code_nativeiac: str
remediation_recommendation_code_terraform: str
remediation_recommendation_code_cli: str
remediation_recommendation_code_other: str
categories: str
depends_on: str
related_to: str
notes: str
# compliance: str
def get_csv_header(self):
csv_header = []
for key in asdict(self):
csv_header = csv_header.append(key)
return csv_header
def __init__(
self,
account: str,
profile: str,
report: Check_Report,
organizations: AWS_Organizations_Info,
):
self.assessment_start_time = timestamp.isoformat()
self.finding_unique_id = ""
self.provider = report.check_metadata.Provider
self.profile = profile
self.account_id = account
if organizations:
self.account_name = organizations.account_details_name
self.account_email = organizations.account_details_email
self.account_arn = organizations.account_details_arn
self.account_org = organizations.account_details_org
self.account_tags = organizations.account_details_tags
self.region = report.region
self.check_id = report.check_metadata.CheckID
self.check_title = report.check_metadata.CheckTitle
self.check_type = report.check_metadata.CheckType
self.status = report.status
self.status_extended = report.status_extended
self.service_name = report.check_metadata.ServiceName
self.subservice_name = report.check_metadata.SubServiceName
self.severity = report.check_metadata.Severity
self.resource_id = report.resource_id
self.resource_arn = report.resource_arn
self.resource_type = report.check_metadata.ResourceType
self.resource_details = report.resource_details
self.resource_tags = report.resource_tags
self.description = report.check_metadata.Description
self.risk = report.check_metadata.Risk
self.related_url = report.check_metadata.RelatedUrl
self.remediation_recommendation_text = (
report.check_metadata.Remediation.Recommendation.Text
)
self.remediation_recommendation_url = (
report.check_metadata.Remediation.Recommendation.Url
)
self.remediation_recommendation_code_nativeiac = (
report.check_metadata.Remediation.Code.NativeIaC
)
self.remediation_recommendation_code_terraform = (
report.check_metadata.Remediation.Code.Terraform
)
self.remediation_recommendation_code_cli = (
report.check_metadata.Remediation.Code.CLI
)
self.remediation_recommendation_code_other = (
report.check_metadata.Remediation.Code.Other
)
self.categories = self.__unroll_list__(report.check_metadata.Categories)
self.depends_on = self.__unroll_list__(report.check_metadata.DependsOn)
self.related_to = self.__unroll_list__(report.check_metadata.RelatedTo)
self.notes = report.check_metadata.Notes
# self.compliance = self.__unroll_compliance__(report.check_metadata.Compliance)
def __unroll_list__(self, listed_items: list):
unrolled_items = ""
separator = "|"
for item in listed_items:
if not unrolled_items:
unrolled_items = f"{item}"
else:
unrolled_items = f"{unrolled_items}{separator}{item}"
return unrolled_items
def __unroll_dict__(self, dict_items: dict):
unrolled_items = ""
separator = "|"
for key, value in dict_items.items():
unrolled_item = f"{key}:{value}"
if not unrolled_items:
unrolled_items = f"{unrolled_item}"
else:
unrolled_items = f"{unrolled_items}{separator}{unrolled_item}"
return unrolled_items
def __unroll_compliance__(self, compliance: list):
compliance_frameworks = []
# fill list of dataclasses
for item in compliance:
compliance_framework = Compliance_Framework(
Framework=item.Framework,
Version=item.Version,
Group=item.Group,
Control=item.Control,
)
compliance_frameworks.append(compliance_framework)
# iterate over list of dataclasses to output info
unrolled_compliance = ""
groups = ""
controls = ""
item_separator = ","
framework_separator = "|"
generic_separator = "/"
for framework in compliance_frameworks:
for group in framework.Group:
if groups:
groups = f"{groups}{generic_separator}"
groups = f"{groups}{group}"
for control in framework.Control:
if controls:
controls = f"{controls}{generic_separator}"
controls = f"{controls}{control}"
if unrolled_compliance:
unrolled_compliance = f"{unrolled_compliance}{framework_separator}"
unrolled_compliance = f"{unrolled_compliance}{framework.Framework}{item_separator}{framework.Version}{item_separator}{groups}{item_separator}{controls}"
# unset groups and controls for next framework
controls = ""
groups = ""
return unrolled_compliance

View File

@@ -18,45 +18,67 @@ from prowler.config.config import (
orange_color,
prowler_version,
timestamp,
timestamp_iso,
timestamp_utc,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
Check_Output_CSV,
Aws_Check_Output_CSV,
Azure_Check_Output_CSV,
Check_Output_CSV_CIS,
Check_Output_CSV_ENS_RD2022,
Check_Output_JSON,
Check_Output_JSON_ASFF,
Compliance,
ProductFields,
Resource,
Severity,
generate_csv_fields,
generate_provider_output_csv,
generate_provider_output_json,
)
from prowler.lib.utils.utils import file_exists, hash_sha512, open_file
from prowler.providers.aws.lib.allowlist.allowlist import is_allowlisted
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import send_to_security_hub
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.common.outputs import Provider_Output_Options
def stdout_report(finding, color, verbose, is_quiet):
if finding.check_metadata.Provider == "aws":
details = finding.region
if finding.check_metadata.Provider == "azure":
details = finding.check_metadata.ServiceName
if is_quiet and "FAIL" in finding.status:
print(
f"\t{color}{finding.status}{Style.RESET_ALL} {details}: {finding.status_extended}"
)
elif not is_quiet and verbose:
print(
f"\t{color}{finding.status}{Style.RESET_ALL} {details}: {finding.status_extended}"
)
def report(check_findings, output_options, audit_info):
try:
# Sort check findings
check_findings.sort(key=lambda x: x.region)
# TO-DO Generic Function
if isinstance(audit_info, AWS_Audit_Info):
check_findings.sort(key=lambda x: x.region)
if isinstance(audit_info, Azure_Audit_Info):
check_findings.sort(key=lambda x: x.subscription)
# Generate the required output files
# csv_fields = []
file_descriptors = {}
if output_options.output_modes:
if isinstance(audit_info, AWS_Audit_Info):
# We have to create the required output files
file_descriptors = fill_file_descriptors(
output_options.output_modes,
output_options.output_directory,
output_options.output_filename,
audit_info,
)
# if isinstance(audit_info, AWS_Audit_Info):
# We have to create the required output files
file_descriptors = fill_file_descriptors(
output_options.output_modes,
output_options.output_directory,
output_options.output_filename,
audit_info,
)
if check_findings:
for finding in check_findings:
@@ -72,15 +94,12 @@ def report(check_findings, output_options, audit_info):
finding.status = "WARNING"
# Print findings by stdout
color = set_report_color(finding.status)
if output_options.is_quiet and "FAIL" in finding.status:
print(
f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
)
elif not output_options.is_quiet and output_options.verbose:
print(
f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
)
stdout_report(
finding, color, output_options.verbose, output_options.is_quiet
)
if file_descriptors:
# AWS specific outputs
if finding.check_metadata.Provider == "aws":
if "ens_rd2022_aws" in output_options.output_modes:
# We have to retrieve all the check's compliance requirements
@@ -209,32 +228,10 @@ def report(check_findings, output_options, audit_info):
)
csv_writer.writerow(compliance_row.__dict__)
if "csv" in file_descriptors:
finding_output = Check_Output_CSV(
audit_info.audited_account,
audit_info.profile,
finding,
audit_info.organizations_metadata,
)
csv_writer = DictWriter(
file_descriptors["csv"],
fieldnames=generate_csv_fields(Check_Output_CSV),
delimiter=";",
)
csv_writer.writerow(finding_output.__dict__)
if "html" in file_descriptors:
fill_html(file_descriptors["html"], audit_info, finding)
if "json" in file_descriptors:
finding_output = Check_Output_JSON(
**finding.check_metadata.dict()
)
fill_json(finding_output, audit_info, finding)
json.dump(
finding_output.dict(),
file_descriptors["json"],
indent=4,
)
file_descriptors["json"].write(",")
file_descriptors["html"].write("")
if "json-asff" in file_descriptors:
finding_output = Check_Output_JSON_ASFF()
@@ -247,16 +244,38 @@ def report(check_findings, output_options, audit_info):
)
file_descriptors["json-asff"].write(",")
if "html" in file_descriptors:
fill_html(file_descriptors["html"], audit_info, finding)
file_descriptors["html"].write("")
# Check whether findings need to be sent to Security Hub
if output_options.security_hub_enabled:
send_to_security_hub(
finding.region, finding_output, audit_info.audit_session
)
# Common outputs
if "csv" in file_descriptors:
csv_writer, finding_output = generate_provider_output_csv(
finding.check_metadata.Provider,
finding,
audit_info,
"csv",
file_descriptors["csv"],
)
csv_writer.writerow(finding_output.__dict__)
if "json" in file_descriptors:
finding_output = generate_provider_output_json(
finding.check_metadata.Provider,
finding,
audit_info,
"json",
file_descriptors["json"],
)
json.dump(
finding_output.dict(),
file_descriptors["json"],
indent=4,
)
file_descriptors["json"].write(",")
else: # No service resources in the whole account
color = set_report_color("INFO")
if not output_options.is_quiet and output_options.verbose:
@@ -320,12 +339,20 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
for output_mode in output_modes:
if output_mode == "csv":
filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV,
)
if isinstance(audit_info, AWS_Audit_Info):
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Aws_Check_Output_CSV,
)
if isinstance(audit_info, Azure_Audit_Info):
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Azure_Check_Output_CSV,
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "json":
@@ -335,42 +362,47 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "json-asff":
filename = (
f"{output_directory}/{output_filename}{json_asff_file_suffix}"
)
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
if isinstance(audit_info, AWS_Audit_Info):
if output_mode == "html":
filename = f"{output_directory}/{output_filename}{html_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "json-asff":
filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "ens_rd2022_aws":
filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_ENS_RD2022
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "html":
filename = (
f"{output_directory}/{output_filename}{html_file_suffix}"
)
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "cis_1.5_aws":
filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "ens_rd2022_aws":
filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV_ENS_RD2022,
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "cis_1.4_aws":
filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "cis_1.5_aws":
filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "cis_1.4_aws":
filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -397,32 +429,6 @@ def set_report_color(status: str) -> str:
return color
def generate_csv_fields(format: Any) -> list[str]:
"""Generates the CSV headers for the given class"""
csv_fields = []
for field in format.__dict__.get("__annotations__").keys():
csv_fields.append(field)
return csv_fields
def fill_json(finding_output, audit_info, finding):
finding_output.AssessmentStartTime = timestamp_iso
finding_output.FindingUniqueId = ""
finding_output.Profile = audit_info.profile
finding_output.AccountId = audit_info.audited_account
if audit_info.organizations_metadata:
finding_output.OrganizationsInfo = audit_info.organizations_metadata.__dict__
finding_output.Region = finding.region
finding_output.Status = finding.status
finding_output.StatusExtended = finding.status_extended
finding_output.ResourceId = finding.resource_id
finding_output.ResourceArn = finding.resource_arn
finding_output.ResourceDetails = finding.resource_details
return finding_output
def fill_json_asff(finding_output, audit_info, finding):
# Check if there are no resources in the finding
if finding.resource_id == "":
@@ -680,6 +686,9 @@ def display_compliance_table(
and compliance.Provider == "AWS"
and compliance.Version == "RD2022"
):
compliance_version = compliance.Version
compliance_fm = compliance.Framework
compliance_provider = compliance.Provider
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
marco_categoria = (
@@ -729,11 +738,11 @@ def display_compliance_table(
)
if fail_count + pass_count < 0:
print(
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}.\n"
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm} {compliance_version} - {compliance_provider}{Style.RESET_ALL}.\n"
)
else:
print(
f"\nEstado de Cumplimiento de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:"
f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_fm} {compliance_version} - {compliance_provider}{Style.RESET_ALL}:"
)
overview_table = [
[
@@ -743,7 +752,7 @@ def display_compliance_table(
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
print(
f"\nResultados de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:"
f"\nResultados de {Fore.YELLOW}{compliance_fm} {compliance_version} - {compliance_provider}{Style.RESET_ALL}:"
)
print(
tabulate(
@@ -774,6 +783,7 @@ def display_compliance_table(
compliance_framework
):
compliance_version = compliance.Version
compliance_fm = compliance.Framework
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
section = attribute["Section"]
@@ -822,11 +832,11 @@ def display_compliance_table(
)
if fail_count + pass_count < 0:
print(
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance.Framework}-{compliance.Version}{Style.RESET_ALL}.\n"
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL}.\n"
)
else:
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance.Framework}-{compliance_version}{Style.RESET_ALL} Framework:"
f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
@@ -836,7 +846,7 @@ def display_compliance_table(
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
print(
f"\nFramework {Fore.YELLOW}{compliance.Framework}-{compliance_version}{Style.RESET_ALL} Results:"
f"\nFramework {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL} Results:"
)
print(
tabulate(

View File

@@ -212,5 +212,5 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory:
csv_file.close()
print("\nMore details in files:")
print(f" - CSV: {Fore.GREEN}{output_file+csv_file_suffix}{Style.RESET_ALL}")
print(f" - JSON: {Fore.GREEN}{output_file+json_file_suffix}{Style.RESET_ALL}")
print(f" - CSV: {output_file+csv_file_suffix}")
print(f" - JSON: {output_file+json_file_suffix}")

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_client import (
accessanalyzer_client,
)
@@ -8,7 +8,7 @@ class accessanalyzer_enabled_without_findings(Check):
def execute(self):
findings = []
for analyzer in accessanalyzer_client.analyzers:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = analyzer.region
if analyzer.status == "ACTIVE":
if analyzer.findings_count > 0:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.account.account_client import account_client
# This check has no findings since it is manual
@@ -6,7 +6,7 @@ from prowler.providers.aws.services.account.account_client import account_client
class account_maintain_current_contact_details(Check):
def execute(self):
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = account_client.region
report.resource_id = account_client.audited_account
report.status = "INFO"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.account.account_client import account_client
# This check has no findings since it is manual
@@ -6,7 +6,7 @@ from prowler.providers.aws.services.account.account_client import account_client
class account_security_contact_information_is_registered(Check):
def execute(self):
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = account_client.region
report.resource_id = account_client.audited_account
report.status = "INFO"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.account.account_client import account_client
# This check has no findings since it is manual
@@ -6,7 +6,7 @@ from prowler.providers.aws.services.account.account_client import account_client
class account_security_questions_are_registered_in_the_aws_account(Check):
def execute(self):
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = account_client.region
report.resource_id = account_client.audited_account
report.status = "INFO"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.acm.acm_client import acm_client
DAYS_TO_EXPIRE_THRESHOLD = 7
@@ -8,7 +8,7 @@ class acm_certificates_expiration_check(Check):
def execute(self):
findings = []
for certificate in acm_client.certificates:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = certificate.region
if certificate.expiration_days > DAYS_TO_EXPIRE_THRESHOLD:
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.acm.acm_client import acm_client
@@ -6,7 +6,7 @@ class acm_certificates_transparency_logs_enabled(Check):
def execute(self):
findings = []
for certificate in acm_client.certificates:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = certificate.region
if certificate.type == "IMPORTED":
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigateway.apigateway_client import (
apigateway_client,
)
@@ -8,7 +8,7 @@ class apigateway_authorizers_enabled(Check):
def execute(self):
findings = []
for rest_api in apigateway_client.rest_apis:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = rest_api.region
if rest_api.authorizer:
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigateway.apigateway_client import (
apigateway_client,
)
@@ -9,7 +9,7 @@ class apigateway_client_certificate_enabled(Check):
findings = []
for rest_api in apigateway_client.rest_apis:
for stage in rest_api.stages:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
if stage.client_certificate:
report.status = "PASS"
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} in stage {stage.name} has client certificate enabled."

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigateway.apigateway_client import (
apigateway_client,
)
@@ -8,7 +8,7 @@ class apigateway_endpoint_public(Check):
def execute(self):
findings = []
for rest_api in apigateway_client.rest_apis:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = rest_api.region
if rest_api.public_endpoint:
report.status = "FAIL"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigateway.apigateway_client import (
apigateway_client,
)
@@ -8,7 +8,7 @@ class apigateway_logging_enabled(Check):
def execute(self):
findings = []
for rest_api in apigateway_client.rest_apis:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = rest_api.region
for stage in rest_api.stages:
if stage.logging:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigateway.apigateway_client import (
apigateway_client,
)
@@ -8,7 +8,7 @@ class apigateway_waf_acl_attached(Check):
def execute(self):
findings = []
for rest_api in apigateway_client.rest_apis:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = rest_api.region
for stage in rest_api.stages:
if stage.waf:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_client import (
apigatewayv2_client,
)
@@ -8,7 +8,7 @@ class apigatewayv2_access_logging_enabled(Check):
def execute(self):
findings = []
for api in apigatewayv2_client.apis:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = api.region
for stage in api.stages:
if stage.logging:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_client import (
apigatewayv2_client,
)
@@ -8,7 +8,7 @@ class apigatewayv2_authorizers_enabled(Check):
def execute(self):
findings = []
for api in apigatewayv2_client.apis:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = api.region
if api.authorizer:
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.appstream.appstream_client import appstream_client
@@ -10,7 +10,7 @@ class appstream_fleet_default_internet_access_disabled(Check):
"""Execute the appstream_fleet_default_internet_access_disabled check"""
findings = []
for fleet in appstream_client.fleets:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = fleet.region
report.resource_id = fleet.name
report.resource_arn = fleet.arn

View File

@@ -1,5 +1,5 @@
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.appstream.appstream_client import appstream_client
max_session_duration_seconds = get_config_var("max_session_duration_seconds")
@@ -13,7 +13,7 @@ class appstream_fleet_maximum_session_duration(Check):
"""Execute the appstream_fleet_maximum_session_duration check"""
findings = []
for fleet in appstream_client.fleets:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = fleet.region
report.resource_id = fleet.name
report.resource_arn = fleet.arn

View File

@@ -1,5 +1,5 @@
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.appstream.appstream_client import appstream_client
max_disconnect_timeout_in_seconds = get_config_var("max_disconnect_timeout_in_seconds")
@@ -13,7 +13,7 @@ class appstream_fleet_session_disconnect_timeout(Check):
"""Execute the appstream_fleet_maximum_session_duration check"""
findings = []
for fleet in appstream_client.fleets:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = fleet.region
report.resource_id = fleet.name
report.resource_arn = fleet.arn

View File

@@ -1,5 +1,5 @@
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.appstream.appstream_client import appstream_client
max_idle_disconnect_timeout_in_seconds = get_config_var(
@@ -15,7 +15,7 @@ class appstream_fleet_session_idle_disconnect_timeout(Check):
"""Execute the appstream_fleet_session_idle_disconnect_timeout check"""
findings = []
for fleet in appstream_client.fleets:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = fleet.region
report.resource_id = fleet.name
report.resource_arn = fleet.arn

View File

@@ -5,7 +5,7 @@ from base64 import b64decode
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.autoscaling.autoscaling_client import (
autoscaling_client,
)
@@ -15,7 +15,7 @@ class autoscaling_find_secrets_ec2_launch_configuration(Check):
def execute(self):
findings = []
for configuration in autoscaling_client.launch_configurations:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = configuration.region
report.resource_id = configuration.name
report.resource_arn = configuration.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
@@ -9,7 +9,7 @@ class awslambda_function_invoke_api_operations_cloudtrail_logging_enabled(Check)
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -4,7 +4,7 @@ import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
@@ -12,7 +12,7 @@ class awslambda_function_no_secrets_in_code(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -5,7 +5,7 @@ import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
@@ -13,7 +13,7 @@ class awslambda_function_no_secrets_in_variables(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
@@ -6,7 +6,7 @@ class awslambda_function_not_publicly_accessible(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
@@ -6,7 +6,7 @@ class awslambda_function_url_cors_policy(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
from prowler.providers.aws.services.awslambda.awslambda_service import AuthType
@@ -7,7 +7,7 @@ class awslambda_function_url_public(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -1,5 +1,5 @@
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
@@ -7,7 +7,7 @@ class awslambda_function_using_supported_runtimes(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn

View File

@@ -4,7 +4,7 @@ import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudformation.cloudformation_client import (
cloudformation_client,
)
@@ -17,7 +17,7 @@ class cloudformation_outputs_find_secrets(Check):
"""Execute the cloudformation_outputs_find_secrets check"""
findings = []
for stack in cloudformation_client.stacks:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = stack.region
report.resource_id = stack.name
report.resource_arn = stack.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudformation.cloudformation_client import (
cloudformation_client,
)
@@ -12,7 +12,7 @@ class cloudformation_stacks_termination_protection_enabled(Check):
findings = []
for stack in cloudformation_client.stacks:
if not stack.is_nested_stack:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = stack.region
report.resource_id = stack.name
report.resource_arn = stack.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudfront.cloudfront_client import (
cloudfront_client,
)
@@ -8,7 +8,7 @@ class cloudfront_distributions_field_level_encryption_enabled(Check):
def execute(self):
findings = []
for distribution in cloudfront_client.distributions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudfront.cloudfront_client import (
cloudfront_client,
)
@@ -11,7 +11,7 @@ class cloudfront_distributions_geo_restrictions_enabled(Check):
def execute(self):
findings = []
for distribution in cloudfront_client.distributions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudfront.cloudfront_client import (
cloudfront_client,
)
@@ -11,7 +11,7 @@ class cloudfront_distributions_https_enabled(Check):
def execute(self):
findings = []
for distribution in cloudfront_client.distributions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudfront.cloudfront_client import (
cloudfront_client,
)
@@ -8,7 +8,7 @@ class cloudfront_distributions_logging_enabled(Check):
def execute(self):
findings = []
for distribution in cloudfront_client.distributions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudfront.cloudfront_client import (
cloudfront_client,
)
@@ -11,7 +11,7 @@ class cloudfront_distributions_using_deprecated_ssl_protocols(Check):
def execute(self):
findings = []
for distribution in cloudfront_client.distributions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudfront.cloudfront_client import (
cloudfront_client,
)
@@ -8,7 +8,7 @@ class cloudfront_distributions_using_waf(Check):
def execute(self):
findings = []
for distribution in cloudfront_client.distributions.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id

View File

@@ -1,6 +1,6 @@
from datetime import datetime, timedelta, timezone
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -13,7 +13,7 @@ class cloudtrail_cloudwatch_logging_enabled(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -9,7 +9,7 @@ class cloudtrail_kms_encryption_enabled(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -9,7 +9,7 @@ class cloudtrail_log_file_validation_enabled(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -11,7 +11,7 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket = trail.s3_bucket
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -11,7 +11,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket = trail.s3_bucket
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -9,7 +9,7 @@ class cloudtrail_multi_region_enabled(Check):
findings = []
actual_region = None
for trail in cloudtrail_client.trails:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = trail.region
if trail.name: # Check if there are trails in region
# Check if region has changed and add report of previous region

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -7,7 +7,7 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
class cloudtrail_s3_dataevents_read_enabled(Check):
def execute(self):
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = "No trails"
report.resource_arn = "No trails"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -7,7 +7,7 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
class cloudtrail_s3_dataevents_write_enabled(Check):
def execute(self):
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = "No trails"
report.resource_arn = "No trails"

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_changes_to_network_acls_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateNetworkAcl.+\$\.eventName\s*=\s*CreateNetworkAclEntry.+\$\.eventName\s*=\s*DeleteNetworkAcl.+\$\.eventName\s*=\s*DeleteNetworkAclEntry.+\$\.eventName\s*=\s*ReplaceNetworkAclEntry.+\$\.eventName\s*=\s*ReplaceNetworkAclAssociation"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_changes_to_network_gateways_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateCustomerGateway.+\$\.eventName\s*=\s*DeleteCustomerGateway.+\$\.eventName\s*=\s*AttachInternetGateway.+\$\.eventName\s*=\s*CreateInternetGateway.+\$\.eventName\s*=\s*DeleteInternetGateway.+\$\.eventName\s*=\s*DetachInternetGateway"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_changes_to_network_route_tables_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateRoute.+\$\.eventName\s*=\s*CreateRouteTable.+\$\.eventName\s*=\s*ReplaceRoute.+\$\.eventName\s*=\s*ReplaceRouteTableAssociation.+\$\.eventName\s*=\s*DeleteRouteTable.+\$\.eventName\s*=\s*DeleteRoute.+\$\.eventName\s*=\s*DisassociateRouteTable"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_changes_to_vpcs_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateVpc.+\$\.eventName\s*=\s*DeleteVpc.+\$\.eventName\s*=\s*ModifyVpcAttribute.+\$\.eventName\s*=\s*AcceptVpcPeeringConnection.+\$\.eventName\s*=\s*CreateVpcPeeringConnection.+\$\.eventName\s*=\s*DeleteVpcPeeringConnection.+\$\.eventName\s*=\s*RejectVpcPeeringConnection.+\$\.eventName\s*=\s*AttachClassicLinkVpc.+\$\.eventName\s*=\s*DetachClassicLinkVpc.+\$\.eventName\s*=\s*DisableVpcClassicLink.+\$\.eventName\s*=\s*EnableVpcClassicLink"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,11 +1,11 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.iam.iam_client import iam_client
class cloudwatch_cross_account_sharing_disabled(Check):
def execute(self):
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "PASS"
report.status_extended = "CloudWatch doesn't allows cross-account sharing"
report.resource_id = "CloudWatch-CrossAccountSharingRole"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
@@ -6,7 +6,7 @@ class cloudwatch_log_group_kms_encryption_enabled(Check):
def execute(self):
findings = []
for log_group in logs_client.log_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = log_group.region
report.resource_id = log_group.name
report.resource_arn = log_group.arn

View File

@@ -1,5 +1,5 @@
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
@@ -8,7 +8,7 @@ class cloudwatch_log_group_retention_policy_specific_days_enabled(Check):
findings = []
specific_retention_days = get_config_var("log_group_retention_days")
for log_group in logs_client.log_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = log_group.region
report.resource_id = log_group.name
report.resource_arn = log_group.arn

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -16,7 +16,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_change
def execute(self):
pattern = r"\$\.eventSource\s*=\s*config.amazonaws.com.+\$\.eventName\s*=\s*StopConfigurationRecorder.+\$\.eventName\s*=\s*DeleteDeliveryChannel.+\$\.eventName\s*=\s*PutDeliveryChannel.+\$\.eventName\s*=\s*PutConfigurationRecorder"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -16,7 +16,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_change
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateTrail.+\$\.eventName\s*=\s*UpdateTrail.+\$\.eventName\s*=\s*DeleteTrail.+\$\.eventName\s*=\s*StartLogging.+\$\.eventName\s*=\s*StopLogging"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_authentication_failures(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*ConsoleLogin.+\$\.errorMessage\s*=\s*Failed authentication"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_aws_organizations_changes(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*organizations\.amazonaws\.com.+\$\.eventName\s*=\s*AcceptHandshake.+\$\.eventName\s*=\s*AttachPolicy.+\$\.eventName\s*=\s*CancelHandshake.+\$\.eventName\s*=\s*CreateAccount.+\$\.eventName\s*=\s*CreateOrganization.+\$\.eventName\s*=\s*CreateOrganizationalUnit.+\$\.eventName\s*=\s*CreatePolicy.+\$\.eventName\s*=\s*DeclineHandshake.+\$\.eventName\s*=\s*DeleteOrganization.+\$\.eventName\s*=\s*DeleteOrganizationalUnit.+\$\.eventName\s*=\s*DeletePolicy.+\$\.eventName\s*=\s*EnableAllFeatures.+\$\.eventName\s*=\s*EnablePolicyType.+\$\.eventName\s*=\s*InviteAccountToOrganization.+\$\.eventName\s*=\s*LeaveOrganization.+\$\.eventName\s*=\s*DetachPolicy.+\$\.eventName\s*=\s*DisablePolicyType.+\$\.eventName\s*=\s*MoveAccount.+\$\.eventName\s*=\s*RemoveAccountFromOrganization.+\$\.eventName\s*=\s*UpdateOrganizationalUnit.+\$\.eventName\s*=\s*UpdatePolicy"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk(Chec
def execute(self):
pattern = r"\$\.eventSource\s*=\s*kms.amazonaws.com.+\$\.eventName\s*=\s*DisableKey.+\$\.eventName\s*=\s*ScheduleKeyDeletion"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_for_s3_bucket_policy_changes(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*s3.amazonaws.com.+\$\.eventName\s*=\s*PutBucketAcl.+\$\.eventName\s*=\s*PutBucketPolicy.+\$\.eventName\s*=\s*PutBucketCors.+\$\.eventName\s*=\s*PutBucketLifecycle.+\$\.eventName\s*=\s*PutBucketReplication.+\$\.eventName\s*=\s*DeleteBucketPolicy.+\$\.eventName\s*=\s*DeleteBucketCors.+\$\.eventName\s*=\s*DeleteBucketLifecycle.+\$\.eventName\s*=\s*DeleteBucketReplication"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_policy_changes(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*DeleteGroupPolicy.+\$\.eventName\s*=\s*DeleteRolePolicy.+\$\.eventName\s*=\s*DeleteUserPolicy.+\$\.eventName\s*=\s*PutGroupPolicy.+\$\.eventName\s*=\s*PutRolePolicy.+\$\.eventName\s*=\s*PutUserPolicy.+\$\.eventName\s*=\s*CreatePolicy.+\$\.eventName\s*=\s*DeletePolicy.+\$\.eventName\s*=\s*CreatePolicyVersion.+\$\.eventName\s*=\s*DeletePolicyVersion.+\$\.eventName\s*=\s*AttachRolePolicy.+\$\.eventName\s*=\s*DetachRolePolicy.+\$\.eventName\s*=\s*AttachUserPolicy.+\$\.eventName\s*=\s*DetachUserPolicy.+\$\.eventName\s*=\s*AttachGroupPolicy.+\$\.eventName\s*=\s*DetachGroupPolicy"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_root_usage(Check):
def execute(self):
pattern = r"\$\.userIdentity\.type\s*=\s*Root.+\$\.userIdentity\.invokedBy NOT EXISTS.+\$\.eventType\s*!=\s*AwsServiceEvent"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_security_group_changes(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*AuthorizeSecurityGroupIngress.+\$\.eventName\s*=\s*AuthorizeSecurityGroupEgress.+\$\.eventName\s*=\s*RevokeSecurityGroupIngress.+\$\.eventName\s*=\s*RevokeSecurityGroupEgress.+\$\.eventName\s*=\s*CreateSecurityGroup.+\$\.eventName\s*=\s*DeleteSecurityGroup"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_sign_in_without_mfa(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*ConsoleLogin.+\$\.additionalEventData\.MFAUsed\s*!=\s*Yes"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,6 +1,6 @@
import re
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_unauthorized_api_calls(Check):
def execute(self):
pattern = r"\$\.errorCode\s*=\s*\*UnauthorizedOperation.+\$\.errorCode\s*=\s*AccessDenied\*"
findings = []
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.codeartifact.codeartifact_client import (
codeartifact_client,
)
@@ -13,7 +13,7 @@ class codeartifact_packages_external_public_publishing_disabled(Check):
findings = []
for repository in codeartifact_client.repositories.values():
for package in repository.packages:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = repository.region
report.resource_id = package.name

View File

@@ -1,6 +1,6 @@
from datetime import datetime, timezone
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.codebuild.codebuild_client import codebuild_client
@@ -8,7 +8,7 @@ class codebuild_project_older_90_days(Check):
def execute(self):
findings = []
for project in codebuild_client.projects:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = project.region
report.resource_id = project.name
report.resource_arn = ""

View File

@@ -1,6 +1,6 @@
from re import search
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.codebuild.codebuild_client import codebuild_client
@@ -8,7 +8,7 @@ class codebuild_project_user_controlled_buildspec(Check):
def execute(self):
findings = []
for project in codebuild_client.projects:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = project.region
report.resource_id = project.name
report.resource_arn = ""

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.config.config_client import config_client
@@ -6,7 +6,7 @@ class config_recorder_all_regions_enabled(Check):
def execute(self):
findings = []
for recorder in config_client.recorders:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = recorder.region
report.resource_id = "" if not recorder.name else recorder.name
# Check if Config is enabled in region

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.directoryservice.directoryservice_client import (
directoryservice_client,
)
@@ -8,7 +8,7 @@ class directoryservice_directory_log_forwarding_enabled(Check):
def execute(self):
findings = []
for directory in directoryservice_client.directories.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = directory.region
report.resource_id = directory.id
if directory.log_subscriptions:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.directoryservice.directoryservice_client import (
directoryservice_client,
)
@@ -8,7 +8,7 @@ class directoryservice_directory_monitor_notifications(Check):
def execute(self):
findings = []
for directory in directoryservice_client.directories.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = directory.region
report.resource_id = directory.id
if directory.event_topics:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.directoryservice.directoryservice_client import (
directoryservice_client,
)
@@ -11,7 +11,7 @@ class directoryservice_directory_snapshots_limit(Check):
def execute(self):
findings = []
for directory in directoryservice_client.directories.values():
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = directory.region
report.resource_id = directory.id
if directory.snapshots_limits:

View File

@@ -1,6 +1,6 @@
from datetime import datetime
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.directoryservice.directoryservice_client import (
directoryservice_client,
)
@@ -14,7 +14,7 @@ class directoryservice_ldap_certificate_expiration(Check):
findings = []
for directory in directoryservice_client.directories.values():
for certificate in directory.certificates:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = directory.region
report.resource_id = certificate.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.directoryservice.directoryservice_client import (
directoryservice_client,
)
@@ -12,7 +12,7 @@ class directoryservice_radius_server_security_protocol(Check):
findings = []
for directory in directoryservice_client.directories.values():
if directory.radius_settings:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = directory.region
report.resource_id = directory.id
if (

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.directoryservice.directoryservice_client import (
directoryservice_client,
)
@@ -12,7 +12,7 @@ class directoryservice_supported_mfa_radius_enabled(Check):
findings = []
for directory in directoryservice_client.directories.values():
if directory.radius_settings:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = directory.region
report.resource_id = directory.id
if directory.radius_settings.status == RadiusStatus.Completed:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.dynamodb.dax_client import dax_client
@@ -6,7 +6,7 @@ class dynamodb_accelerator_cluster_encryption_enabled(Check):
def execute(self):
findings = []
for cluster in dax_client.clusters:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.resource_id = cluster.name
report.resource_arn = cluster.arn
report.region = cluster.region

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.dynamodb.dynamodb_client import dynamodb_client
@@ -6,7 +6,7 @@ class dynamodb_tables_kms_cmk_encryption_enabled(Check):
def execute(self):
findings = []
for table in dynamodb_client.tables:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.resource_id = table.name
report.resource_arn = table.arn
report.region = table.region

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.dynamodb.dynamodb_client import dynamodb_client
@@ -6,7 +6,7 @@ class dynamodb_tables_pitr_enabled(Check):
def execute(self):
findings = []
for table in dynamodb_client.tables:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.resource_id = table.name
report.resource_arn = table.arn
report.region = table.region

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_ami_public(Check):
def execute(self):
findings = []
for image in ec2_client.images:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = image.region
report.resource_id = image.id
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_ebs_default_encryption(Check):
def execute(self):
findings = []
for ebs_encryption in ec2_client.ebs_encryption_by_default:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = ebs_encryption.region
report.resource_id = "EBS Default Encryption"
report.status = "FAIL"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_ebs_public_snapshot(Check):
def execute(self):
findings = []
for snapshot in ec2_client.snapshots:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = snapshot.region
if not snapshot.public:
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_ebs_snapshots_encrypted(Check):
def execute(self):
findings = []
for snapshot in ec2_client.snapshots:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = snapshot.region
if snapshot.encrypted:
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_ebs_volume_encryption(Check):
def execute(self):
findings = []
for volume in ec2_client.volumes:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = volume.region
report.resource_id = volume.id
if volume.encrypted:

View File

@@ -1,7 +1,7 @@
import shodan
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.lib.logger import logger
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -13,7 +13,7 @@ class ec2_elastic_ip_shodan(Check):
if shodan_api_key:
api = shodan.Shodan(shodan_api_key)
for eip in ec2_client.elastic_ips:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = eip.region
if eip.public_ip:
try:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_elastic_ip_unassgined(Check):
def execute(self):
findings = []
for eip in ec2_client.elastic_ips:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = eip.region
if eip.public_ip:
report.resource_id = eip.public_ip

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_instance_imdsv2_enabled(Check):
def execute(self):
findings = []
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
report.resource_id = instance.id
report.status = "FAIL"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_instance_internet_facing_with_instance_profile(Check):
def execute(self):
findings = []
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
report.resource_id = instance.id
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ssm.ssm_client import ssm_client
@@ -7,7 +7,7 @@ class ec2_instance_managed_by_ssm(Check):
def execute(self):
findings = []
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
if not ssm_client.managed_instances.get(instance.id):
report.status = "FAIL"

View File

@@ -1,7 +1,7 @@
from datetime import datetime, timezone
from prowler.config.config import get_config_var
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -10,7 +10,7 @@ class ec2_instance_older_than_specific_days(Check):
findings = []
max_ec2_instance_age_in_days = get_config_var("max_ec2_instance_age_in_days")
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
report.resource_id = instance.id
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_instance_profile_attached(Check):
def execute(self):
findings = []
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
report.resource_id = instance.id
report.status = "FAIL"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -6,7 +6,7 @@ class ec2_instance_public_ip(Check):
def execute(self):
findings = []
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
if instance.public_ip:
report.status = "FAIL"

View File

@@ -5,7 +5,7 @@ from base64 import b64decode
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
@@ -13,7 +13,7 @@ class ec2_instance_secrets_user_data(Check):
def execute(self):
findings = []
for instance in ec2_client.instances:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = instance.region
report.resource_id = instance.id

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.network_acls import check_network_acl
@@ -9,7 +9,7 @@ class ec2_networkacl_allow_ingress_any_port(Check):
tcp_protocol = "-1"
check_port = 0
for network_acl in ec2_client.network_acls:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = network_acl.region
report.resource_id = network_acl.id
# If some entry allows it, that ACL is not securely configured

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.network_acls import check_network_acl
@@ -9,7 +9,7 @@ class ec2_networkacl_allow_ingress_tcp_port_22(Check):
tcp_protocol = "6"
check_port = 22
for network_acl in ec2_client.network_acls:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = network_acl.region
# If some entry allows it, that ACL is not securely configured
if not check_network_acl(network_acl.entries, tcp_protocol, check_port):

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.network_acls import check_network_acl
@@ -9,7 +9,7 @@ class ec2_networkacl_allow_ingress_tcp_port_3389(Check):
tcp_protocol = "6"
check_port = 3389
for network_acl in ec2_client.network_acls:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = network_acl.region
# If some entry allows it, that ACL is not securely configured
if not check_network_acl(network_acl.entries, tcp_protocol, check_port):

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.security_groups import check_security_group
@@ -7,7 +7,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_any_port(Check):
def execute(self):
findings = []
for security_group in ec2_client.security_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = security_group.region
report.status = "PASS"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) has not all ports open to the Internet."

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.security_groups import check_security_group
@@ -8,7 +8,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018(
findings = []
check_ports = [27017, 27018]
for security_group in ec2_client.security_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = security_group.region
report.resource_id = security_group.id
report.status = "PASS"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.security_groups import check_security_group
@@ -8,7 +8,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21(Check)
findings = []
check_ports = [20, 21]
for security_group in ec2_client.security_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = security_group.region
report.status = "PASS"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) has not FTP ports 20 and 21 open to the Internet."

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.security_groups import check_security_group
@@ -8,7 +8,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22(Check):
findings = []
check_ports = [22]
for security_group in ec2_client.security_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = security_group.region
report.status = "PASS"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) has not SSH port 22 open to the Internet."

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.models import Check, Check_Report
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
from prowler.providers.aws.services.ec2.lib.security_groups import check_security_group
@@ -8,7 +8,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389(Check):
findings = []
check_ports = [3389]
for security_group in ec2_client.security_groups:
report = Check_Report(self.metadata())
report = Check_Report_AWS(self.metadata())
report.region = security_group.region
report.status = "PASS"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) has not Microsoft RDP port 3389 open to the Internet."

Some files were not shown because too many files have changed in this diff.