mirror of https://github.com/ghndrx/prowler.git
synced 2026-02-10 14:55:00 +00:00

feat(compliance): Loader and Execute (#1465)
@@ -1,28 +1,34 @@
 repos:
   ## GENERAL
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.3.0
     hooks:
       - id: check-merge-conflict
      - id: check-yaml
-        args: ['--unsafe']
+        args: ["--unsafe"]
      - id: check-json
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: no-commit-to-branch
      - id: pretty-format-json
-        args: ['--autofix', --no-sort-keys, --no-ensure-ascii]
+        args: ["--autofix", --no-sort-keys, --no-ensure-ascii]
+
   ## BASH
   - repo: https://github.com/koalaman/shellcheck-precommit
     rev: v0.8.0
     hooks:
       - id: shellcheck

   ## PYTHON
   - repo: https://github.com/myint/autoflake
     rev: v1.7.7
     hooks:
       - id: autoflake
-        args: ['--in-place', '--remove-all-unused-imports', '--remove-unused-variable']
+        args:
+          [
+            "--in-place",
+            "--remove-all-unused-imports",
+            "--remove-unused-variable",
+          ]

   - repo: https://github.com/timothycrosley/isort
     rev: 5.10.1
@@ -40,9 +46,7 @@ repos:
     hooks:
       - id: flake8
         exclude: contrib
-        args: [
-          "--ignore=E266,W503,E203,E501,W605"
-        ]
+        args: ["--ignore=E266,W503,E203,E501,W605"]

   - repo: https://github.com/haizaar/check-pipfile-lock
     rev: v0.0.5
@@ -63,18 +67,18 @@ repos:

       - id: bandit
         name: bandit
-        description: 'Bandit is a tool for finding common security issues in Python code'
+        description: "Bandit is a tool for finding common security issues in Python code"
         entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/' -r .'
         language: system

       - id: safety
         name: safety
-        description: 'Safety is a tool that checks your installed dependencies for known security vulnerabilities'
+        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
         entry: bash -c 'safety check'
         language: system

       - id: vulture
         name: vulture
-        description: 'Vulture finds unused code in Python programs.'
+        description: "Vulture finds unused code in Python programs."
         entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
         language: system
@@ -39,17 +39,5 @@
         "othercheck3",
         "othercheck4"
     ],
-    "Notes": "additional information",
-    "Compliance": [
-        {
-            "Control": [
-                "4.1"
-            ],
-            "Framework": "CIS-AWS",
-            "Group": [
-                "level2"
-            ],
-            "Version": "1.4"
-        }
-    ]
+    "Notes": "additional information"
 }
compliance/aws/ens_rd2022_aws.json (new file, 1636 lines)
File diff suppressed because it is too large
@@ -15,6 +15,9 @@ banner_color = "\033[1;92m"
 # Groups
 groups_file = "groups.json"

+# Compliance
+compliance_specification_dir = "./compliance"
+
 # AWS services-regions matrix json
 aws_services_json_file = "providers/aws/aws_regions_by_service.json"
@@ -1,4 +1,7 @@
+import functools
 import importlib
+import os
 import sys
 from pkgutil import walk_packages
 from types import ModuleType
 from typing import Any
@@ -6,7 +9,8 @@ from typing import Any
 from alive_progress import alive_bar
 from colorama import Fore, Style

-from config.config import groups_file, orange_color
+from config.config import compliance_specification_dir, groups_file, orange_color
+from lib.check.compliance_models import load_compliance_framework
 from lib.check.models import Check, Output_From_Options, load_check_metadata
 from lib.logger import logger
 from lib.outputs.outputs import report
@@ -31,6 +35,29 @@ def bulk_load_checks_metadata(provider: str) -> dict:
     return bulk_check_metadata


+# Bulk load all compliance frameworks specification
+def bulk_load_compliance_frameworks(provider: str) -> dict:
+    """Bulk load all compliance frameworks specification into a dict"""
+    bulk_compliance_frameworks = {}
+    compliance_specification_dir_path = f"{compliance_specification_dir}/{provider}"
+    try:
+        for filename in os.listdir(compliance_specification_dir_path):
+            file_path = os.path.join(compliance_specification_dir_path, filename)
+            # Check if it is a file
+            if os.path.isfile(file_path):
+                # Open Compliance file in JSON
+                # cis_v1.4_aws.json --> cis_v1.4_aws
+                compliance_framework_name = filename.split(".json")[0]
+                # Store the compliance info
+                bulk_compliance_frameworks[
+                    compliance_framework_name
+                ] = load_compliance_framework(file_path)
+    except Exception as e:
+        logger.error(f"{e.__class__.__name__} -- {e}")
+
+    return bulk_compliance_frameworks
+
+
 # Exclude checks to run
 def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
     for check in excluded_checks:
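The loader above maps each spec file name (minus its .json suffix) to a parsed framework model. A minimal self-contained sketch of the same pattern, with json.load standing in for load_compliance_framework; the ./compliance/<provider>/*.json layout follows the commit, while the function name is hypothetical:

import json
import os

def load_frameworks(spec_dir: str, provider: str) -> dict:
    """Map each framework file name (without .json) to its parsed spec."""
    frameworks = {}
    provider_dir = os.path.join(spec_dir, provider)
    for filename in os.listdir(provider_dir):  # raises if the directory is missing
        file_path = os.path.join(provider_dir, filename)
        if os.path.isfile(file_path) and filename.endswith(".json"):
            name = filename.rsplit(".json", 1)[0]  # cis_v1.4_aws.json -> cis_v1.4_aws
            with open(file_path) as spec_file:
                frameworks[name] = json.load(spec_file)
    return frameworks

# e.g. load_frameworks("./compliance", "aws") -> {"ens_rd2022_aws": {...}, ...}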
@@ -101,16 +128,43 @@ def print_services(service_list: set):
         print(f"- {service}")


-def print_checks(provider: str, check_list: set, bulk_checks_metadata: dict):
+def print_compliance_frameworks(bulk_compliance_frameworks: dict):
+    print(
+        f"There are {Fore.YELLOW}{len(bulk_compliance_frameworks.keys())}{Style.RESET_ALL} available Compliance Frameworks: \n"
+    )
+    for framework in bulk_compliance_frameworks.keys():
+        print(f"\t- {Fore.YELLOW}{framework}{Style.RESET_ALL}")
+
+
+def print_compliance_requirements(bulk_compliance_frameworks: dict):
+    if bulk_compliance_frameworks and "ens_rd2022_aws" in bulk_compliance_frameworks:
+        print("Listing ENS RD2022 AWS Compliance Requirements:\n")
+        for compliance in bulk_compliance_frameworks.values():
+            for requirement in compliance.Requirements:
+                checks = ""
+                for check in requirement.Checks:
+                    checks += f" {Fore.YELLOW}\t\t{check}\n{Style.RESET_ALL}"
+                print(
+                    f"Requirement Id: {Fore.MAGENTA}{requirement.Id}{Style.RESET_ALL}\n\t- Description: {requirement.Description}\n\t- Checks:\n{checks}"
+                )
+
+
+def print_checks(
+    provider: str,
+    check_list: set,
+    bulk_checks_metadata: dict,
+):
     for check in check_list:
         try:
             print(
                 f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity}]{Style.RESET_ALL}"
             )
         except KeyError as error:
-            logger.error(
+            logger.critical(
                 f"Check {error} was not found for the {provider.upper()} provider"
             )
             sys.exit()

     print(
         f"\nThere are {Fore.YELLOW}{len(check_list)}{Style.RESET_ALL} available checks.\n"
     )
@@ -150,21 +204,51 @@ def load_checks_to_execute_from_groups(
     return checks_to_execute


+# Parse checks from compliance frameworks specification
+def parse_checks_from_compliance_framework(
+    compliance_frameworks: list, bulk_compliance_frameworks: dict
+) -> list:
+    """Parse checks from compliance frameworks specification"""
+    checks_to_execute = set()
+    try:
+        for framework in compliance_frameworks:
+            # compliance_framework_json["Requirements"][*]["Checks"]
+            compliance_framework_checks_list = [
+                requirement.Checks
+                for requirement in bulk_compliance_frameworks[framework].Requirements
+            ]
+            # Reduce nested list into a list
+            # Pythonic functional magic
+            compliance_framework_checks = functools.reduce(
+                lambda x, y: x + y, compliance_framework_checks_list
+            )
+            # Then union this list of checks with the initial one
+            checks_to_execute = checks_to_execute.union(compliance_framework_checks)
+    except Exception as e:
+        logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
+
+    return checks_to_execute
+
+
 # Recover all checks from the selected provider and service
 def recover_checks_from_provider(provider: str, service: str = None) -> list:
-    checks = []
-    modules = list_modules(provider, service)
-    for module_name in modules:
-        # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
-        check_name = module_name.name
-        # We need to exclude common shared libraries in services
-        if (
-            check_name.count(".") == 5
-            and "lib" not in check_name
-            and "test" not in check_name
-        ):
-            checks.append(check_name)
-    return checks
+    try:
+        checks = []
+        modules = list_modules(provider, service)
+        for module_name in modules:
+            # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
+            check_name = module_name.name
+            # We need to exclude common shared libraries in services
+            if (
+                check_name.count(".") == 5
+                and "lib" not in check_name
+                and "test" not in check_name
+            ):
+                checks.append(check_name)
+        return checks
+    except Exception as e:
+        logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
+        sys.exit()


 # List all available modules in the selected provider and service
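The reduce call in parse_checks_from_compliance_framework above flattens the per-requirement check lists into one sequence before taking the set union. A standalone sketch of that step, with itertools.chain shown as the usual linear-time alternative; the sample check names are adapted from the fixtures later in this diff:

import functools
import itertools

requirements_checks = [
    ["iam_avoid_root_usage"],
    ["guardduty_is_enabled", "vpc_flow_logs_enabled"],
    ["guardduty_is_enabled"],  # duplicates disappear in the set union
]

# list concatenation via reduce, as in the commit (quadratic for many lists)
flat = functools.reduce(lambda x, y: x + y, requirements_checks)

# equivalent flattening in linear time
flat_chain = list(itertools.chain.from_iterable(requirements_checks))

checks_to_execute = set().union(flat)
assert set(flat_chain) == checks_to_execute == {
    "iam_avoid_root_usage", "guardduty_is_enabled", "vpc_flow_logs_enabled"
}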
@@ -184,6 +268,7 @@ def import_check(check_path: str) -> ModuleType:
     return lib


+# Sets the Output_From_Options to be used in the output modes
 def set_output_options(
     quiet: bool,
     output_modes: list,
@@ -191,8 +276,10 @@ def set_output_options(
     security_hub_enabled: bool,
     output_filename: str,
     allowlist_file: str,
+    bulk_checks_metadata: dict,
     verbose: bool,
 ):
+    """Sets the Output_From_Options to be used in the output modes"""
     global output_options
     output_options = Output_From_Options(
         is_quiet=quiet,
@@ -201,6 +288,7 @@ def set_output_options(
         security_hub_enabled=security_hub_enabled,
         output_filename=output_filename,
         allowlist_file=allowlist_file,
+        bulk_checks_metadata=bulk_checks_metadata,
         verbose=verbose,
         # set input options here
     )
@@ -211,15 +299,15 @@ def run_check(check: Check, output_options: Output_From_Options) -> list:
     findings = []
     if output_options.verbose or output_options.is_quiet:
         print(
-            f"\nCheck ID: {check.checkID} - {Fore.MAGENTA}{check.serviceName}{Fore.YELLOW} [{check.severity}]{Style.RESET_ALL}"
+            f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
         )
-    logger.debug(f"Executing check: {check.checkID}")
+    logger.debug(f"Executing check: {check.CheckID}")
     try:
         findings = check.execute()
     except Exception as error:
-        print(f"Something went wrong in {check.checkID}, please use --log-level ERROR")
+        print(f"Something went wrong in {check.CheckID}, please use --log-level ERROR")
         logger.error(
-            f"{check.checkID} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            f"{check.CheckID} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
         )
     finally:
         return findings
@@ -264,13 +352,14 @@ def execute_checks(

             # If check does not exist in the provider or is from another provider
             except ModuleNotFoundError:
-                logger.error(
+                logger.critical(
                     f"Check '{check_name}' was not found for the {provider.upper()} provider"
                 )
+                bar.title = f"-> {Fore.RED}Scan was aborted!{Style.RESET_ALL}"
                 sys.exit()
             except Exception as error:
                 logger.error(
                     f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                 )
-        bar.title = f"-> {Fore.GREEN}Scan is completed!"
-        print(Style.RESET_ALL)
+        bar.title = f"-> {Fore.GREEN}Scan is completed!{Style.RESET_ALL}"
     return all_findings
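The abort path in execute_checks relies on importlib raising ModuleNotFoundError when a check name does not resolve to a module under the selected provider. A minimal sketch of that control flow; the check path in the comment is hypothetical:

import importlib
import sys

def import_check_module(check_path: str):
    """Import a check module, or abort the scan if it does not exist."""
    try:
        return importlib.import_module(check_path)
    except ModuleNotFoundError:
        print(f"Check '{check_path}' was not found")
        sys.exit(1)

# e.g. import_check_module(
#     "providers.aws.services.iam.iam_avoid_root_usage.iam_avoid_root_usage"
# )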
@@ -1,10 +1,13 @@
+import os
+from unittest import mock
+
 from lib.check.check import (
+    bulk_load_compliance_frameworks,
     exclude_checks_to_run,
     exclude_groups_to_run,
     exclude_services_to_run,
     load_checks_to_execute_from_groups,
+    parse_checks_from_compliance_framework,
     parse_checks_from_file,
     parse_groups_from_file,
 )
@@ -12,17 +15,6 @@ from lib.check.models import load_check_metadata


 class Test_Check:
-    # def test_import_check(self):
-    #     test_cases = [
-    #         {
-    #             "name": "Test valid check path",
-    #             "input": "providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials",
-    #             "expected": "providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials",
-    #         }
-    #     ]
-    #     for test in test_cases:
-    #         assert importlib.import_module(test["input"]).__name__ == test["expected"
-
     def test_parse_groups_from_file(self):
         test_cases = [
             {
@@ -222,3 +214,75 @@ class Test_Check:
                 exclude_services_to_run(checks_to_run, excluded_services, provider)
                 == test["expected"]
             )
+
+    def test_parse_checks_from_compliance_framework_two(self):
+        test_case = {
+            "input": {"compliance_frameworks": ["cis_v1.4_aws", "ens_v3_aws"]},
+            "expected": {
+                "vpc_flow_logs_enabled",
+                "ec2_ebs_snapshot_encryption",
+                "iam_user_mfa_enabled_console_access",
+                "cloudtrail_multi_region_enabled",
+                "ec2_elbv2_insecure_ssl_ciphers",
+                "guardduty_is_enabled",
+                "s3_bucket_default_encryption",
+                "cloudfront_distributions_https_enabled",
+                "iam_avoid_root_usage",
+                "s3_bucket_secure_transport_policy",
+            },
+        }
+        with mock.patch(
+            "lib.check.check.compliance_specification_dir",
+            new=f"{os.path.dirname(os.path.realpath(__file__))}/fixtures",
+        ):
+            provider = "aws"
+            bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
+            compliance_frameworks = test_case["input"]["compliance_frameworks"]
+            assert (
+                parse_checks_from_compliance_framework(
+                    compliance_frameworks, bulk_compliance_frameworks
+                )
+                == test_case["expected"]
+            )
+
+    def test_parse_checks_from_compliance_framework_one(self):
+        test_case = {
+            "input": {"compliance_frameworks": ["cis_v1.4_aws"]},
+            "expected": {
+                "iam_user_mfa_enabled_console_access",
+                "s3_bucket_default_encryption",
+                "iam_avoid_root_usage",
+            },
+        }
+        with mock.patch(
+            "lib.check.check.compliance_specification_dir",
+            new=f"{os.path.dirname(os.path.realpath(__file__))}/fixtures",
+        ):
+            provider = "aws"
+            bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
+            compliance_frameworks = test_case["input"]["compliance_frameworks"]
+            assert (
+                parse_checks_from_compliance_framework(
+                    compliance_frameworks, bulk_compliance_frameworks
+                )
+                == test_case["expected"]
+            )
+
+    def test_parse_checks_from_compliance_framework_no_compliance(self):
+        test_case = {
+            "input": {"compliance_frameworks": []},
+            "expected": set(),
+        }
+        with mock.patch(
+            "lib.check.check.compliance_specification_dir",
+            new=f"{os.path.dirname(os.path.realpath(__file__))}/fixtures",
+        ):
+            provider = "aws"
+            bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
+            compliance_frameworks = test_case["input"]["compliance_frameworks"]
+            assert (
+                parse_checks_from_compliance_framework(
+                    compliance_frameworks, bulk_compliance_frameworks
+                )
+                == test_case["expected"]
+            )
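The three tests above work because mock.patch(..., new=...) swaps a module-level constant for the duration of the with block, so bulk_load_compliance_frameworks reads the fixtures directory instead of the real ./compliance tree. A self-contained sketch of that patching pattern, using a stand-in constant:

from unittest import mock

# stand-in for a module-level constant such as compliance_specification_dir
DATA_DIR = "./compliance"

def data_dir() -> str:
    return DATA_DIR

with mock.patch(f"{__name__}.DATA_DIR", new="/tmp/fixtures"):
    assert data_dir() == "/tmp/fixtures"  # patched inside the block
assert data_dir() == "./compliance"  # automatically restored afterwards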
@@ -1,6 +1,7 @@
 from config.config import groups_file
-from lib.check.check import (
+from lib.check.check import (  # load_checks_to_execute_from_compliance_framework,
     load_checks_to_execute_from_groups,
+    parse_checks_from_compliance_framework,
     parse_checks_from_file,
     parse_groups_from_file,
     recover_checks_from_provider,
@@ -8,18 +9,20 @@ from lib.check.check import (
 from lib.logger import logger


 # Generate the list of checks to execute
-# test this function
+# PENDING Test for this function
 def load_checks_to_execute(
     bulk_checks_metadata: dict,
+    bulk_compliance_frameworks: dict,
     checks_file: str,
     check_list: list,
     service_list: list,
     group_list: list,
     severities: list,
+    compliance_frameworks: list,
     provider: str,
 ) -> set:

     """Generate the list of checks to execute based on the cloud provider and input arguments specified"""
     checks_to_execute = set()

     # Handle if there are checks passed using -c/--checks
@@ -39,7 +42,7 @@ def load_checks_to_execute(
         try:
             checks_to_execute = parse_checks_from_file(checks_file, provider)
         except Exception as e:
-            logger.error(f"{e.__class__.__name__} -- {e}")
+            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

     # Handle if there are services passed using -s/--services
     elif service_list:
@@ -65,7 +68,16 @@ def load_checks_to_execute(
                 available_groups, group_list, provider
             )
         except Exception as e:
-            logger.error(f"{e.__class__.__name__} -- {e}")
+            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

+    # Handle if there are compliance frameworks passed using --compliance
+    elif compliance_frameworks:
+        try:
+            checks_to_execute = parse_checks_from_compliance_framework(
+                compliance_frameworks, bulk_compliance_frameworks
+            )
+        except Exception as e:
+            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
+
     # If there are no checks passed as argument
     else:
@@ -73,7 +85,7 @@ def load_checks_to_execute(
             # Get all check modules to run with the specific provider
             checks = recover_checks_from_provider(provider)
         except Exception as e:
-            logger.error(f"{e.__class__.__name__} -- {e}")
+            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
         else:
             for check_name in checks:
                 # Recover check name from import path (last part)
lib/check/compliance.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import sys

from lib.check.compliance_models import Compliance_Base_Model, Compliance_Requirement
from lib.logger import logger


def update_checks_metadata_with_compliance(
    bulk_compliance_frameworks: dict, bulk_checks_metadata: dict
):
    """Update the check metadata model with the compliance framework"""
    try:
        for check in bulk_checks_metadata:
            check_compliance = []
            for framework in bulk_compliance_frameworks.values():
                for requirement in framework.Requirements:
                    compliance_requirements = []
                    if check in requirement.Checks:
                        # Create the Compliance_Requirement
                        requirement = Compliance_Requirement(
                            Id=requirement.Id,
                            Description=requirement.Description,
                            Attributes=requirement.Attributes,
                            Checks=requirement.Checks,
                        )
                        # For the check metadata we don't need the "Checks" key
                        delattr(requirement, "Checks")
                        # Include the requirement into the check's framework requirements
                        compliance_requirements.append(requirement)
                        # Create the Compliance_Model
                        compliance = Compliance_Base_Model(
                            Framework=framework.Framework,
                            Provider=framework.Provider,
                            Version=framework.Version,
                            Requirements=compliance_requirements,
                        )
                        # Include the compliance framework for the check
                        check_compliance.append(compliance)
            # Save it into the check's metadata
            bulk_checks_metadata[check].Compliance = check_compliance
        return bulk_checks_metadata
    except Exception as e:
        logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
        sys.exit()
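update_checks_metadata_with_compliance is effectively inverting an index: the specs map framework -> requirement -> checks, while the check metadata needs check -> frameworks and requirements. A dict-based sketch of the same inversion, independent of the pydantic models; framework, requirement, and check names are adapted from the fixtures in this diff:

from collections import defaultdict

frameworks = {
    "ens_v3_aws": {
        "op.mon.1": ["guardduty_is_enabled", "vpc_flow_logs_enabled"],
        "mp.si.2.r2.1": ["ec2_ebs_snapshot_encryption"],
    },
    "cis_v1.4_aws": {
        "1.4": ["iam_avoid_root_usage"],
    },
}

# invert framework -> requirement -> checks into check -> [(framework, requirement)]
checks_index = defaultdict(list)
for framework, requirements in frameworks.items():
    for requirement_id, checks in requirements.items():
        for check in checks:
            checks_index[check].append((framework, requirement_id))

assert checks_index["guardduty_is_enabled"] == [("ens_v3_aws", "op.mon.1")]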
lib/check/compliance_models.py (new file, 75 lines)
@@ -0,0 +1,75 @@
import sys
from enum import Enum
from typing import Any, List, Optional, Union

from pydantic import BaseModel, ValidationError

from lib.logger import logger


# ENS - Esquema Nacional de Seguridad - España
class ENS_Requirements_Nivel(str, Enum):
    """ENS V3 Requirements Level"""

    bajo = "bajo"
    medio = "medio"
    alto = "alto"
    pytec = "pytec"


class ENS_Requirements_Dimensiones(str, Enum):
    """ENS V3 Requirements Dimensions"""

    confidencialidad = "confidencialidad"
    integridad = "integridad"
    trazabilidad = "trazabilidad"
    autenticidad = "autenticidad"
    disponibilidad = "disponibilidad"


class ENS_Requirements(BaseModel):
    """ENS V3 Framework Requirements"""

    IdGrupoControl: str
    Marco: str
    Categoria: str
    Descripcion_Control: str
    Nivel: list[ENS_Requirements_Nivel]
    Dimensiones: list[ENS_Requirements_Dimensiones]


# Base Compliance Model
class Compliance_Requirement(BaseModel):
    """Compliance_Requirement holds the base model for every requirement within a compliance framework"""

    Id: str
    Description: str
    Attributes: list[Union[ENS_Requirements, Any]]
    Checks: List[str]


class Compliance_Base_Model(BaseModel):
    """Compliance_Base_Model holds the base model for every compliance framework"""

    Framework: str
    Provider: Optional[str]
    Version: str
    Requirements: list[Compliance_Requirement]


# Testing Pending
def load_compliance_framework(
    compliance_specification_file: str,
) -> Compliance_Base_Model:
    """load_compliance_framework loads and parses a Compliance Framework Specification"""
    try:
        compliance_framework = Compliance_Base_Model.parse_file(
            compliance_specification_file
        )
    except ValidationError as error:
        logger.critical(
            f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"
        )
        sys.exit()
    else:
        return compliance_framework
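load_compliance_framework delegates both parsing and validation to pydantic's parse_file, which raises ValidationError on a spec that does not match the model. A minimal, self-contained equivalent using parse_raw on an inline JSON document (pydantic v1 API, as used throughout this commit); the cut-down model here is illustrative:

from typing import List, Optional

from pydantic import BaseModel, ValidationError

class Requirement(BaseModel):
    Id: str
    Description: str
    Checks: List[str]

class Framework(BaseModel):
    Framework: str
    Provider: Optional[str]
    Version: str
    Requirements: List[Requirement]

framework = Framework.parse_raw('{"Framework": "ENS", "Version": "3", "Requirements": []}')
assert framework.Provider is None  # Optional fields default to None

try:
    Framework.parse_raw('{"Framework": "ENS"}')  # missing Version and Requirements
except ValidationError as error:
    print(error)  # reports every missing field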
lib/check/fixtures/aws/cis_v1.4_aws.json (new file, 82 lines)
@@ -0,0 +1,82 @@
{
  "Framework": "CIS",
  "Provider": "AWS",
  "Version": "1.4",
  "Requirements": [
    {
      "Id": "1.4",
      "Description": "Ensure no 'root' user account access key exists (Automated)",
      "Attributes": [
        {
          "Section": "1. Identity and Access Management (IAM)",
          "Level": [
            "level1"
          ],
          "Rationale": "Removing access keys associated with the 'root' user account limits vectors by which the account can be compromised. Additionally, removing the 'root' access keys encourages the creation and use of role based accounts that are least privileged.",
          "Guidance": "The 'root' user account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the 'root' user account be removed.",
          "Additional information": "IAM User account \"root\" for us-gov cloud regions is not enabled by default. However, on request to AWS support enables 'root' access only through access-keys (CLI, API methods) for us-gov cloud region.",
          "References": [
            "CCE-78910-7",
            "https://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html",
            "https://docs.aws.amazon.com/general/latest/gr/managing-aws-access-keys.html",
            "https://docs.aws.amazon.com/IAM/latest/APIReference/API_GetAccountSummary.html",
            "https://aws.amazon.com/blogs/security/an-easier-way-to-determine-the-presence-of-aws-account-access-keys/"
          ]
        }
      ],
      "Checks": [
        "iam_avoid_root_usage"
      ]
    },
    {
      "Id": "1.10",
      "Description": "Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Automated)",
      "Attributes": [
        {
          "Section": "1. Identity and Access Management (IAM)",
          "Level": [
            "level1"
          ],
          "Guidance": "Multi-Factor Authentication (MFA) adds an extra layer of authentication assurance beyond traditional credentials. With MFA enabled, when a user signs in to the AWS Console, they will be prompted for their user name and password as well as for an authentication code from their physical or virtual MFA token. It is recommended that MFA be enabled for all accounts that have a console password.",
          "Rationale": "Enabling MFA provides increased security for console access as it requires the authenticating principal to possess a device that displays a time-sensitive key and have knowledge of a credential.",
          "Impact": "AWS will soon end support for SMS multi-factor authentication (MFA). New customers are not allowed to use this feature. We recommend that existing customers switch to one of the following alternative methods of MFA.",
          "Additional information": "Forced IAM User Self-Service Remediation. Amazon has published a pattern that forces users to self-service setup MFA before they have access to their complete permissions set. Until they complete this step, they cannot access their full permissions. This pattern can be used on new AWS accounts. It can also be used on existing accounts - it is recommended users are given instructions and a grace period to accomplish MFA enrollment before active enforcement on existing AWS accounts.",
          "References": [
            "CCE-78901-6",
            "https://tools.ietf.org/html/rfc6238",
            "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa.html",
            "https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#enable-mfa-for-privileged-users",
            "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_enable_virtual.html",
            "https://blogs.aws.amazon.com/security/post/Tx2SJJYE082KBUK/How-to-Delegate-Management-of-Multi-Factor-Authentication-to-AWS-IAM-Users"
          ]
        }
      ],
      "Checks": [
        "iam_user_mfa_enabled_console_access"
      ]
    },
    {
      "Id": "2.1.1",
      "Description": "Ensure all S3 buckets employ encryption-at-rest (Automated)",
      "Attributes": [
        {
          "Section": "2. Storage",
          "Level": [
            "level2"
          ],
          "Guidance": "Amazon S3 provides a variety of no, or low, cost encryption options to protect data at rest.",
          "Rationale": "Encrypting data at rest reduces the likelihood that it is unintentionally exposed and can nullify the impact of disclosure if the encryption remains unbroken.",
          "Impact": "Amazon S3 buckets with default bucket encryption using SSE-KMS cannot be used as destination buckets for Amazon S3 server access logging. Only SSE-S3 default encryption is supported for server access log destination buckets.",
          "Additional information": "S3 bucket encryption only applies to objects as they are placed in the bucket. Enabling S3 bucket encryption does not encrypt objects previously stored within the bucket",
          "References": [
            "https://docs.aws.amazon.com/AmazonS3/latest/user-guide/default-bucket-encryption.html",
            "https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html#bucket-encryption-related-resources"
          ]
        }
      ],
      "Checks": [
        "s3_bucket_default_encryption"
      ]
    }
  ]
}
lib/check/fixtures/aws/ens_v3_aws.json (new file, 82 lines)
@@ -0,0 +1,82 @@
{
  "Framework": "ENS",
  "Version": "3",
  "Requirements": [
    {
      "Id": "op.mon.1",
      "Description": "Detección de intrusión",
      "Attributes": [
        {
          "Marco": "operacional",
          "Categoria": "monitorización del sistema",
          "Descripcion_Control": "- En ausencia de otras herramientas de terceros, habilitar Amazon GuarDuty para la detección de amenazas e intrusiones..- Activar el servicio de eventos AWS CloudTrail para todas las regiones..- Activar el servicio VPC FlowLogs..-Deberá habilitarse Amazon GuardDuty para todas las regiones tanto en la cuenta raíz como en las cuentas miembro de un entorno multi-cuenta..-Todas las cuentas miembro deberán estar añadidas para la supervisión bajo la cuenta raíz..-La adminsitración de Amazon GuardDuty quedará delegada exclusivamente a la cuenta de seguridad para garantizar una correcta asignación de los roles para este servicio.",
          "Nivel": [
            "bajo",
            "medio",
            "alto"
          ],
          "Dimensiones": [
            "confidencialidad",
            "integridad",
            "trazabilidad",
            "autenticidad",
            "disponibilidad"
          ]
        }
      ],
      "Checks": [
        "guardduty_is_enabled",
        "cloudtrail_multi_region_enabled",
        "vpc_flow_logs_enabled",
        "guardduty_is_enabled"
      ]
    },
    {
      "Id": "op.mon.3",
      "Description": "Protección de la integridad y de la autenticidad",
      "Attributes": [
        {
          "Marco": "operacional",
          "Categoria": "protección de las comunicaciones",
          "Descripcion_Control": "- Habilitar TLS en los balanceadores de carga ELB.- Evitar el uso de protocolos de cifrado inseguros en la conexión TLS entre clientes y balanceadores de carga.- Asegurar que los Buckets de almacenamiento S3 apliquen cifrado para la transferencia de datos empleando TLS.- Asegurar que la distribución entre frontales CloudFront y sus orígenes únicamente emplee tráfico HTTPS.",
          "Nivel": [
            "bajo",
            "medio",
            "alto"
          ],
          "Dimensiones": [
            "integridad",
            "autenticidad"
          ]
        }
      ],
      "Checks": [
        "ec2_elbv2_insecure_ssl_ciphers",
        "ec2_elbv2_insecure_ssl_ciphers",
        "s3_bucket_secure_transport_policy",
        "cloudfront_distributions_https_enabled"
      ]
    },
    {
      "Id": "mp.si.2.r2.1",
      "Description": "Copias de seguridad",
      "Attributes": [
        {
          "Marco": "medidas de protección",
          "Categoria": "protección de los soportes de información",
          "Descripcion_Control": "Se deberá asegurar el cifrado de las copias de seguridad de EBS.",
          "Nivel": [
            "alto"
          ],
          "Dimensiones": [
            "confidencialidad",
            "integridad"
          ]
        }
      ],
      "Checks": [
        "ec2_ebs_snapshot_encryption"
      ]
    }
  ]
}
@@ -1,7 +1,6 @@
 import sys
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
-from typing import List

 from pydantic import BaseModel, ValidationError

@@ -10,34 +9,21 @@ from lib.logger import logger

 @dataclass
 class Output_From_Options:
+    """Class to store the Prowler output modes options"""
+
     is_quiet: bool
     output_modes: list
     output_directory: str
     security_hub_enabled: bool
     output_filename: str
     allowlist_file: str
+    bulk_checks_metadata: dict
     verbose: str


-# Testing Pending
-def load_check_metadata(metadata_file: str) -> dict:
-    try:
-        check_metadata = Check_Metadata_Model.parse_file(metadata_file)
-    except ValidationError as error:
-        logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
-        sys.exit()
-    else:
-        return check_metadata
-
-
-class ComplianceItem(BaseModel):
-    Control: List[str]
-    Framework: str
-    Group: List[str]
-    Version: str


 class Code(BaseModel):
+    """Check's remediation information using IaC like CloudFormation, Terraform or the native CLI"""
+
     NativeIaC: str
     Terraform: str
     CLI: str
@@ -45,22 +31,26 @@ class Code(BaseModel):


 class Recommendation(BaseModel):
+    """Check's recommendation information"""
+
     Text: str
     Url: str


 class Remediation(BaseModel):
+    """Check's remediation: Code and Recommendation"""
+
     Code: Code
     Recommendation: Recommendation


 class Check_Metadata_Model(BaseModel):
+    """Check Metadata Model"""
+
     Provider: str
     CheckID: str
     # CheckName: str
     CheckTitle: str
     # CheckAlias: str
-    CheckType: List[str]
+    CheckType: list[str]
     ServiceName: str
     SubServiceName: str
     ResourceIdTemplate: str
@@ -70,151 +60,67 @@ class Check_Metadata_Model(BaseModel):
     Risk: str
     RelatedUrl: str
     Remediation: Remediation
-    Categories: List[str]
+    Categories: list[str]
     Tags: dict
-    DependsOn: List[str]
-    RelatedTo: List[str]
+    DependsOn: list[str]
+    RelatedTo: list[str]
     Notes: str
-    Compliance: List[ComplianceItem]
+    # We set the compliance to None to
+    # store the compliance later if supplied
+    Compliance: list = None


-class Check(ABC):
-    def __init__(self):
-        # Load metadata from check
+class Check(ABC, Check_Metadata_Model):
+    """Prowler Check"""
+
+    def __init__(self, **data):
+        """Check's init function. Calls the CheckMetadataModel init."""
+        # Parse the Check's metadata file
         check_path_name = self.__class__.__module__.replace(".", "/")
         metadata_file = f"{check_path_name}.metadata.json"
-        self.__check_metadata__ = load_check_metadata(metadata_file)
-        # Assign metadata values
-        self.__Provider__ = self.__check_metadata__.Provider
-        self.__CheckID__ = self.__check_metadata__.CheckID
-        # self.__CheckName__ = self.__check_metadata__.CheckName
-        self.__CheckTitle__ = self.__check_metadata__.CheckTitle
-        # self.__CheckAlias__ = self.__check_metadata__.CheckAlias
-        self.__CheckType__ = self.__check_metadata__.CheckType
-        self.__ServiceName__ = self.__check_metadata__.ServiceName
-        self.__SubServiceName__ = self.__check_metadata__.SubServiceName
-        self.__ResourceIdTemplate__ = self.__check_metadata__.ResourceIdTemplate
-        self.__Severity__ = self.__check_metadata__.Severity
-        self.__ResourceType__ = self.__check_metadata__.ResourceType
-        self.__Description__ = self.__check_metadata__.Description
-        self.__Risk__ = self.__check_metadata__.Risk
-        self.__RelatedUrl__ = self.__check_metadata__.RelatedUrl
-        self.__Remediation__ = self.__check_metadata__.Remediation
-        self.__Categories__ = self.__check_metadata__.Categories
-        self.__Tags__ = self.__check_metadata__.Tags
-        self.__DependsOn__ = self.__check_metadata__.DependsOn
-        self.__RelatedTo__ = self.__check_metadata__.RelatedTo
-        self.__Notes__ = self.__check_metadata__.Notes
-        self.__Compliance__ = self.__check_metadata__.Compliance
+        # Store it to validate them with Pydantic
+        data = Check_Metadata_Model.parse_file(metadata_file).dict()
+        # Calls parents init function
+        super().__init__(**data)

-    @property
-    def provider(self):
-        return self.__Provider__
-
-    @property
-    def checkID(self):
-        return self.__CheckID__
-
-    # @property
-    # def checkName(self):
-    #     return self.__CheckName__
-
-    @property
-    def checkTitle(self):
-        return self.__CheckTitle__
-
-    # @property
-    # def checkAlias(self):
-    #     return self.__CheckAlias__
-
-    @property
-    def checkType(self):
-        return self.__CheckType__
-
-    @property
-    def serviceName(self):
-        return self.__ServiceName__
-
-    @property
-    def subServiceName(self):
-        return self.__SubServiceName__
-
-    @property
-    def resourceIdTemplate(self):
-        return self.__ResourceIdTemplate__
-
-    @property
-    def severity(self):
-        return self.__Severity__
-
-    @property
-    def resourceType(self):
-        return self.__ResourceType__
-
-    @property
-    def description(self):
-        return self.__Description__
-
-    @property
-    def relatedUrl(self):
-        return self.__RelatedUrl__
-
-    @property
-    def risk(self):
-        return self.__Risk__
-
-    @property
-    def remediation(self):
-        return self.__Remediation__
-
-    @property
-    def categories(self):
-        return self.__Categories__
-
-    @property
-    def tags(self):
-        return self.__Tags__
-
-    @property
-    def dependsOn(self):
-        return self.__DependsOn__
-
-    @property
-    def relatedTo(self):
-        return self.__RelatedTo__
-
-    @property
-    def notes(self):
-        return self.__Notes__
-
-    @property
-    def compliance(self):
-        return self.__Compliance__
-
     @property
-    def metadata(self):
-        return self.__check_metadata__
+    def metadata(self) -> dict:
+        """Return the JSON representation of the check's metadata"""
+        return self.json()

     @abstractmethod
     def execute(self):
-        pass
+        """Execute the check's logic"""


 @dataclass
 class Check_Report:
+    """Contains the Check's finding information."""
+
     status: str
     region: str
     status_extended: str
-    check_metadata: dict
+    check_metadata: Check_Metadata_Model
     resource_id: str
     resource_details: str
     resource_tags: list
     resource_arn: str

     def __init__(self, metadata):
-        self.check_metadata = metadata
+        self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
         self.status_extended = ""
         self.resource_details = ""
         self.resource_tags = []
         self.resource_id = ""
         self.resource_arn = ""


+# Testing Pending
+def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
+    """load_check_metadata loads and parses a Check's metadata file"""
+    try:
+        check_metadata = Check_Metadata_Model.parse_file(metadata_file)
+    except ValidationError as error:
+        logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
+        sys.exit()
+    else:
+        return check_metadata
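The rewritten Check class above drops roughly twenty hand-written properties by inheriting its fields straight from the pydantic metadata model: __init__ locates the check's .metadata.json next to its module, parses it, and hands the resulting dict to the model's own constructor. A cut-down sketch of that pattern (pydantic v1, whose model metaclass is ABCMeta-based, so mixing in ABC works); the class and field names here are illustrative:

from abc import ABC, abstractmethod

from pydantic import BaseModel

class Metadata_Model(BaseModel):
    CheckID: str
    Severity: str

class BaseCheck(ABC, Metadata_Model):
    def __init__(self, **data):
        # The real Check parses "<module path>.metadata.json" here;
        # this sketch takes the metadata dict directly.
        super().__init__(**data)

    @abstractmethod
    def execute(self):
        """Execute the check's logic"""

class demo_check(BaseCheck):
    def execute(self):
        return []

check = demo_check(CheckID="demo_check", Severity="low")
assert check.CheckID == "demo_check"  # plain field access, no property boilerplate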
@@ -4,7 +4,7 @@ from typing import List, Optional
 from pydantic import BaseModel

 from config.config import timestamp
-from lib.check.models import Check_Report, ComplianceItem, Remediation
+from lib.check.models import Check_Report, Remediation
 from providers.aws.lib.audit_info.models import AWS_Organizations_Info


@@ -25,7 +25,6 @@ class Check_Output_JSON(BaseModel):
     OrganizationsInfo: Optional[AWS_Organizations_Info]
     Region: str = ""
     CheckID: str
-    # CheckName: str
     CheckTitle: str
     CheckType: List[str]
     ServiceName: str
@@ -46,7 +45,7 @@
     DependsOn: List[str]
     RelatedTo: List[str]
     Notes: str
-    Compliance: List[ComplianceItem]
+    # Compliance: List[ComplianceItem]


 # JSON ASFF Output
@@ -92,6 +91,26 @@ class Check_Output_JSON_ASFF(BaseModel):
     Remediation: dict = None


+class Check_Output_CSV_ENS_RD2022(BaseModel):
+    Provider: str
+    AccountId: str
+    Region: str
+    AssessmentDate: str
+    Requirements_Id: str
+    Requirements_Description: str
+    Requirements_Attributes_IdGrupoControl: str
+    Requirements_Attributes_Marco: str
+    Requirements_Attributes_Categoria: str
+    Requirements_Attributes_DescripcionControl: str
+    Requirements_Attributes_Nivel: str
+    Requirements_Attributes_Tipo: str
+    Requirements_Attributes_Dimensiones: str
+    Status: str
+    StatusExtended: str
+    ResourceId: str
+    CheckId: str
+
+
 @dataclass
 class Check_Output_CSV:
     assessment_start_time: str
@@ -106,7 +125,6 @@ class Check_Output_CSV:
     account_tags: str
     region: str
     check_id: str
-    # check_name: str
     check_title: str
     check_type: str
     status: str
@@ -132,7 +150,7 @@ class Check_Output_CSV:
     depends_on: str
     related_to: str
     notes: str
-    compliance: str
+    # compliance: str

     def get_csv_header(self):
         csv_header = []
@@ -160,7 +178,6 @@
         self.account_tags = organizations.account_details_tags
         self.region = report.region
         self.check_id = report.check_metadata.CheckID
-        # self.check_name = report.check_metadata.CheckName
         self.check_title = report.check_metadata.CheckTitle
         self.check_type = report.check_metadata.CheckType
         self.status = report.status
@@ -198,7 +215,7 @@
         self.depends_on = self.__unroll_list__(report.check_metadata.DependsOn)
         self.related_to = self.__unroll_list__(report.check_metadata.RelatedTo)
         self.notes = report.check_metadata.Notes
-        self.compliance = self.__unroll_compliance__(report.check_metadata.Compliance)
+        # self.compliance = self.__unroll_compliance__(report.check_metadata.Compliance)

     def __unroll_list__(self, listed_items: list):
         unrolled_items = ""
@@ -2,6 +2,8 @@ import json
 import os
 import sys
 from csv import DictWriter
+from io import TextIOWrapper
+from typing import Any

 from colorama import Fore, Style
 from tabulate import tabulate
@@ -12,12 +14,14 @@ from config.config import (
     json_file_suffix,
     orange_color,
     prowler_version,
+    timestamp,
     timestamp_iso,
     timestamp_utc,
 )
 from lib.logger import logger
 from lib.outputs.models import (
     Check_Output_CSV,
+    Check_Output_CSV_ENS_RD2022,
     Check_Output_JSON,
     Check_Output_JSON_ASFF,
     Compliance,
@@ -32,18 +36,17 @@ from providers.aws.lib.security_hub.security_hub import send_to_security_hub


 def report(check_findings, output_options, audit_info):
     # Sort check findings
     check_findings.sort(key=lambda x: x.region)
-    csv_fields = []
-    # check output options
+
+    # Generate the required output files
+    # csv_fields = []
     file_descriptors = {}
     if output_options.output_modes:
-        if "csv" in output_options.output_modes:
-            csv_fields = generate_csv_fields()
-
         # We have to create the required output files
         file_descriptors = fill_file_descriptors(
             output_options.output_modes,
             output_options.output_directory,
-            csv_fields,
             output_options.output_filename,
         )
@@ -70,7 +73,64 @@ def report(check_findings, output_options, audit_info):
                 f"\t{color}{finding.status}{Style.RESET_ALL} {finding.region}: {finding.status_extended}"
             )
         if file_descriptors:
             # sending the finding to input options
+            if "ens_rd2022_aws" in output_options.output_modes:
+                # We have to retrieve all the check's compliance requirements
+                check_compliance = output_options.bulk_checks_metadata[
+                    finding.check_metadata.CheckID
+                ].Compliance
+                for compliance in check_compliance:
+                    if (
+                        compliance.Framework == "ENS"
+                        and compliance.Version == "RD2022"
+                    ):
+                        for requirement in compliance.Requirements:
+                            requirement_description = requirement.Description
+                            requirement_id = requirement.Id
+                            for attribute in requirement.Attributes:
+                                compliance_row = Check_Output_CSV_ENS_RD2022(
+                                    Provider=finding.check_metadata.Provider,
+                                    AccountId=audit_info.audited_account,
+                                    Region=finding.region,
+                                    AssessmentDate=timestamp.isoformat(),
+                                    Requirements_Id=requirement_id,
+                                    Requirements_Description=requirement_description,
+                                    Requirements_Attributes_IdGrupoControl=attribute.get(
+                                        "IdGrupoControl"
+                                    ),
+                                    Requirements_Attributes_Marco=attribute.get(
+                                        "Marco"
+                                    ),
+                                    Requirements_Attributes_Categoria=attribute.get(
+                                        "Categoria"
+                                    ),
+                                    Requirements_Attributes_DescripcionControl=attribute.get(
+                                        "DescripcionControl"
+                                    ),
+                                    Requirements_Attributes_Nivel=attribute.get(
+                                        "Nivel"
+                                    ),
+                                    Requirements_Attributes_Tipo=attribute.get(
+                                        "Tipo"
+                                    ),
+                                    Requirements_Attributes_Dimensiones=",".join(
+                                        attribute.get("Dimensiones")
+                                    ),
+                                    Status=finding.status,
+                                    StatusExtended=finding.status_extended,
+                                    ResourceId=finding.resource_id,
+                                    CheckId=finding.check_metadata.CheckID,
+                                )
+
+                                csv_header = generate_csv_fields(
+                                    Check_Output_CSV_ENS_RD2022
+                                )
+                                csv_writer = DictWriter(
+                                    file_descriptors["ens_rd2022_aws"],
+                                    fieldnames=csv_header,
+                                    delimiter=";",
+                                )
+                                csv_writer.writerow(compliance_row.__dict__)
+
             if "csv" in file_descriptors:
                 finding_output = Check_Output_CSV(
                     audit_info.audited_account,
@@ -79,7 +139,9 @@ def report(check_findings, output_options, audit_info):
                     audit_info.organizations_metadata,
                 )
                 csv_writer = DictWriter(
-                    file_descriptors["csv"], fieldnames=csv_fields, delimiter=";"
+                    file_descriptors["csv"],
+                    fieldnames=generate_csv_fields(Check_Output_CSV),
+                    delimiter=";",
                 )
                 csv_writer.writerow(finding_output.__dict__)

@@ -117,65 +179,75 @@ def report(check_findings, output_options, audit_info):
             file_descriptors.get(file_descriptor).close()


-def fill_file_descriptors(output_modes, output_directory, csv_fields, output_filename):
+def initialize_file_descriptor(
+    filename: str, output_mode: str, format: Any = None
+) -> TextIOWrapper:
+    """Open/Create the output file. If needed include headers or the required format"""
+
+    if file_exists(filename):
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+    else:
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+
+    if output_mode in ("csv", "ens_rd2022_aws"):
+        # Format is the class model of the CSV format to print the headers
+        csv_header = [x.upper() for x in generate_csv_fields(format)]
+        csv_writer = DictWriter(
+            file_descriptor, fieldnames=csv_header, delimiter=";"
+        )
+        csv_writer.writeheader()
+
+    if output_mode in ("json", "json-asff"):
+        file_descriptor = open_file(
+            filename,
+            "a",
+        )
+        file_descriptor.write("[")
+
+    return file_descriptor
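initialize_file_descriptor centralizes the open-or-append logic that fill_file_descriptors previously repeated per output mode: append to the output file, and emit the CSV header (or the opening "[" for JSON) when starting it. A sketch of the create-or-append intent, writing the header only for a brand-new file; the helper name and path are hypothetical:

import os
from csv import DictWriter

def open_csv_for_append(filename: str, fieldnames: list):
    """Append to a CSV file, writing the header only when the file is new."""
    is_new_file = not os.path.isfile(filename)
    file_descriptor = open(filename, "a")
    writer = DictWriter(file_descriptor, fieldnames=fieldnames, delimiter=";")
    if is_new_file:
        writer.writeheader()
    return file_descriptor, writer

fd, writer = open_csv_for_append("/tmp/findings.csv", ["CHECKID", "STATUS"])
writer.writerow({"CHECKID": "demo_check", "STATUS": "PASS"})
fd.close()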
+
+
+def fill_file_descriptors(output_modes, output_directory, output_filename):
     file_descriptors = {}
-    for output_mode in output_modes:
-        if output_mode == "csv":
-            filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
-            if file_exists(filename):
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-            else:
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-                csv_header = [x.upper() for x in csv_fields]
-                csv_writer = DictWriter(
-                    file_descriptor, fieldnames=csv_header, delimiter=";"
-                )
-                csv_writer.writeheader()
-            file_descriptors.update({output_mode: file_descriptor})
-
-        if output_mode == "json":
-            filename = f"{output_directory}/{output_filename}{json_file_suffix}"
-            if file_exists(filename):
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-            else:
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-                file_descriptor.write("[")
-            file_descriptors.update({output_mode: file_descriptor})
-
-        if output_mode == "json-asff":
-            filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
-            if file_exists(filename):
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-            else:
-                file_descriptor = open_file(
-                    filename,
-                    "a",
-                )
-                file_descriptor.write("[")
-
-            file_descriptors.update({output_mode: file_descriptor})
+    if output_modes:
+        for output_mode in output_modes:
+            if output_mode == "csv":
+                filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
+                file_descriptor = initialize_file_descriptor(
+                    filename, output_mode, Check_Output_CSV
+                )
+                file_descriptors.update({output_mode: file_descriptor})
+
+            if output_mode == "json":
+                filename = f"{output_directory}/{output_filename}{json_file_suffix}"
+                file_descriptor = initialize_file_descriptor(filename, output_mode)
+                file_descriptors.update({output_mode: file_descriptor})
+
+            if output_mode == "json-asff":
+                filename = (
+                    f"{output_directory}/{output_filename}{json_asff_file_suffix}"
+                )
+                file_descriptor = initialize_file_descriptor(filename, output_mode)
+                file_descriptors.update({output_mode: file_descriptor})
+
+            if output_mode == "ens_rd2022_aws":
+                filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
+                file_descriptor = initialize_file_descriptor(
+                    filename, output_mode, Check_Output_CSV_ENS_RD2022
+                )
+                file_descriptors.update({output_mode: file_descriptor})

     return file_descriptors


-def set_report_color(status):
+def set_report_color(status: str) -> str:
+    """Return the color for a given result status"""
     color = ""
     if status == "PASS":
         color = Fore.GREEN
@@ -192,9 +264,10 @@ def set_report_color(status):
     return color


-def generate_csv_fields():
+def generate_csv_fields(format: Any) -> list[str]:
+    """Generates the CSV headers for the given class"""
     csv_fields = []
-    for field in Check_Output_CSV.__dict__["__annotations__"].keys():
+    for field in format.__dict__.get("__annotations__").keys():
         csv_fields.append(field)
     return csv_fields
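generate_csv_fields now derives the header from whichever model class is passed in by reading the class's __annotations__, so the plain CSV and the ENS CSV share one code path. The same trick works for any annotated class; a small sketch with an illustrative row type:

from dataclasses import dataclass
from typing import Any

def generate_fields(format: Any) -> list:
    """Return a class's annotated field names, in declaration order."""
    return list(format.__dict__.get("__annotations__", {}).keys())

@dataclass
class Row:
    check_id: str
    status: str
    region: str

assert generate_fields(Row) == ["check_id", "status", "region"]
header = [field.upper() for field in generate_fields(Row)]  # as the writers do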
|
||||
|
||||
@@ -271,7 +344,9 @@ def close_json(output_filename, output_directory, mode):
|
||||
file_descriptor.write("]")
|
||||
file_descriptor.close()
|
||||
except Exception as error:
|
||||
logger.critical(f"{error.__class__.__name__} -- {error}")
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
|
||||
|
||||
@@ -294,7 +369,9 @@ def send_to_s3_bucket(
|
||||
s3_client.upload_file(file_name, bucket_name, object_name)
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(f"{error.__class__.__name__} -- {error}")
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
|
||||
|
||||
@@ -305,75 +382,76 @@ def display_summary_table(
|
||||
output_directory: str,
|
||||
):
|
||||
try:
|
||||
if findings:
|
||||
current = {
|
||||
"Service": "",
|
||||
"Provider": "",
|
||||
"Critical": 0,
|
||||
"High": 0,
|
||||
"Medium": 0,
|
||||
"Low": 0,
|
||||
}
|
||||
findings_table = {
|
||||
"Provider": [],
|
||||
"Service": [],
|
||||
"Status": [],
|
||||
"Critical": [],
|
||||
"High": [],
|
||||
"Medium": [],
|
||||
"Low": [],
|
||||
}
|
||||
pass_count = fail_count = 0
|
||||
for finding in findings:
|
||||
# If new service and not first, add previous row
|
||||
if (
|
||||
current["Service"] != finding.check_metadata.ServiceName
|
||||
and current["Service"]
|
||||
):
|
||||
current = {
|
||||
"Service": "",
|
||||
"Provider": "",
|
||||
"Total": 0,
|
||||
"Critical": 0,
|
||||
"High": 0,
|
||||
"Medium": 0,
|
||||
"Low": 0,
|
||||
}
|
||||
findings_table = {
|
||||
"Provider": [],
|
||||
"Service": [],
|
||||
"Status": [],
|
||||
"Critical": [],
|
||||
"High": [],
|
||||
"Medium": [],
|
||||
"Low": [],
|
||||
}
|
||||
pass_count = fail_count = 0
|
||||
for finding in findings:
|
||||
# If new service and not first, add previous row
|
||||
if (
|
||||
current["Service"] != finding.check_metadata.ServiceName
|
||||
and current["Service"]
|
||||
):
|
||||
|
||||
add_service_to_table(findings_table, current)
|
||||
add_service_to_table(findings_table, current)
|
||||
|
||||
current["Critical"] = current["High"] = current["Medium"] = current[
|
||||
"Low"
|
||||
] = 0
|
||||
current["Total"] = current["Critical"] = current["High"] = current[
|
||||
"Medium"
|
||||
] = current["Low"] = 0
|
||||
|
||||
current["Service"] = finding.check_metadata.ServiceName
|
||||
current["Provider"] = finding.check_metadata.Provider
|
||||
current["Service"] = finding.check_metadata.ServiceName
|
||||
current["Provider"] = finding.check_metadata.Provider
|
||||
|
||||
if finding.status == "PASS":
|
||||
pass_count += 1
|
||||
elif finding.status == "FAIL":
|
||||
fail_count += 1
|
||||
if finding.check_metadata.Severity == "critical":
|
||||
current["Critical"] += 1
|
||||
elif finding.check_metadata.Severity == "high":
|
||||
current["High"] += 1
|
||||
elif finding.check_metadata.Severity == "medium":
|
||||
current["Medium"] += 1
|
||||
elif finding.check_metadata.Severity == "low":
|
||||
current["Low"] += 1
|
||||
current["Total"] += 1
|
||||
if finding.status == "PASS":
|
||||
pass_count += 1
|
||||
elif finding.status == "FAIL":
|
||||
fail_count += 1
|
||||
if finding.check_metadata.Severity == "critical":
|
||||
current["Critical"] += 1
|
||||
elif finding.check_metadata.Severity == "high":
|
||||
current["High"] += 1
|
||||
elif finding.check_metadata.Severity == "medium":
|
||||
current["Medium"] += 1
|
||||
elif finding.check_metadata.Severity == "low":
|
||||
current["Low"] += 1
|
||||
|
||||
# Add final service
|
||||
add_service_to_table(findings_table, current)
|
||||
# Add final service
|
||||
add_service_to_table(findings_table, current)
|
||||
|
||||
print("\nOverview Results:")
|
||||
overview_table = [
|
||||
[
|
||||
f"{Fore.RED}{round(fail_count/len(findings)*100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
|
||||
f"{Fore.GREEN}{round(pass_count/len(findings)*100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
|
||||
]
|
||||
print("\nOverview Results:")
|
||||
overview_table = [
|
||||
[
|
||||
f"{Fore.RED}{round(fail_count/len(findings)*100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
|
||||
f"{Fore.GREEN}{round(pass_count/len(findings)*100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
|
||||
]
|
||||
print(tabulate(overview_table, tablefmt="rounded_grid"))
|
||||
print(
|
||||
f"\nAccount {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL} Scan Results (severity columns are for fails only):"
|
||||
)
|
||||
print(tabulate(findings_table, headers="keys", tablefmt="rounded_grid"))
|
||||
print(
|
||||
f"{Style.BRIGHT}* You only see here those services that contains resources.{Style.RESET_ALL}"
|
||||
)
|
||||
print("\nDetailed results are in:")
|
||||
print(f" - CSV: {output_directory}/{output_filename}.csv")
|
||||
print(f" - JSON: {output_directory}/{output_filename}.json\n")
|
||||
]
|
||||
print(tabulate(overview_table, tablefmt="rounded_grid"))
|
||||
print(
|
||||
f"\nAccount {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL} Scan Results (severity columns are for fails only):"
|
||||
)
|
||||
print(tabulate(findings_table, headers="keys", tablefmt="rounded_grid"))
|
||||
print(
|
||||
f"{Style.BRIGHT}* You only see here those services that contains resources.{Style.RESET_ALL}"
|
||||
)
|
||||
print("\nDetailed results are in:")
|
||||
print(f" - CSV: {output_directory}/{output_filename}.csv")
|
||||
print(f" - JSON: {output_directory}/{output_filename}.json\n")
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
@@ -389,9 +467,12 @@ def add_service_to_table(findings_table, current):
        or current["Medium"] > 0
        or current["Low"] > 0
    ):
        current["Status"] = f"{Fore.RED}FAIL{Style.RESET_ALL}"
        total_fails = (
            current["Critical"] + current["High"] + current["Medium"] + current["Low"]
        )
        current["Status"] = f"{Fore.RED}FAIL ({total_fails}){Style.RESET_ALL}"
    else:
        current["Status"] = f"{Fore.GREEN}PASS{Style.RESET_ALL}"
        current["Status"] = f"{Fore.GREEN}PASS ({current['Total']}){Style.RESET_ALL}"
    findings_table["Provider"].append(current["Provider"])
    findings_table["Service"].append(current["Service"])
    findings_table["Status"].append(current["Status"])
@@ -403,3 +484,109 @@ def add_service_to_table(findings_table, current):
        f"{Fore.YELLOW}{current['Medium']}{Style.RESET_ALL}"
    )
    findings_table["Low"].append(f"{Fore.BLUE}{current['Low']}{Style.RESET_ALL}")

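For reference, the status-cell change in add_service_to_table above reduces to a small pure function: any failed finding of any severity marks the service row FAIL with the fail count, otherwise the row is PASS with the total count. A minimal sketch, assuming plain strings in place of the colorama escapes used by the real code:

def status_cell(critical: int, high: int, medium: int, low: int, total: int) -> str:
    # The sum of the per-severity fail counters decides the row state
    total_fails = critical + high + medium + low
    if total_fails > 0:
        return f"FAIL ({total_fails})"
    return f"PASS ({total})"

assert status_cell(0, 1, 0, 0, total=7) == "FAIL (1)"
assert status_cell(0, 0, 0, 0, total=7) == "PASS (7)"
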
def display_compliance_table(
    findings: list,
    bulk_checks_metadata: dict,
    compliance_framework: str,
    output_filename: str,
    output_directory: str,
):
    try:
        if "ens_rd2022_aws" in compliance_framework:
            marcos = {}
            ens_compliance_table = {
                "Proveedor": [],
                "Marco/Categoria": [],
                "Estado": [],
                "PYTEC": [],
                "Alto": [],
                "Medio": [],
                "Bajo": [],
            }
            pass_count = fail_count = 0
            for finding in findings:
                check = bulk_checks_metadata[finding.check_metadata.CheckID]
                check_compliances = check.Compliance
                for compliance in check_compliances:
                    if (
                        compliance.Framework == "ENS"
                        and compliance.Provider == "AWS"
                        and compliance.Version == "RD2022"
                    ):
                        for requirement in compliance.Requirements:
                            for attribute in requirement.Attributes:
                                marco_categoria = (
                                    f"{attribute['Marco']}/{attribute['Categoria']}"
                                )
                                # Check if Marco/Categoria exists
                                if marco_categoria not in marcos:
                                    marcos[marco_categoria] = {
                                        "Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}",
                                        "Pytec": 0,
                                        "Alto": 0,
                                        "Medio": 0,
                                        "Bajo": 0,
                                    }
                                if finding.status == "FAIL":
                                    fail_count += 1
                                    marcos[marco_categoria][
                                        "Estado"
                                    ] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"
                                elif finding.status == "PASS":
                                    pass_count += 1
                                if attribute["Nivel"] == "pytec":
                                    marcos[marco_categoria]["Pytec"] += 1
                                elif attribute["Nivel"] == "alto":
                                    marcos[marco_categoria]["Alto"] += 1
                                elif attribute["Nivel"] == "medio":
                                    marcos[marco_categoria]["Medio"] += 1
                                elif attribute["Nivel"] == "bajo":
                                    marcos[marco_categoria]["Bajo"] += 1

            # Add results to table
            for marco in marcos:
                ens_compliance_table["Proveedor"].append("aws")
                ens_compliance_table["Marco/Categoria"].append(marco)
                ens_compliance_table["Estado"].append(marcos[marco]["Estado"])
                ens_compliance_table["PYTEC"].append(
                    f"{Fore.LIGHTRED_EX}{marcos[marco]['Pytec']}{Style.RESET_ALL}"
                )
                ens_compliance_table["Alto"].append(
                    f"{Fore.RED}{marcos[marco]['Alto']}{Style.RESET_ALL}"
                )
                ens_compliance_table["Medio"].append(
                    f"{Fore.YELLOW}{marcos[marco]['Medio']}{Style.RESET_ALL}"
                )
                ens_compliance_table["Bajo"].append(
                    f"{Fore.BLUE}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
                )

            print(
                f"\nEstado de Cumplimiento de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:"
            )
            overview_table = [
                [
                    f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}",
                    f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}",
                ]
            ]
            print(tabulate(overview_table, tablefmt="rounded_grid"))
            print(f"\nResultados de {Fore.YELLOW}ENS RD2022 - AWS{Style.RESET_ALL}:")
            print(
                tabulate(ens_compliance_table, headers="keys", tablefmt="rounded_grid")
            )
            print(
                f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}"
            )
            print("\nResultados detallados en:")
            print(
                f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
            )

    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
        )
        sys.exit()

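The ENS branch above is essentially a group-by over (finding, requirement attribute) pairs keyed by Marco/Categoria: a single FAIL flips the group's Estado, and the Nivel counters track how many requirement attributes of each level were evaluated. A minimal standalone sketch of that aggregation, assuming simplified (status, marco, categoria, nivel) tuples in place of Prowler's finding and attribute models:

from collections import defaultdict

def aggregate_ens(rows):
    marcos = defaultdict(
        lambda: {"Estado": "CUMPLE", "Pytec": 0, "Alto": 0, "Medio": 0, "Bajo": 0}
    )
    for status, marco, categoria, nivel in rows:
        bucket = marcos[f"{marco}/{categoria}"]
        if status == "FAIL":
            bucket["Estado"] = "NO CUMPLE"  # one failing check flips the whole group
        bucket[nivel.capitalize()] += 1  # level is counted regardless of status
    return dict(marcos)

rows = [("PASS", "op", "acc", "medio"), ("FAIL", "op", "acc", "alto")]
assert aggregate_ens(rows)["op/acc"] == {
    "Estado": "NO CUMPLE", "Pytec": 0, "Alto": 1, "Medio": 1, "Bajo": 0
}
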
@@ -2,6 +2,7 @@ import os
from os import path, remove

import boto3
import pytest
from colorama import Fore
from moto import mock_s3

@@ -9,14 +10,15 @@ from config.config import (
    csv_file_suffix,
    json_asff_file_suffix,
    json_file_suffix,
    orange_color,
    output_file_timestamp,
    prowler_version,
    timestamp_iso,
    timestamp_utc,
    orange_color,
)
from lib.check.models import Check_Report, load_check_metadata
from lib.outputs.models import (
    Check_Output_CSV,
    Check_Output_JSON,
    Check_Output_JSON_ASFF,
    Compliance,
@@ -40,7 +42,7 @@ class Test_Outputs:
    def test_fill_file_descriptors(self):
        audited_account = "123456789012"
        output_directory = f"{os.path.dirname(os.path.realpath(__file__))}"
        csv_fields = generate_csv_fields()
        generate_csv_fields(Check_Output_CSV)
        test_output_modes = [
            ["csv"],
            ["json"],
@@ -98,7 +100,6 @@ class Test_Outputs:
        test_output_file_descriptors = fill_file_descriptors(
            output_mode_list,
            output_directory,
            csv_fields,
            output_filename,
        )
        for output_mode in output_mode_list:
@@ -115,6 +116,17 @@ class Test_Outputs:
        for status in test_status:
            assert set_report_color(status) in test_colors

    def test_set_report_color_invalid(self):
        test_status = "INVALID"

        with pytest.raises(Exception) as exc:
            set_report_color(test_status)

        assert "Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING" in str(
            exc.value
        )
        assert exc.type == Exception

    def test_generate_csv_fields(self):
        expected = [
            "assessment_start_time",
@@ -154,10 +166,10 @@ class Test_Outputs:
            "depends_on",
            "related_to",
            "notes",
            "compliance",
            # "compliance",
        ]

        assert generate_csv_fields() == expected
        assert generate_csv_fields(Check_Output_CSV) == expected

    def test_fill_json(self):
        input_audit_info = AWS_Audit_Info(
@@ -177,7 +189,7 @@ class Test_Outputs:
        finding = Check_Report(
            load_check_metadata(
                f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
            )
            ).json()
        )
        finding.resource_details = "Test resource details"
        finding.resource_id = "test-resource"
@@ -221,7 +233,7 @@ class Test_Outputs:
        finding = Check_Report(
            load_check_metadata(
                f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
            )
            ).json()
        )
        finding.resource_details = "Test resource details"
        finding.resource_id = "test-resource"

@@ -12,7 +12,9 @@ def open_file(input_file: str, mode: str = "r") -> TextIOWrapper:
    try:
        f = open(input_file, mode)
    except Exception as e:
        logger.critical(f"{input_file}: {e.__class__.__name__}")
        logger.critical(
            f"{input_file}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
        )
        sys.exit()
    else:
        return f
@@ -23,7 +25,9 @@ def parse_json_file(input_file: TextIOWrapper) -> Any:
    try:
        json_file = json.load(input_file)
    except Exception as e:
        logger.critical(f"{input_file.name}: {e.__class__.__name__}")
        logger.critical(
            f"{input_file.name}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
        )
        sys.exit()
    else:
        return json_file
@@ -34,13 +38,12 @@ def file_exists(filename: str):
    try:
        exists_filename = exists(filename)
    except Exception as e:
        logger.critical(f"{exists_filename.name}: {e.__class__.__name__}")
        quit()
        logger.critical(
            f"{exists_filename.name}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
        )
        sys.exit()
    else:
        if exists_filename:
            return True
        else:
            return False
        return exists_filename


# create sha512 hash for string

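The three utils hunks above apply the same hardening in each helper: every critical log line now carries the exception class plus the traceback line number, and quit() is replaced by sys.exit(). The pattern in isolation, using only the standard library:

import logging
import sys

logger = logging.getLogger(__name__)

def open_file_sketch(input_file: str, mode: str = "r"):
    # Sketch of the hardened error path used by open_file above
    try:
        f = open(input_file, mode)
    except Exception as e:
        # tb_lineno is the line inside this frame where the exception was raised
        logger.critical(f"{input_file}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]")
        sys.exit()
    else:
        return f
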
@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "accessanalyzer_enabled_without_findings",
  "CheckTitle": "Check if IAM Access Analyzer is enabled without findings",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "accessanalyzer",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:access-analyzer:region:account-id:analyzer/resource-id",
@@ -30,13 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": [
    {
      "Framework": "CIS-AWS",
      "Version": "1.4",
      "Control": [ "1.20" ],
      "Group": [ "level1" ]
    }
  ]
  "Notes": ""
}

@@ -8,7 +8,7 @@ class accessanalyzer_enabled_without_findings(Check):
    def execute(self):
        findings = []
        for analyzer in accessanalyzer_client.analyzers:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = analyzer.region
            if analyzer.status == "ACTIVE":
                if analyzer.findings_count > 0:

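From here on, every check is updated from Check_Report(self.metadata) to Check_Report(self.metadata()), i.e. the metadata is consumed through a method call rather than a bare attribute; together with the ).json() change in the tests above, this suggests each report now receives a serialized copy of the check metadata. A hypothetical sketch of the shape of that contract (the Check base-class internals are not part of this diff, so names here are illustrative):

import json

class CheckSketch:
    # Hypothetical stand-in for the Check base class
    def __init__(self, metadata_dict: dict):
        self._metadata = metadata_dict

    def metadata(self) -> str:
        return json.dumps(self._metadata)  # each caller gets an independent copy

class CheckReportSketch:
    # Hypothetical stand-in for Check_Report
    def __init__(self, metadata: str):
        self.check_metadata = json.loads(metadata)

check = CheckSketch({"CheckID": "accessanalyzer_enabled_without_findings"})
report = CheckReportSketch(check.metadata())  # mirrors Check_Report(self.metadata())
assert report.check_metadata["CheckID"] == "accessanalyzer_enabled_without_findings"
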
@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "account_maintain_current_contact_details",
  "CheckTitle": "Maintain current contact details.",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "account",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ from providers.aws.services.account.account_client import account_client

class account_maintain_current_contact_details(Check):
    def execute(self):
        report = Check_Report(self.metadata)
        report = Check_Report(self.metadata())
        report.region = account_client.region
        report.resource_id = account_client.audited_account
        report.status = "INFO"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "account_security_contact_information_is_registered",
  "CheckTitle": "Ensure security contact information is registered.",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "account",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ from providers.aws.services.account.account_client import account_client

class account_security_contact_information_is_registered(Check):
    def execute(self):
        report = Check_Report(self.metadata)
        report = Check_Report(self.metadata())
        report.region = account_client.region
        report.resource_id = account_client.audited_account
        report.status = "INFO"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "account_security_questions_are_registered_in_the_aws_account",
  "CheckTitle": "Ensure security questions are registered in the AWS account.",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "account",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ from providers.aws.services.account.account_client import account_client

class account_security_questions_are_registered_in_the_aws_account(Check):
    def execute(self):
        report = Check_Report(self.metadata)
        report = Check_Report(self.metadata())
        report.region = account_client.region
        report.resource_id = account_client.audited_account
        report.status = "INFO"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "acm_certificates_expiration_check",
  "CheckTitle": "Check if ACM Certificates are about to expire in specific days or less",
  "CheckType": ["Data Protection"],
  "CheckType": [
    "Data Protection"
  ],
  "ServiceName": "acm",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:acm:region:account-id:certificate/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -8,7 +8,7 @@ class acm_certificates_expiration_check(Check):
    def execute(self):
        findings = []
        for certificate in acm_client.certificates:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = certificate.region
            if certificate.expiration_days > DAYS_TO_EXPIRE_THRESHOLD:
                report.status = "PASS"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "acm_certificates_transparency_logs_enabled",
  "CheckTitle": "Check if ACM certificates have Certificate Transparency logging enabled",
  "CheckType": ["Logging and Monitoring"],
  "CheckType": [
    "Logging and Monitoring"
  ],
  "ServiceName": "acm",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:acm:region:account-id:certificate/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class acm_certificates_transparency_logs_enabled(Check):
    def execute(self):
        findings = []
        for certificate in acm_client.certificates:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = certificate.region
            if certificate.type == "IMPORTED":
                report.status = "PASS"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "apigateway_authorizers_enabled",
  "CheckTitle": "Check if API Gateway has configured authorizers.",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "rest_api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class apigateway_authorizers_enabled(Check):
    def execute(self):
        findings = []
        for rest_api in apigateway_client.rest_apis:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = rest_api.region
            if rest_api.authorizer:
                report.status = "PASS"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "apigateway_client_certificate_enabled",
  "CheckTitle": "Check if API Gateway has client certificate enabled to access your backend endpoint.",
  "CheckType": ["Data Protection"],
  "CheckType": [
    "Data Protection"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "rest_api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -7,7 +7,7 @@ class apigateway_client_certificate_enabled(Check):
        findings = []
        for rest_api in apigateway_client.rest_apis:
            for stage in rest_api.stages:
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                if stage.client_certificate:
                    report.status = "PASS"
                    report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} in stage {stage.name} has client certificate enabled."

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "apigateway_endpoint_public",
  "CheckTitle": "Check if API Gateway endpoint is public or private.",
  "CheckType": ["Infrastructure Security"],
  "CheckType": [
    "Infrastructure Security"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "rest_api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class apigateway_endpoint_public(Check):
    def execute(self):
        findings = []
        for rest_api in apigateway_client.rest_apis:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = rest_api.region
            if rest_api.public_endpoint:
                report.status = "FAIL"

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "apigateway_logging_enabled",
  "CheckTitle": "Check if API Gateway has logging enabled.",
  "CheckType": ["Logging and Monitoring"],
  "CheckType": [
    "Logging and Monitoring"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "rest_api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class apigateway_logging_enabled(Check):
    def execute(self):
        findings = []
        for rest_api in apigateway_client.rest_apis:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = rest_api.region
            for stage in rest_api.stages:
                if stage.logging:

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "apigateway_waf_acl_attached",
  "CheckTitle": "Check if API Gateway has a WAF ACL attached.",
  "CheckType": ["Infrastructure Security"],
  "CheckType": [
    "Infrastructure Security"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "rest_api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class apigateway_waf_acl_attached(Check):
    def execute(self):
        findings = []
        for rest_api in apigateway_client.rest_apis:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = rest_api.region
            for stage in rest_api.stages:
                if stage.waf:

@@ -1,8 +1,10 @@
{
  "Provider": "aws",
  "CheckID": "apigatewayv2_authorizers_enabled",
  "CheckID": "apigatewayv2_access_logging_enabled",
  "CheckTitle": "Check if API Gateway V2 has configured authorizers.",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "rest_api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class apigatewayv2_access_logging_enabled(Check):
    def execute(self):
        findings = []
        for api in apigatewayv2_client.apis:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = api.region
            for stage in api.stages:
                if stage.logging:

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "apigatewayv2_authorizers_enabled",
  "CheckTitle": "Checks if API Gateway V2 has Access Logging enabled.",
  "CheckType": ["Logging and Monitoring"],
  "CheckType": [
    "Logging and Monitoring"
  ],
  "ServiceName": "apigateway",
  "SubServiceName": "api",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class apigatewayv2_authorizers_enabled(Check):
    def execute(self):
        findings = []
        for api in apigatewayv2_client.apis:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = api.region
            if api.authorizer:
                report.status = "PASS"

@@ -1,36 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "appstream_fleet_default_internet_access_disabled",
  "CheckTitle": "Ensure default Internet Access from your Amazon AppStream fleet streaming instances should remain unchecked.",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards", "CIS AWS Foundations Benchmark"],
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "appstream",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:appstream:region:account-id:fleet/resource-id",
  "Severity": "medium",
  "ResourceType": "AppStream",
  "Description": "Ensure default Internet Access from your Amazon AppStream fleet streaming instances should remain unchecked.",
  "Risk": "Default Internet Access from your fleet streaming instances should be controlled using a NAT gateway in the VPC.",
  "RelatedUrl": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Uncheck the default internet access for the AppStream Fleet.",
      "Url": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Security",
  "Compliance": []
  "Notes": "Infrastructure Security"
}

@@ -10,7 +10,7 @@ class appstream_fleet_default_internet_access_disabled(Check):
        """Execute the appstream_fleet_default_internet_access_disabled check"""
        findings = []
        for fleet in appstream_client.fleets:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = fleet.region
            report.resource_id = fleet.name
            report.resource_arn = fleet.arn

@@ -1,36 +1,36 @@
{
  "Provider": "aws",
  "CheckID": "appstream_fleet_maximum_session_duration",
  "CheckTitle": "Ensure user maximum session duration is no longer than 10 hours.",
  "CheckType": ["Infrastructure Security"],
  "CheckType": [
    "Infrastructure Security"
  ],
  "ServiceName": "appstream",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:appstream:region:account-id:fleet/resource-id",
  "Severity": "medium",
  "ResourceType": "AppStream",
  "Description": "Ensure user maximum session duration is no longer than 10 hours.",
  "Risk": "Having a session duration lasting longer than 10 hours should not be necessary and if running for any malicious reasons provides a greater time for usage than should be allowed.",
  "RelatedUrl": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Change the Maximum session duration to 600 minutes or less for the AppStream Fleet.",
      "Url": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Security",
  "Compliance": []
  "Notes": "Infrastructure Security"
}

@@ -13,7 +13,7 @@ class appstream_fleet_maximum_session_duration(Check):
        """Execute the appstream_fleet_maximum_session_duration check"""
        findings = []
        for fleet in appstream_client.fleets:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = fleet.region
            report.resource_id = fleet.name
            report.resource_arn = fleet.arn

@@ -1,36 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "appstream_fleet_session_disconnect_timeout",
  "CheckTitle": "Ensure session disconnect timeout is set to 5 minutes or less.",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards", "CIS AWS Foundations Benchmark"],
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "appstream",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:appstream:region:account-id:fleet/resource-id",
  "Severity": "medium",
  "ResourceType": "AppStream",
  "Description": "Ensure session disconnect timeout is set to 5 minutes or less",
  "Risk": "Disconnect timeout in minutes is the amount of time that a streaming session remains active after users disconnect.",
  "RelatedUrl": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Change the Disconnect timeout to 5 minutes or less for the AppStream Fleet.",
      "Url": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Security",
  "Compliance": []
  "Notes": "Infrastructure Security"
}

@@ -13,7 +13,7 @@ class appstream_fleet_session_disconnect_timeout(Check):
        """Execute the appstream_fleet_session_disconnect_timeout check"""
        findings = []
        for fleet in appstream_client.fleets:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = fleet.region
            report.resource_id = fleet.name
            report.resource_arn = fleet.arn

@@ -1,36 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "appstream_fleet_session_disconnect_timeout",
  "CheckID": "appstream_fleet_session_idle_disconnect_timeout",
  "CheckTitle": "Ensure session idle disconnect timeout is set to 10 minutes or less.",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards", "CIS AWS Foundations Benchmark"],
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "appstream",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:appstream:region:account-id:fleet/resource-id",
  "Severity": "medium",
  "ResourceType": "AppStream",
  "Description": "Ensure session idle disconnect timeout is set to 10 minutes or less.",
  "Risk": "Idle disconnect timeout in minutes is the amount of time that users can be inactive before they are disconnected from their streaming session and the Disconnect timeout in minutes time begins.",
  "RelatedUrl": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Change the session idle timeout to 10 minutes or less for the AppStream Fleet.",
      "Url": "https://docs.aws.amazon.com/appstream2/latest/developerguide/set-up-stacks-fleets.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Security",
  "Compliance": []
  "Notes": "Infrastructure Security"
}

@@ -15,7 +15,7 @@ class appstream_fleet_session_idle_disconnect_timeout(Check):
        """Execute the appstream_fleet_session_idle_disconnect_timeout check"""
        findings = []
        for fleet in appstream_client.fleets:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = fleet.region
            report.resource_id = fleet.name
            report.resource_arn = fleet.arn

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "autoscaling_find_secrets_ec2_launch_configuration",
  "CheckTitle": "Find secrets in EC2 Auto Scaling Launch Configuration",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "autoscaling",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:access-analyzer:region:account-id:analyzer/resource-id",
@@ -30,7 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": [
  ]
  "Notes": ""
}

@@ -13,7 +13,7 @@ class autoscaling_find_secrets_ec2_launch_configuration(Check):
    def execute(self):
        findings = []
        for configuration in autoscaling_client.launch_configurations:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = configuration.region
            report.resource_id = configuration.name
            report.resource_arn = configuration.arn

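The "Compliance" arrays being stripped from every per-check metadata file in this series are the counterpart of the new framework specifications (such as compliance/aws/ens_rd2022_aws.json) and the compliance_specification_dir setting added in config. A hypothetical sketch of a loader over that layout; the function name and keying here are illustrative, not the project's API:

import glob
import json
import os

def load_compliance_frameworks(spec_dir: str = "./compliance") -> dict:
    # One JSON file per framework, grouped by provider: compliance/<provider>/<framework>.json
    frameworks = {}
    for spec_file in glob.glob(os.path.join(spec_dir, "*", "*.json")):
        with open(spec_file) as f:
            frameworks[os.path.splitext(os.path.basename(spec_file))[0]] = json.load(f)
    return frameworks
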
@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -7,7 +7,7 @@ class awslambda_function_invoke_api_operations_cloudtrail_logging_enabled(Check)
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -10,7 +10,7 @@ class awslambda_function_no_secrets_in_code(Check):
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -11,7 +11,7 @@ class awslambda_function_no_secrets_in_variables(Check):
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class awslambda_function_not_publicly_accessible(Check):
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class awslambda_function_url_cors_policy(Check):
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -8,7 +8,7 @@ class awslambda_function_url_public(Check):
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -7,7 +7,7 @@ class awslambda_function_using_supported_runtimes(Check):
    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = function.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Protection",
  "Compliance": []
  "Notes": "Infrastructure Protection"
}

@@ -17,7 +17,7 @@ class cloudformation_outputs_find_secrets(Check):
        """Execute the cloudformation_outputs_find_secrets check"""
        findings = []
        for stack in cloudformation_client.stacks:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = stack.region
            report.resource_id = stack.name
            report.resource_arn = stack.arn

@@ -30,6 +30,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Protection",
  "Compliance": []
  "Notes": "Infrastructure Protection"
}

@@ -12,7 +12,7 @@ class cloudformation_stacks_termination_protection_enabled(Check):
        findings = []
        for stack in cloudformation_client.stacks:
            if not stack.is_nested_stack:
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                report.region = stack.region
                report.resource_id = stack.name
                report.resource_arn = stack.arn

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "cloudfront_distributions_field_level_encryption_enabled",
  "CheckTitle": "Check if CloudFront distributions have Field Level Encryption enabled.",
  "CheckType": [""],
  "CheckType": [
    ""
  ],
  "ServiceName": "cloudfront",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:cloudfront:region:account-id:distribution/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Data Protection",
  "Compliance": []
  "Notes": "Data Protection"
}

@@ -6,7 +6,7 @@ class cloudfront_distributions_field_level_encryption_enabled(Check):
    def execute(self):
        findings = []
        for distribution in cloudfront_client.distributions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = distribution.region
            report.resource_arn = distribution.arn
            report.resource_id = distribution.id

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "cloudfront_distributions_geo_restrictions_enabled",
  "CheckTitle": "Check if Geo restrictions are enabled in CloudFront distributions.",
  "CheckType": [""],
  "CheckType": [
    ""
  ],
  "ServiceName": "cloudfront",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:cloudfront:region:account-id:distribution/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Infrastructure Security",
  "Compliance": []
  "Notes": "Infrastructure Security"
}

@@ -7,7 +7,7 @@ class cloudfront_distributions_geo_restrictions_enabled(Check):
    def execute(self):
        findings = []
        for distribution in cloudfront_client.distributions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = distribution.region
            report.resource_arn = distribution.arn
            report.resource_id = distribution.id

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "cloudfront_distributions_https_enabled",
  "CheckTitle": "Check if CloudFront distributions are set to HTTPS.",
  "CheckType": [""],
  "CheckType": [
    ""
  ],
  "ServiceName": "cloudfront",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:cloudfront:region:account-id:distribution/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -7,7 +7,7 @@ class cloudfront_distributions_https_enabled(Check):
    def execute(self):
        findings = []
        for distribution in cloudfront_client.distributions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = distribution.region
            report.resource_arn = distribution.arn
            report.resource_id = distribution.id

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "cloudfront_distributions_logging_enabled",
  "CheckTitle": "Check if CloudFront distributions have logging enabled.",
  "CheckType": [""],
  "CheckType": [
    ""
  ],
  "ServiceName": "cloudfront",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:cloudfront:region:account-id:distribution/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Logging and Monitoring",
  "Compliance": []
  "Notes": "Logging and Monitoring"
}

@@ -6,7 +6,7 @@ class cloudfront_distributions_logging_enabled(Check):
    def execute(self):
        findings = []
        for distribution in cloudfront_client.distributions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = distribution.region
            report.resource_arn = distribution.arn
            report.resource_id = distribution.id

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "cloudfront_distributions_using_deprecated_ssl_protocols",
  "CheckTitle": "Check if CloudFront distributions are using deprecated SSL protocols.",
  "CheckType": [""],
  "CheckType": [
    ""
  ],
  "ServiceName": "cloudfront",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:cloudfront:region:account-id:distribution/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -7,7 +7,7 @@ class cloudfront_distributions_using_deprecated_ssl_protocols(Check):
    def execute(self):
        findings = []
        for distribution in cloudfront_client.distributions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = distribution.region
            report.resource_arn = distribution.arn
            report.resource_id = distribution.id

@@ -2,7 +2,9 @@
  "Provider": "aws",
  "CheckID": "cloudfront_distributions_using_waf",
  "CheckTitle": "Check if CloudFront distributions are using WAF.",
  "CheckType": ["IAM"],
  "CheckType": [
    "IAM"
  ],
  "ServiceName": "cloudfront",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:cloudfront:region:account-id:distribution/resource-id",
@@ -30,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -6,7 +6,7 @@ class cloudfront_distributions_using_waf(Check):
    def execute(self):
        findings = []
        for distribution in cloudfront_client.distributions.values():
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = distribution.region
            report.resource_arn = distribution.arn
            report.resource_id = distribution.id

@@ -1,35 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "cloudtrail_cloudwatch_logging_enabled",
  "CheckTitle": "Ensure CloudTrail trails are integrated with CloudWatch Logs",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards", "CIS AWS Foundations Benchmark"],
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "low",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail trails are integrated with CloudWatch Logs",
  "Risk": "Sending CloudTrail logs to CloudWatch Logs will facilitate real-time and historic activity logging based on user; API; resource; and IP address; and provides opportunity to establish alarms and notifications for anomalous or sensitive account activity.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail update-trail --name <trail_name> --cloudwatch-logs-log-group- arn <cloudtrail_log_group_arn> --cloudwatch-logs-role-arn <cloudtrail_cloudwatchLogs_role_arn>",
      "NativeIaC": "",
      "Other": "https://docs.bridgecrew.io/docs/logging_4#aws-console",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Validate that the trails in CloudTrail have an ARN set in the CloudWatchLogsLogGroupArn property.",
      "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

@@ -11,7 +11,7 @@ class cloudtrail_cloudwatch_logging_enabled(Check):
        findings = []
        for trail in cloudtrail_client.trails:
            if trail.name:
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                report.region = trail.region
                report.resource_id = trail.name
                report.resource_arn = trail.arn

@@ -1,35 +1,38 @@
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "cloudtrail_kms_encryption_enabled",
|
||||
"CheckTitle": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
|
||||
"CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards" ,"CIS AWS Foundations Benchmark"],
|
||||
"ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
  "Risk": "By default; the log files delivered by CloudTrail to your bucket are encrypted by Amazon server-side encryption with Amazon S3-managed encryption keys (SSE-S3). To provide a security layer that is directly manageable; you can instead use server-side encryption with AWS KMS–managed keys (SSE-KMS) for your CloudTrail log files.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail update-trail --name <trail_name> --kms-id <cloudtrail_kms_key> aws kms put-key-policy --key-id <cloudtrail_kms_key> --policy <cloudtrail_kms_key_policy>",
      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_7#fix---buildtime",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "This approach has the following advantages: You can create and manage the CMK encryption keys yourself. You can use a single CMK to encrypt and decrypt log files for multiple accounts across all regions. You have control over who can use your key for encrypting and decrypting CloudTrail log files. You can assign permissions for the key to the users. You have enhanced security.",
      "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
}
  "Provider": "aws",
  "CheckID": "cloudtrail_kms_encryption_enabled",
  "CheckTitle": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
  "Risk": "By default; the log files delivered by CloudTrail to your bucket are encrypted by Amazon server-side encryption with Amazon S3-managed encryption keys (SSE-S3). To provide a security layer that is directly manageable; you can instead use server-side encryption with AWS KMS–managed keys (SSE-KMS) for your CloudTrail log files.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail update-trail --name <trail_name> --kms-id <cloudtrail_kms_key> aws kms put-key-policy --key-id <cloudtrail_kms_key> --policy <cloudtrail_kms_key_policy>",
      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_7#fix---buildtime",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "This approach has the following advantages: You can create and manage the CMK encryption keys yourself. You can use a single CMK to encrypt and decrypt log files for multiple accounts across all regions. You have control over who can use your key for encrypting and decrypting CloudTrail log files. You can assign permissions for the key to the users. You have enhanced security.",
      "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}

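The "CLI" remediation string above chains two AWS CLI calls. As a hedged illustration only, the same two steps could be scripted with boto3; the trail name, key ID, and policy body below are placeholders, not part of this change:

# Illustrative sketch of the two-step KMS remediation; names are placeholders.
import json

import boto3

cloudtrail = boto3.client("cloudtrail")
kms = boto3.client("kms")

# 1) Point the trail at a KMS key so log files are delivered with SSE-KMS.
cloudtrail.update_trail(
    Name="trail_name",
    KmsKeyId="1234abcd-12ab-34cd-56ef-1234567890ab",
)

# 2) Attach a key policy that allows CloudTrail to use the key
#    (statements elided here; a real policy must grant cloudtrail.amazonaws.com).
policy = {"Version": "2012-10-17", "Statement": []}
kms.put_key_policy(
    KeyId="1234abcd-12ab-34cd-56ef-1234567890ab",
    PolicyName="default",
    Policy=json.dumps(policy),
)
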
@@ -7,7 +7,7 @@ class cloudtrail_kms_encryption_enabled(Check):
        findings = []
        for trail in cloudtrail_client.trails:
            if trail.name:
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                report.region = trail.region
                report.resource_id = trail.name
                report.resource_arn = trail.arn

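The one-line change repeated across these check hunks calls the metadata accessor instead of passing the bound method object itself. A minimal standalone illustration of the difference, using a hypothetical class rather than Prowler's own:

# Hypothetical minimal example of the bug pattern fixed in these hunks.
class Check:
    def metadata(self):
        return {"CheckID": "example_check"}


check = Check()
print(check.metadata)    # <bound method Check.metadata of ...> - a callable, not data
print(check.metadata())  # {'CheckID': 'example_check'} - the metadata itself
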
@@ -1,35 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "cloudtrail_log_file_validation_enabled",
  "CheckTitle": "Ensure CloudTrail log file validation is enabled",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards" ,"CIS AWS Foundations Benchmark"],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail log file validation is enabled",
  "Risk": "Enabling log file validation will provide additional integrity checking of CloudTrail logs. ",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail update-trail --name <trail_name> --enable-log-file-validation",
      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_2#cloudformation",
      "Other": "",
      "Terraform": "https://docs.bridgecrew.io/docs/logging_2#terraform"
    },
    "Recommendation": {
      "Text": "Ensure LogFileValidationEnabled is set to true for each trail.",
      "Url": "http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-filevalidation-enabling.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
}
  "Provider": "aws",
  "CheckID": "cloudtrail_log_file_validation_enabled",
  "CheckTitle": "Ensure CloudTrail log file validation is enabled",
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail log file validation is enabled",
  "Risk": "Enabling log file validation will provide additional integrity checking of CloudTrail logs. ",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail update-trail --name <trail_name> --enable-log-file-validation",
      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_2#cloudformation",
      "Other": "",
      "Terraform": "https://docs.bridgecrew.io/docs/logging_2#terraform"
    },
    "Recommendation": {
      "Text": "Ensure LogFileValidationEnabled is set to true for each trail.",
      "Url": "http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-filevalidation-enabling.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}

@@ -7,7 +7,7 @@ class cloudtrail_log_file_validation_enabled(Check):
        findings = []
        for trail in cloudtrail_client.trails:
            if trail.name:
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                report.region = trail.region
                report.resource_id = trail.name
                report.resource_arn = trail.arn

@@ -1,35 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "cloudtrail_logs_s3_bucket_access_logging_enabled",
  "CheckTitle": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards" ,"CIS AWS Foundations Benchmark"],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
  "Risk": "Server access logs can assist you in security and access audits; help you learn about your customer base; and understand your Amazon S3 bill.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "https://docs.bridgecrew.io/docs/logging_6#aws-console",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Ensure that S3 buckets have Logging enabled. CloudTrail data events can be used in place of S3 bucket logging. If that is the case; this finding can be considered a false positive.",
      "Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
}
  "Provider": "aws",
  "CheckID": "cloudtrail_logs_s3_bucket_access_logging_enabled",
  "CheckTitle": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
  "Risk": "Server access logs can assist you in security and access audits; help you learn about your customer base; and understand your Amazon S3 bill.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "https://docs.bridgecrew.io/docs/logging_6#aws-console",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Ensure that S3 buckets have Logging enabled. CloudTrail data events can be used in place of S3 bucket logging. If that is the case; this finding can be considered a false positive.",
      "Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}

@@ -9,7 +9,7 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
        for trail in cloudtrail_client.trails:
            if trail.name:
                trail_bucket = trail.s3_bucket
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                report.region = trail.region
                report.resource_id = trail.name
                report.resource_arn = trail.arn

@@ -1,35 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "cloudtrail_logs_s3_bucket_is_not_publicly_accessible",
  "CheckTitle": "Ensure the S3 bucket CloudTrail logs is not publicly accessible",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards" ,"CIS AWS Foundations Benchmark"],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "critical",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure the S3 bucket CloudTrail logs to is not publicly accessible",
  "Risk": "Allowing public access to CloudTrail log content may aid an adversary in identifying weaknesses in the affected accounts use or configuration.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "https://docs.bridgecrew.io/docs/logging_3#aws-console",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Analyze Bucket policy to validate appropriate permissions. Ensure the AllUsers principal is not granted privileges. Ensure the AuthenticatedUsers principal is not granted privileges.",
      "Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
}
  "Provider": "aws",
  "CheckID": "cloudtrail_logs_s3_bucket_is_not_publicly_accessible",
  "CheckTitle": "Ensure the S3 bucket CloudTrail logs is not publicly accessible",
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "critical",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure the S3 bucket CloudTrail logs to is not publicly accessible",
  "Risk": "Allowing public access to CloudTrail log content may aid an adversary in identifying weaknesses in the affected accounts use or configuration.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "https://docs.bridgecrew.io/docs/logging_3#aws-console",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Analyze Bucket policy to validate appropriate permissions. Ensure the AllUsers principal is not granted privileges. Ensure the AuthenticatedUsers principal is not granted privileges.",
      "Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}

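The recommendation above centers on the AllUsers and AuthenticatedUsers principals. A hedged sketch of the kind of ACL test this implies; the helper name is illustrative and not Prowler's code, though the two group URIs are the standard AWS identifiers:

# Illustrative helper, not Prowler's implementation.
PUBLIC_ACL_GROUPS = (
    "http://acs.amazonaws.com/groups/global/AllUsers",
    "http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
)


def acl_grants_public_access(grants):
    """grants: the 'Grants' list returned by s3.get_bucket_acl()."""
    return any(
        grant.get("Grantee", {}).get("URI") in PUBLIC_ACL_GROUPS
        for grant in grants
    )
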
@@ -9,7 +9,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
        for trail in cloudtrail_client.trails:
            if trail.name:
                trail_bucket = trail.s3_bucket
                report = Check_Report(self.metadata)
                report = Check_Report(self.metadata())
                report.region = trail.region
                report.resource_id = trail.name
                report.resource_arn = trail.arn

@@ -1,46 +1,38 @@
{
  "Provider": "aws",
  "CheckID": "cloudtrail_multi_region_enabled",
  "CheckTitle": "Ensure CloudTrail is enabled in all regions",
  "CheckType": ["Software and Configuration Checks", "Industry and Regulatory Standards" ,"CIS AWS Foundations Benchmark"],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "high",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail is enabled in all regions",
  "Risk": "AWS CloudTrail is a web service that records AWS API calls for your account and delivers log files to you. The recorded information includes the identity of the API caller; the time of the API call; the source IP address of the API caller; the request parameters; and the response elements returned by the AWS service.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail create-trail --name <trail_name> --bucket-name <s3_bucket_for_cloudtrail> --is-multi-region-trail aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail ",
      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_1#cloudformation",
      "Other": "https://docs.bridgecrew.io/docs/logging_1#aws-console",
      "Terraform": "https://docs.bridgecrew.io/docs/logging_1#terraform"
    },
    "Recommendation": {
      "Text": "Ensure Logging is set to ON on all regions (even if they are not being used at the moment.",
      "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrailconcepts.html#cloudtrail-concepts-management-events"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": [
    {
      "Control": [
        "2.1"
      ],
      "Framework": "CIS-AWS",
      "Group": [
        "level1"
      ],
      "Version": "1.4"
    }
  ]
}
  "Provider": "aws",
  "CheckID": "cloudtrail_multi_region_enabled",
  "CheckTitle": "Ensure CloudTrail is enabled in all regions",
  "CheckType": [
    "Software and Configuration Checks",
    "Industry and Regulatory Standards",
    "CIS AWS Foundations Benchmark"
  ],
  "ServiceName": "cloudtrail",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "high",
  "ResourceType": "AwsCloudTrailTrail",
  "Description": "Ensure CloudTrail is enabled in all regions",
  "Risk": "AWS CloudTrail is a web service that records AWS API calls for your account and delivers log files to you. The recorded information includes the identity of the API caller; the time of the API call; the source IP address of the API caller; the request parameters; and the response elements returned by the AWS service.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "aws cloudtrail create-trail --name <trail_name> --bucket-name <s3_bucket_for_cloudtrail> --is-multi-region-trail aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail ",
      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_1#cloudformation",
      "Other": "https://docs.bridgecrew.io/docs/logging_1#aws-console",
      "Terraform": "https://docs.bridgecrew.io/docs/logging_1#terraform"
    },
    "Recommendation": {
      "Text": "Ensure Logging is set to ON on all regions (even if they are not being used at the moment.",
      "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrailconcepts.html#cloudtrail-concepts-management-events"
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}

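The CLI remediation for this check likewise chains a create-trail and an update-trail call. A boto3 sketch of the same steps, using the placeholder names from the CLI string and offered as illustration only:

# Illustrative sketch mirroring the CLI remediation above; names are placeholders.
import boto3

cloudtrail = boto3.client("cloudtrail")

# Create a new multi-region trail...
cloudtrail.create_trail(
    Name="trail_name",
    S3BucketName="s3_bucket_for_cloudtrail",
    IsMultiRegionTrail=True,
)
# ...or flip an existing trail to multi-region.
cloudtrail.update_trail(Name="trail_name", IsMultiRegionTrail=True)
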
@@ -7,7 +7,7 @@ class cloudtrail_multi_region_enabled(Check):
        findings = []
        actual_region = None
        for trail in cloudtrail_client.trails:
            report = Check_Report(self.metadata)
            report = Check_Report(self.metadata())
            report.region = trail.region
            if trail.name:  # Check if there are trails in region
                # Check if region has changed and add report of previous region

@@ -32,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}

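These tail hunks drop the per-check "Compliance" key from the metadata files. A hypothetical sketch of a directory-based loader that could supply framework data instead; the layout and function name are assumptions for illustration, not necessarily this commit's implementation:

# Hypothetical compliance loader sketch; layout and names are assumed.
import glob
import json
import os


def load_compliance_frameworks(provider: str, spec_dir: str = "compliance") -> list:
    """Parse every JSON compliance spec for a provider into a list of dicts."""
    frameworks = []
    for path in sorted(glob.glob(os.path.join(spec_dir, provider, "*.json"))):
        with open(path) as spec_file:
            frameworks.append(json.load(spec_file))
    return frameworks


# Usage sketch: frameworks = load_compliance_frameworks("aws")
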
@@ -5,7 +5,7 @@ from providers.aws.services.cloudtrail.cloudtrail_client import cloudtrail_clien
class cloudtrail_s3_dataevents_read_enabled(Check):
    def execute(self):
        findings = []
        report = Check_Report(self.metadata)
        report = Check_Report(self.metadata())
        report.region = cloudtrail_client.region
        report.resource_id = "No trails"
        report.resource_arn = "No trails"

@@ -32,6 +32,5 @@
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "",
  "Compliance": []
  "Notes": ""
}
