mirror of https://github.com/ghndrx/prowler.git (synced 2026-02-10 06:45:08 +00:00)
build(deps-dev): bump black from 22.12.0 to 24.1.1 (#3356)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
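
Most of the Python churn in this diff is mechanical fallout from the Black 24 stable style: when an assignment no longer fits on one line, Black now parenthesizes the right-hand side instead of splitting the subscripted target, and nested conditional expressions get their own parentheses. A minimal sketch of the two patterns, with hypothetical names rather than code from this repository:

# Hypothetical illustration of the Black 24 changes applied in the hunks below.
workgroups = {}
configuration = {"EnforceWorkGroupConfiguration": True, "autoCreateSubnetworks": False}
arn = "arn:aws:athena:eu-west-1:123456789012:workgroup/primary"

# Black 22 split the subscripted assignment target when the statement was too long:
#     workgroups[
#         arn
#     ] = configuration.get(
#         "EnforceWorkGroupConfiguration", False
#     )
# Black 24 keeps the target intact and wraps the value instead (it only does this
# when the one-line form would exceed the line length; wrapped here to illustrate):
workgroups[arn] = (
    configuration.get("EnforceWorkGroupConfiguration", False)
)

# Black 22 stacked nested conditional expressions:
#     mode = (
#         "legacy"
#         if "autoCreateSubnetworks" not in configuration
#         else "auto"
#         if configuration["autoCreateSubnetworks"]
#         else "custom"
#     )
# Black 24 parenthesizes the nested conditional:
mode = (
    "legacy"
    if "autoCreateSubnetworks" not in configuration
    else ("auto" if configuration["autoCreateSubnetworks"] else "custom")
)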
@@ -1,7 +1,7 @@
 repos:
   ## GENERAL
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: check-merge-conflict
       - id: check-yaml
@@ -15,7 +15,7 @@ repos:
 
   ## TOML
   - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-    rev: v2.10.0
+    rev: v2.12.0
     hooks:
       - id: pretty-format-toml
         args: [--autofix]
@@ -28,7 +28,7 @@ repos:
       - id: shellcheck
   ## PYTHON
   - repo: https://github.com/myint/autoflake
-    rev: v2.2.0
+    rev: v2.2.1
     hooks:
       - id: autoflake
         args:
@@ -39,25 +39,25 @@ repos:
           ]
 
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
         args: ["--profile", "black"]
 
   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 24.1.1
     hooks:
       - id: black
 
   - repo: https://github.com/pycqa/flake8
-    rev: 6.1.0
+    rev: 7.0.0
     hooks:
       - id: flake8
         exclude: contrib
         args: ["--ignore=E266,W503,E203,E501,W605"]
 
   - repo: https://github.com/python-poetry/poetry
-    rev: 1.6.0 # add version here
+    rev: 1.7.0
     hooks:
       - id: poetry-check
       - id: poetry-lock

poetry.lock (generated, 58 lines changed)
@@ -332,36 +332,47 @@ yaml = ["PyYAML"]
 
 [[package]]
 name = "black"
-version = "22.12.0"
+version = "24.1.1"
 description = "The uncompromising code formatter."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
-    {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
-    {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
-    {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
-    {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
-    {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
-    {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
-    {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
-    {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
-    {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
-    {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
-    {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
+    {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"},
+    {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"},
+    {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"},
+    {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"},
+    {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"},
+    {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"},
+    {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"},
+    {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"},
+    {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"},
+    {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"},
+    {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"},
+    {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"},
+    {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"},
+    {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"},
+    {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"},
+    {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"},
+    {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"},
+    {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"},
+    {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"},
+    {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"},
+    {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"},
+    {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"},
 ]
 
 [package.dependencies]
 click = ">=8.0.0"
 mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
 pathspec = ">=0.9.0"
 platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
 jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
 uvloop = ["uvloop (>=0.15.2)"]
 
@@ -1901,18 +1912,15 @@ openapi-schema-validator = ">=0.6.0,<0.7.0"
 
 [[package]]
 name = "packaging"
-version = "21.3"
+version = "23.0"
 description = "Core utilities for Python packages"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 files = [
-    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
-    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+    {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
+    {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
 ]
 
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
-
 [[package]]
 name = "paginate"
 version = "0.5.6"
@@ -3370,4 +3378,4 @@ docs = ["mkdocs", "mkdocs-material"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<3.12"
-content-hash = "d18fadad137b8a53915ec624ab94c527addd6372e3f3c7daae36daa874c901bc"
+content-hash = "7f049b1d5545c8024d977255ab1f42396e15f7c041d4c25e0393ca4778876938"

@@ -67,9 +67,9 @@ def bulk_load_compliance_frameworks(provider: str) -> dict:
                 # cis_v1.4_aws.json --> cis_v1.4_aws
                 compliance_framework_name = filename.split(".json")[0]
                 # Store the compliance info
-                bulk_compliance_frameworks[
-                    compliance_framework_name
-                ] = load_compliance_framework(file_path)
+                bulk_compliance_frameworks[compliance_framework_name] = (
+                    load_compliance_framework(file_path)
+                )
     except Exception as e:
         logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
 

@@ -51,9 +51,9 @@ def fill_json_asff(finding_output, audit_info, finding, output_options):
         finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
         finding_output.AwsAccountId = audit_info.audited_account
         finding_output.Types = finding.check_metadata.CheckType
-        finding_output.FirstObservedAt = (
-            finding_output.UpdatedAt
-        ) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        finding_output.FirstObservedAt = finding_output.UpdatedAt = (
+            finding_output.CreatedAt
+        ) = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
         finding_output.Severity = Severity(
             Label=finding.check_metadata.Severity.upper()
         )

@@ -55,9 +55,9 @@ def generate_provider_output_csv(
             data["resource_name"] = finding.resource_name
             data["subscription"] = finding.subscription
             data["tenant_domain"] = audit_info.identity.domain
-            data[
-                "finding_unique_id"
-            ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
+            data["finding_unique_id"] = (
+                f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
+            )
             data["compliance"] = unroll_dict(
                 get_check_compliance(finding, provider, output_options)
             )
@@ -68,9 +68,9 @@ def generate_provider_output_csv(
             data["resource_name"] = finding.resource_name
             data["project_id"] = finding.project_id
             data["location"] = finding.location.lower()
-            data[
-                "finding_unique_id"
-            ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
+            data["finding_unique_id"] = (
+                f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
+            )
             data["compliance"] = unroll_dict(
                 get_check_compliance(finding, provider, output_options)
             )
@@ -82,9 +82,9 @@ def generate_provider_output_csv(
             data["region"] = finding.region
             data["resource_id"] = finding.resource_id
             data["resource_arn"] = finding.resource_arn
-            data[
-                "finding_unique_id"
-            ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
+            data["finding_unique_id"] = (
+                f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
+            )
             data["compliance"] = unroll_dict(
                 get_check_compliance(finding, provider, output_options)
             )

@@ -54,10 +54,8 @@ class Athena(AWSService):
                 )
 
                 wg_configuration = wg.get("WorkGroup").get("Configuration")
-                self.workgroups[
-                    workgroup.arn
-                ].enforce_workgroup_configuration = wg_configuration.get(
-                    "EnforceWorkGroupConfiguration", False
+                self.workgroups[workgroup.arn].enforce_workgroup_configuration = (
+                    wg_configuration.get("EnforceWorkGroupConfiguration", False)
                 )
 
                 # We include an empty EncryptionConfiguration to handle if the workgroup does not have encryption configured
@@ -77,9 +75,9 @@ class Athena(AWSService):
                     encryption_configuration = EncryptionConfiguration(
                         encryption_option=encryption, encrypted=True
                     )
-                    self.workgroups[
-                        workgroup.arn
-                    ].encryption_configuration = encryption_configuration
+                    self.workgroups[workgroup.arn].encryption_configuration = (
+                        encryption_configuration
+                    )
         except Exception as error:
             logger.error(
                 f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

@@ -53,12 +53,12 @@ class CloudFront(AWSService):
             distributions[distribution_id].logging_enabled = distribution_config[
                 "DistributionConfig"
             ]["Logging"]["Enabled"]
-            distributions[
-                distribution_id
-            ].geo_restriction_type = GeoRestrictionType(
-                distribution_config["DistributionConfig"]["Restrictions"][
-                    "GeoRestriction"
-                ]["RestrictionType"]
+            distributions[distribution_id].geo_restriction_type = (
+                GeoRestrictionType(
+                    distribution_config["DistributionConfig"]["Restrictions"][
+                        "GeoRestriction"
+                    ]["RestrictionType"]
+                )
             )
             distributions[distribution_id].web_acl_id = distribution_config[
                 "DistributionConfig"
@@ -78,9 +78,9 @@ class CloudFront(AWSService):
                     "DefaultCacheBehavior"
                 ].get("FieldLevelEncryptionId"),
             )
-            distributions[
-                distribution_id
-            ].default_cache_config = default_cache_config
+            distributions[distribution_id].default_cache_config = (
+                default_cache_config
+            )
 
         except Exception as error:
             logger.error(

@@ -218,9 +218,9 @@ class LogGroup(BaseModel):
     never_expire: bool
     kms_id: Optional[str]
     region: str
-    log_streams: dict[
-        str, list[str]
-    ] = {}  # Log stream name as the key, array of events as the value
+    log_streams: dict[str, list[str]] = (
+        {}
+    )  # Log stream name as the key, array of events as the value
     tags: Optional[list] = []
 
 

@@ -108,9 +108,9 @@ class EMR(AWSService):
                 master_public_dns_name = cluster_info["Cluster"].get(
                     "MasterPublicDnsName"
                 )
-                self.clusters[
-                    cluster.id
-                ].master_public_dns_name = master_public_dns_name
+                self.clusters[cluster.id].master_public_dns_name = (
+                    master_public_dns_name
+                )
                 # Set cluster Public/Private
                 # Public EMR cluster have their DNS ending with .amazonaws.com
                 # while private ones have format of ip-xxx-xx-xx.us-east-1.compute.internal.
@@ -136,12 +136,12 @@ class EMR(AWSService):
                 regional_client.get_block_public_access_configuration()
             )
 
-            self.block_public_access_configuration[
-                regional_client.region
-            ] = BlockPublicAccessConfiguration(
-                block_public_security_group_rules=block_public_access_configuration[
-                    "BlockPublicAccessConfiguration"
-                ]["BlockPublicSecurityGroupRules"]
+            self.block_public_access_configuration[regional_client.region] = (
+                BlockPublicAccessConfiguration(
+                    block_public_security_group_rules=block_public_access_configuration[
+                        "BlockPublicAccessConfiguration"
+                    ]["BlockPublicSecurityGroupRules"]
+                )
             )
         except Exception as error:
             logger.error(

@@ -636,9 +636,9 @@ class IAM(AWSService):
                             entity=policy["PolicyId"],
                             version_id=policy["DefaultVersionId"],
                             type="Custom" if scope == "Local" else "AWS",
-                            attached=True
-                            if policy["AttachmentCount"] > 0
-                            else False,
+                            attached=(
+                                True if policy["AttachmentCount"] > 0 else False
+                            ),
                         )
                     )
         except Exception as error:
@@ -867,9 +867,9 @@ class IAM(AWSService):
                     services_accessed > 0 and access_keys_number > 0
                 )
 
-                self.user_temporary_credentials_usage[
-                    user_data
-                ] = temporary_credentials_usage
+                self.user_temporary_credentials_usage[user_data] = (
+                    temporary_credentials_usage
+                )
 
         except Exception as error:
             logger.error(

@@ -87,12 +87,12 @@ class Route53(AWSService):
             )
             for page in list_query_logging_configs_paginator.paginate():
                 for logging_config in page["QueryLoggingConfigs"]:
-                    self.hosted_zones[
-                        hosted_zone.id
-                    ].logging_config = LoggingConfig(
-                        cloudwatch_log_group_arn=logging_config[
-                            "CloudWatchLogsLogGroupArn"
-                        ]
+                    self.hosted_zones[hosted_zone.id].logging_config = (
+                        LoggingConfig(
+                            cloudwatch_log_group_arn=logging_config[
+                                "CloudWatchLogsLogGroupArn"
+                            ]
+                        )
                     )
 
         except Exception as error:

@@ -114,11 +114,13 @@ class Compute(GCPService):
                         disks_encryption=[
                             (
                                 disk["deviceName"],
-                                True
-                                if disk.get("diskEncryptionKey", {}).get(
-                                    "sha256"
-                                )
-                                else False,
+                                (
+                                    True
+                                    if disk.get("diskEncryptionKey", {}).get(
+                                        "sha256"
+                                    )
+                                    else False
+                                ),
                             )
                             for disk in instance["disks"]
                         ],
@@ -144,9 +146,9 @@ class Compute(GCPService):
                 subnet_mode = (
                     "legacy"
                     if "autoCreateSubnetworks" not in network
-                    else "auto"
-                    if network["autoCreateSubnetworks"]
-                    else "custom"
+                    else (
+                        "auto" if network["autoCreateSubnetworks"] else "custom"
+                    )
                 )
                 self.networks.append(
                     Network(

@@ -57,7 +57,7 @@ docs = ["mkdocs", "mkdocs-material"]
 
 [tool.poetry.group.dev.dependencies]
 bandit = "1.7.6"
-black = "22.12.0"
+black = "24.1.1"
 coverage = "7.4.0"
 docker = "7.0.0"
 flake8 = "7.0.0"

@@ -455,9 +455,9 @@ class Test_Outputs:
         expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
         expected.AwsAccountId = AWS_ACCOUNT_ID
         expected.Types = finding.check_metadata.CheckType
-        expected.FirstObservedAt = (
-            expected.UpdatedAt
-        ) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        expected.FirstObservedAt = expected.UpdatedAt = expected.CreatedAt = (
+            timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        )
         expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
         expected.Title = finding.check_metadata.CheckTitle
         expected.Description = finding.status_extended
@@ -536,9 +536,9 @@ class Test_Outputs:
         expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
         expected.AwsAccountId = AWS_ACCOUNT_ID
         expected.Types = finding.check_metadata.CheckType
-        expected.FirstObservedAt = (
-            expected.UpdatedAt
-        ) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        expected.FirstObservedAt = expected.UpdatedAt = expected.CreatedAt = (
+            timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        )
         expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
         expected.Title = finding.check_metadata.CheckTitle
         expected.Description = finding.status_extended
@@ -563,12 +563,12 @@ class Test_Outputs:
             # "Code": finding.check_metadata.Remediation.Code,
         }
 
-        expected.Remediation[
-            "Recommendation"
-        ].Text = finding.check_metadata.Remediation.Recommendation.Text
-        expected.Remediation[
-            "Recommendation"
-        ].Url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+        expected.Remediation["Recommendation"].Text = (
+            finding.check_metadata.Remediation.Recommendation.Text
+        )
+        expected.Remediation["Recommendation"].Url = (
+            "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+        )
 
         input = Check_Output_JSON_ASFF()
         output_options = mock.MagicMock()
@@ -627,9 +627,9 @@ class Test_Outputs:
         expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
         expected.AwsAccountId = AWS_ACCOUNT_ID
         expected.Types = finding.check_metadata.CheckType
-        expected.FirstObservedAt = (
-            expected.UpdatedAt
-        ) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        expected.FirstObservedAt = expected.UpdatedAt = expected.CreatedAt = (
+            timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        )
         expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
         expected.Title = finding.check_metadata.CheckTitle
         expected.Description = finding.status_extended[:1000] + "..."
@@ -654,12 +654,12 @@ class Test_Outputs:
             # "Code": finding.check_metadata.Remediation.Code,
         }
 
-        expected.Remediation[
-            "Recommendation"
-        ].Text = finding.check_metadata.Remediation.Recommendation.Text
-        expected.Remediation[
-            "Recommendation"
-        ].Url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+        expected.Remediation["Recommendation"].Text = (
+            finding.check_metadata.Remediation.Recommendation.Text
+        )
+        expected.Remediation["Recommendation"].Url = (
+            "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+        )
 
         input = Check_Output_JSON_ASFF()
         output_options = mock.MagicMock()
@@ -866,9 +866,9 @@ class Test_Outputs:
         expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
         expected.AwsAccountId = AWS_ACCOUNT_ID
         expected.Types = finding.check_metadata.CheckType
-        expected.FirstObservedAt = (
-            expected.UpdatedAt
-        ) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        expected.FirstObservedAt = expected.UpdatedAt = expected.CreatedAt = (
+            timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+        )
         expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
         expected.Title = finding.check_metadata.CheckTitle
         expected.Description = finding.status_extended
@@ -935,12 +935,12 @@ class Test_Outputs:
            # "Code": finding.check_metadata.Remediation.Code,
         }
 
-        expected.Remediation[
-            "Recommendation"
-        ].Text = finding.check_metadata.Remediation.Recommendation.Text
-        expected.Remediation[
-            "Recommendation"
-        ].Url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+        expected.Remediation["Recommendation"].Text = (
+            finding.check_metadata.Remediation.Recommendation.Text
+        )
+        expected.Remediation["Recommendation"].Url = (
+            "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+        )
 
         input = Check_Output_JSON_ASFF()
         output_options = mock.MagicMock()
@@ -1332,12 +1332,12 @@ class Test_Outputs:
 
         output_options = mock.MagicMock()
         output_options.bulk_checks_metadata = {}
-        output_options.bulk_checks_metadata[
-            "iam_user_accesskey_unused"
-        ] = mock.MagicMock()
-        output_options.bulk_checks_metadata[
-            "iam_user_accesskey_unused"
-        ].Compliance = bulk_check_metadata
+        output_options.bulk_checks_metadata["iam_user_accesskey_unused"] = (
+            mock.MagicMock()
+        )
+        output_options.bulk_checks_metadata["iam_user_accesskey_unused"].Compliance = (
+            bulk_check_metadata
+        )
 
         assert get_check_compliance(finding, "aws", output_options) == {
             "CIS-1.4": ["2.1.3"],

@@ -445,7 +445,9 @@ class Test_iam_inline_policy_no_administrative_privileges:
         iam_client = client("iam")
         # Create IAM User
         user_name = "test_user"
-        user_arn = iam_client.create_user(UserName=user_name,)[
+        user_arn = iam_client.create_user(
+            UserName=user_name,
+        )[
             "User"
         ]["Arn"]
 
@@ -491,7 +493,9 @@ class Test_iam_inline_policy_no_administrative_privileges:
         iam_client = client("iam")
         # Create IAM User
         user_name = "test_user"
-        user_arn = iam_client.create_user(UserName=user_name,)[
+        user_arn = iam_client.create_user(
+            UserName=user_name,
+        )[
             "User"
         ]["Arn"]
 
@@ -537,7 +541,9 @@ class Test_iam_inline_policy_no_administrative_privileges:
         iam_client = client("iam")
         # Create IAM User
         user_name = "test_user"
-        user_arn = iam_client.create_user(UserName=user_name,)[
+        user_arn = iam_client.create_user(
+            UserName=user_name,
+        )[
             "User"
         ]["Arn"]
 