Mirror of https://github.com/ghndrx/prowler.git, synced 2026-02-10 06:45:08 +00:00
fix: Linter issues (#1471)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
.github/workflows/pull-request.yml (vendored) | 12
@@ -26,7 +26,17 @@ jobs:
       run: |
         python -m pip install --upgrade pip
         pip install pipenv
-        pipenv install
+        pipenv install --dev
+        pipenv run pip list
+    - name: Lint with flake8
+      run: |
+        pipenv run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
+    - name: Checking format with black
+      run: |
+        pipenv run black --check .
+    - name: Lint with pylint
+      run: |
+        pipenv run pylint --disable=W,C,R,E -j 0 providers lib util config
     - name: Bandit
       run: |
         pipenv run bandit -q -lll -x '*_test.py,./contrib/' -r .
@@ -20,7 +20,7 @@ repos:
       - id: shellcheck
 ## PYTHON
   - repo: https://github.com/myint/autoflake
-    rev: v1.4
+    rev: v1.7.7
     hooks:
       - id: autoflake
         args: ['--in-place', '--remove-all-unused-imports', '--remove-unused-variable']
@@ -32,10 +32,19 @@ repos:
         args: ["--profile", "black"]
 
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 22.10.0
     hooks:
       - id: black
 
+  - repo: https://github.com/pycqa/flake8
+    rev: 5.0.4
+    hooks:
+      - id: flake8
+        exclude: contrib
+        args: [
+          "--ignore=E266,W503,E203,E501,W605"
+        ]
+
   - repo: https://github.com/haizaar/check-pipfile-lock
     rev: v0.0.5
     hooks:
@@ -43,6 +52,11 @@ repos:
 
   - repo: local
     hooks:
+      - id: pylint
+        name: pylint
+        entry: bash -c 'pylint --disable=W,C,R,E -j 0 -rn -sn providers lib util config'
+        language: system
+
       - id: pytest-check
         name: pytest-check
         entry: bash -c 'pytest -n auto'
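The flake8 hook above, like the CI step and the Makefile target added in this commit, ignores E266, W503, E203, E501 and W605 because some of those codes fire on code that black itself produces. A minimal illustration (not part of this commit, sample values made up):

    # Illustrative only: black's slice formatting triggers E203, and the way black
    # wraps long expressions (operator at the start of the continuation line)
    # triggers W503, which is why both codes are ignored alongside the black hook.
    values = [1, 2, 3, 4, 5]
    middle = values[1 : len(values) - 1]  # E203 without the ignore
    total = (
        middle[0]
        + middle[-1]  # W503 without the ignore, on a wrapped expression
    )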
Makefile | 15
@@ -1,5 +1,6 @@
 .DEFAULT_GOAL:=help
 
+##@ Testing
 test: ## Test with pytest
 	pytest -n auto -vvv -s -x
 
@@ -7,6 +8,20 @@ coverage: ## Show Test Coverage
 	coverage run --skip-covered -m pytest -v && \
 	coverage report -m && \
 	rm -rf .coverage
+
+##@ Linting
+format: ## Format Code
+	@echo "Running black..."
+	black .
+
+lint: ## Lint Code
+	@echo "Running flake8..."
+	flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
+	@echo "Running black... "
+	black --check .
+	@echo "Running pylint..."
+	pylint --disable=W,C,R,E -j 0 providers lib util config
+
 ##@ Help
 help: ## Show this help.
 	@echo "Prowler Makefile"
Pipfile | 23
@@ -9,20 +9,23 @@ boto3 = "1.26.3"
 arnparse = "0.0.2"
 botocore = "1.27.8"
 pydantic = "1.9.1"
-moto = "4.0.9"
-sure = "2.0.0"
-bandit = "1.7.4"
-safety = "1.10.3"
-vulture = "2.4"
-coverage = "6.4.1"
-pytest = "7.1.2"
-pytest-xdist = "2.5.0"
 shodan = "1.28.0"
-openapi-spec-validator = "0.5.1"
-docker = "6.0.0"
 detect-secrets = "1.4.0"
 
 [dev-packages]
+black = "22.10.0"
+pylint = "2.15.5"
+flake8 = "5.0.4"
+bandit = "1.7.4"
+safety = "2.3.1"
+vulture = "2.6"
+moto = "4.0.9"
+docker = "6.0.0"
+openapi-spec-validator = "0.5.1"
+pytest = "7.1.2"
+pytest-xdist = "2.5.0"
+coverage = "6.4.1"
+sure = "2.0.0"
 
 [requires]
 python_version = "3.9"
Pipfile.lock (generated) | 559
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "7b5d26e522da612a9192a5d5122e2a100162936290013fd0053d7a14da41e422"
+            "sha256": "4e3096e19f235b38d957c39849b152deb4a0ffaa4af22dfcb49ec6446b190918"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -24,6 +24,246 @@
|
|||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==0.0.2"
|
"version": "==0.0.2"
|
||||||
},
|
},
|
||||||
|
"boto3": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:b39303fdda9b5d77a152e3ec9f264ae318ccdaa853eaf694626dc335464ded98",
|
||||||
|
"sha256:c02fc93a926944b4b426a170d2dae274b4c8c09ec5259450b94269a8ce990dd7"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==1.26.8"
|
||||||
|
},
|
||||||
|
"botocore": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:48cf33d7c513320711321c3b303b0c9810b23e15fa03424f7323883e4ce6cef8",
|
||||||
|
"sha256:9c6adcf4e080be63b92f50d01e176ef2d1d2a3da7d8387a964abb9eb65fc8aad"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==1.29.8"
|
||||||
|
},
|
||||||
|
"certifi": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14",
|
||||||
|
"sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.6'",
|
||||||
|
"version": "==2022.9.24"
|
||||||
|
},
|
||||||
|
"charset-normalizer": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845",
|
||||||
|
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"
|
||||||
|
],
|
||||||
|
"markers": "python_full_version >= '3.6.0'",
|
||||||
|
"version": "==2.1.1"
|
||||||
|
},
|
||||||
|
"click": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
|
||||||
|
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7'",
|
||||||
|
"version": "==8.1.3"
|
||||||
|
},
|
||||||
|
"click-plugins": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b",
|
||||||
|
"sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"
|
||||||
|
],
|
||||||
|
"version": "==1.1.1"
|
||||||
|
},
|
||||||
|
"colorama": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44",
|
||||||
|
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==0.4.6"
|
||||||
|
},
|
||||||
|
"detect-secrets": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd",
|
||||||
|
"sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==1.4.0"
|
||||||
|
},
|
||||||
|
"idna": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
|
||||||
|
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.5'",
|
||||||
|
"version": "==3.4"
|
||||||
|
},
|
||||||
|
"jmespath": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
|
||||||
|
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7'",
|
||||||
|
"version": "==1.0.1"
|
||||||
|
},
|
||||||
|
"pydantic": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42",
|
||||||
|
"sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624",
|
||||||
|
"sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e",
|
||||||
|
"sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559",
|
||||||
|
"sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709",
|
||||||
|
"sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9",
|
||||||
|
"sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d",
|
||||||
|
"sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52",
|
||||||
|
"sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda",
|
||||||
|
"sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912",
|
||||||
|
"sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c",
|
||||||
|
"sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525",
|
||||||
|
"sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe",
|
||||||
|
"sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41",
|
||||||
|
"sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b",
|
||||||
|
"sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283",
|
||||||
|
"sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965",
|
||||||
|
"sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c",
|
||||||
|
"sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410",
|
||||||
|
"sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5",
|
||||||
|
"sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116",
|
||||||
|
"sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98",
|
||||||
|
"sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f",
|
||||||
|
"sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644",
|
||||||
|
"sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13",
|
||||||
|
"sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd",
|
||||||
|
"sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254",
|
||||||
|
"sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6",
|
||||||
|
"sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488",
|
||||||
|
"sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5",
|
||||||
|
"sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c",
|
||||||
|
"sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1",
|
||||||
|
"sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a",
|
||||||
|
"sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2",
|
||||||
|
"sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d",
|
||||||
|
"sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==1.10.2"
|
||||||
|
},
|
||||||
|
"python-dateutil": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
|
||||||
|
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||||
|
"version": "==2.8.2"
|
||||||
|
},
|
||||||
|
"pyyaml": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf",
|
||||||
|
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293",
|
||||||
|
"sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b",
|
||||||
|
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57",
|
||||||
|
"sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b",
|
||||||
|
"sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4",
|
||||||
|
"sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07",
|
||||||
|
"sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba",
|
||||||
|
"sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9",
|
||||||
|
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287",
|
||||||
|
"sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513",
|
||||||
|
"sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0",
|
||||||
|
"sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782",
|
||||||
|
"sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0",
|
||||||
|
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92",
|
||||||
|
"sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f",
|
||||||
|
"sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2",
|
||||||
|
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc",
|
||||||
|
"sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1",
|
||||||
|
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c",
|
||||||
|
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86",
|
||||||
|
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4",
|
||||||
|
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c",
|
||||||
|
"sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34",
|
||||||
|
"sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b",
|
||||||
|
"sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d",
|
||||||
|
"sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c",
|
||||||
|
"sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb",
|
||||||
|
"sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7",
|
||||||
|
"sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737",
|
||||||
|
"sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3",
|
||||||
|
"sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d",
|
||||||
|
"sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358",
|
||||||
|
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53",
|
||||||
|
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78",
|
||||||
|
"sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803",
|
||||||
|
"sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a",
|
||||||
|
"sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f",
|
||||||
|
"sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174",
|
||||||
|
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.6'",
|
||||||
|
"version": "==6.0"
|
||||||
|
},
|
||||||
|
"requests": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
|
||||||
|
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7' and python_version < '4'",
|
||||||
|
"version": "==2.28.1"
|
||||||
|
},
|
||||||
|
"s3transfer": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
|
||||||
|
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7'",
|
||||||
|
"version": "==0.6.0"
|
||||||
|
},
|
||||||
|
"shodan": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:18bd2ae81114b70836e0e3315227325e14398275223998a8c235b099432f4b0b"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==1.28.0"
|
||||||
|
},
|
||||||
|
"six": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||||
|
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||||
|
"version": "==1.16.0"
|
||||||
|
},
|
||||||
|
"typing-extensions": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa",
|
||||||
|
"sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7'",
|
||||||
|
"version": "==4.4.0"
|
||||||
|
},
|
||||||
|
"urllib3": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e",
|
||||||
|
"sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'",
|
||||||
|
"version": "==1.26.12"
|
||||||
|
},
|
||||||
|
"xlsxwriter": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:df0aefe5137478d206847eccf9f114715e42aaea077e6a48d0e8a2152e983010",
|
||||||
|
"sha256:e89f4a1d2fa2c9ea15cde77de95cd3fd8b0345d0efb3964623f395c8c4988b7f"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.4'",
|
||||||
|
"version": "==3.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"develop": {
|
||||||
|
"astroid": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83",
|
||||||
|
"sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"
|
||||||
|
],
|
||||||
|
"markers": "python_full_version >= '3.7.2'",
|
||||||
|
"version": "==2.12.12"
|
||||||
|
},
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6",
|
"sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6",
|
||||||
@@ -40,21 +280,48 @@
|
|||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.7.4"
|
"version": "==1.7.4"
|
||||||
},
|
},
|
||||||
"boto3": {
|
"black": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:7e871c481f88e5b2fc6ac16eb190c95de21efb43ab2d959beacf8b7b096b11d2",
|
"sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7",
|
||||||
"sha256:b81e4aa16891eac7532ce6cc9eb690a8d2e0ceea3bcf44b5c5a1309c2500d35f"
|
"sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6",
|
||||||
|
"sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650",
|
||||||
|
"sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb",
|
||||||
|
"sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d",
|
||||||
|
"sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d",
|
||||||
|
"sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de",
|
||||||
|
"sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395",
|
||||||
|
"sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae",
|
||||||
|
"sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa",
|
||||||
|
"sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef",
|
||||||
|
"sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383",
|
||||||
|
"sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66",
|
||||||
|
"sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87",
|
||||||
|
"sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d",
|
||||||
|
"sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0",
|
||||||
|
"sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b",
|
||||||
|
"sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458",
|
||||||
|
"sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4",
|
||||||
|
"sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1",
|
||||||
|
"sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.26.3"
|
"version": "==22.10.0"
|
||||||
|
},
|
||||||
|
"boto3": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:b39303fdda9b5d77a152e3ec9f264ae318ccdaa853eaf694626dc335464ded98",
|
||||||
|
"sha256:c02fc93a926944b4b426a170d2dae274b4c8c09ec5259450b94269a8ce990dd7"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==1.26.8"
|
||||||
},
|
},
|
||||||
"botocore": {
|
"botocore": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:100534532b2745f6fa019b79199a8941f04b8168f9d557d0847191455f1f1eed",
|
"sha256:48cf33d7c513320711321c3b303b0c9810b23e15fa03424f7323883e4ce6cef8",
|
||||||
"sha256:ac7986fefe1b9c6323d381c4fdee3845c67fa53eb6c9cf586a8e8a07270dbcfe"
|
"sha256:9c6adcf4e080be63b92f50d01e176ef2d1d2a3da7d8387a964abb9eb65fc8aad"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.29.3"
|
"version": "==1.29.8"
|
||||||
},
|
},
|
||||||
"certifi": {
|
"certifi": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -138,7 +405,7 @@
|
|||||||
"sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845",
|
"sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845",
|
||||||
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"
|
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3.6'",
|
"markers": "python_full_version >= '3.6.0'",
|
||||||
"version": "==2.1.1"
|
"version": "==2.1.1"
|
||||||
},
|
},
|
||||||
"click": {
|
"click": {
|
||||||
@@ -146,24 +413,9 @@
|
|||||||
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
|
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
|
||||||
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
|
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==8.1.3"
|
"version": "==8.1.3"
|
||||||
},
|
},
|
||||||
"click-plugins": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b",
|
|
||||||
"sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"
|
|
||||||
],
|
|
||||||
"version": "==1.1.1"
|
|
||||||
},
|
|
||||||
"colorama": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44",
|
|
||||||
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.4.6"
|
|
||||||
},
|
|
||||||
"coverage": {
|
"coverage": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79",
|
"sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79",
|
||||||
@@ -252,13 +504,13 @@
|
|||||||
"markers": "python_version >= '3.6'",
|
"markers": "python_version >= '3.6'",
|
||||||
"version": "==38.0.3"
|
"version": "==38.0.3"
|
||||||
},
|
},
|
||||||
"detect-secrets": {
|
"dill": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd",
|
"sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0",
|
||||||
"sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833"
|
"sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==1.4.0"
|
"version": "==0.3.6"
|
||||||
},
|
},
|
||||||
"docker": {
|
"docker": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -278,11 +530,11 @@
|
|||||||
},
|
},
|
||||||
"exceptiongroup": {
|
"exceptiongroup": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:4d6c0aa6dd825810941c792f53d7b8d71da26f5e5f84f20f9508e8f2d33b140a",
|
"sha256:a31cd183c3dea02e617aab5153588d5f7258a77b51f0ef41b3815ae8a0d0f695",
|
||||||
"sha256:73866f7f842ede6cb1daa42c4af078e2035e5f7607f0e2c762cc51bb31bbe7b2"
|
"sha256:c22f11ec6a10d2b453871c5c5fe887436c4d1961324ce9090f2ca6ddc4180c27"
|
||||||
],
|
],
|
||||||
"markers": "python_version < '3.11'",
|
"markers": "python_version < '3.11'",
|
||||||
"version": "==1.0.1"
|
"version": "==1.0.2"
|
||||||
},
|
},
|
||||||
"execnet": {
|
"execnet": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -292,6 +544,14 @@
|
|||||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
|
||||||
"version": "==1.9.0"
|
"version": "==1.9.0"
|
||||||
},
|
},
|
||||||
|
"flake8": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db",
|
||||||
|
"sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"
|
||||||
|
],
|
||||||
|
"index": "pypi",
|
||||||
|
"version": "==5.0.4"
|
||||||
|
},
|
||||||
"gitdb": {
|
"gitdb": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd",
|
"sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd",
|
||||||
@@ -305,7 +565,7 @@
|
|||||||
"sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f",
|
"sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f",
|
||||||
"sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"
|
"sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==3.1.29"
|
"version": "==3.1.29"
|
||||||
},
|
},
|
||||||
"idna": {
|
"idna": {
|
||||||
@@ -321,7 +581,7 @@
|
|||||||
"sha256:c01b1b94210d9849f286b86bb51bcea7cd56dde0600d8db721d7b81330711668",
|
"sha256:c01b1b94210d9849f286b86bb51bcea7cd56dde0600d8db721d7b81330711668",
|
||||||
"sha256:ee17ec648f85480d523596ce49eae8ead87d5631ae1551f913c0100b5edd3437"
|
"sha256:ee17ec648f85480d523596ce49eae8ead87d5631ae1551f913c0100b5edd3437"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==5.10.0"
|
"version": "==5.10.0"
|
||||||
},
|
},
|
||||||
"iniconfig": {
|
"iniconfig": {
|
||||||
@@ -331,12 +591,20 @@
|
|||||||
],
|
],
|
||||||
"version": "==1.1.1"
|
"version": "==1.1.1"
|
||||||
},
|
},
|
||||||
|
"isort": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7",
|
||||||
|
"sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"
|
||||||
|
],
|
||||||
|
"markers": "python_version < '4.0' and python_full_version >= '3.6.1'",
|
||||||
|
"version": "==5.10.1"
|
||||||
|
},
|
||||||
"jinja2": {
|
"jinja2": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
|
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
|
||||||
"sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
|
"sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==3.1.2"
|
"version": "==3.1.2"
|
||||||
},
|
},
|
||||||
"jmespath": {
|
"jmespath": {
|
||||||
@@ -344,7 +612,7 @@
|
|||||||
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
|
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
|
||||||
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
|
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==1.0.1"
|
"version": "==1.0.1"
|
||||||
},
|
},
|
||||||
"jsonschema": {
|
"jsonschema": {
|
||||||
@@ -352,7 +620,7 @@
|
|||||||
"sha256:5bfcf2bca16a087ade17e02b282d34af7ccd749ef76241e7f9bd7c0cb8a9424d",
|
"sha256:5bfcf2bca16a087ade17e02b282d34af7ccd749ef76241e7f9bd7c0cb8a9424d",
|
||||||
"sha256:f660066c3966db7d6daeaea8a75e0b68237a48e51cf49882087757bb59916248"
|
"sha256:f660066c3966db7d6daeaea8a75e0b68237a48e51cf49882087757bb59916248"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==4.17.0"
|
"version": "==4.17.0"
|
||||||
},
|
},
|
||||||
"jsonschema-spec": {
|
"jsonschema-spec": {
|
||||||
@@ -385,7 +653,7 @@
|
|||||||
"sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0",
|
"sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0",
|
||||||
"sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"
|
"sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==1.8.0"
|
"version": "==1.8.0"
|
||||||
},
|
},
|
||||||
"markupsafe": {
|
"markupsafe": {
|
||||||
@@ -431,9 +699,17 @@
|
|||||||
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a",
|
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a",
|
||||||
"sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"
|
"sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==2.1.1"
|
"version": "==2.1.1"
|
||||||
},
|
},
|
||||||
|
"mccabe": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
|
||||||
|
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.6'",
|
||||||
|
"version": "==0.7.0"
|
||||||
|
},
|
||||||
"mock": {
|
"mock": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62",
|
"sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62",
|
||||||
@@ -450,6 +726,13 @@
|
|||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==4.0.9"
|
"version": "==4.0.9"
|
||||||
},
|
},
|
||||||
|
"mypy-extensions": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d",
|
||||||
|
"sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"
|
||||||
|
],
|
||||||
|
"version": "==0.4.3"
|
||||||
|
},
|
||||||
"openapi-schema-validator": {
|
"openapi-schema-validator": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:34fbd14b7501abe25e64d7b4624a9db02cde1a578d285b3da6f34b290cdf0b3a",
|
"sha256:34fbd14b7501abe25e64d7b4624a9db02cde1a578d285b3da6f34b290cdf0b3a",
|
||||||
@@ -482,6 +765,14 @@
|
|||||||
"markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
|
"markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
|
||||||
"version": "==0.4.3"
|
"version": "==0.4.3"
|
||||||
},
|
},
|
||||||
|
"pathspec": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5",
|
||||||
|
"sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7'",
|
||||||
|
"version": "==0.10.2"
|
||||||
|
},
|
||||||
"pbr": {
|
"pbr": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe",
|
"sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe",
|
||||||
@@ -490,6 +781,14 @@
|
|||||||
"markers": "python_version >= '2.6'",
|
"markers": "python_version >= '2.6'",
|
||||||
"version": "==5.11.0"
|
"version": "==5.11.0"
|
||||||
},
|
},
|
||||||
|
"platformdirs": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7",
|
||||||
|
"sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.7'",
|
||||||
|
"version": "==2.5.4"
|
||||||
|
},
|
||||||
"pluggy": {
|
"pluggy": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159",
|
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159",
|
||||||
@@ -498,6 +797,14 @@
|
|||||||
"markers": "python_version >= '3.6'",
|
"markers": "python_version >= '3.6'",
|
||||||
"version": "==1.0.0"
|
"version": "==1.0.0"
|
||||||
},
|
},
|
||||||
|
"pycodestyle": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785",
|
||||||
|
"sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.6'",
|
||||||
|
"version": "==2.9.1"
|
||||||
|
},
|
||||||
"pycparser": {
|
"pycparser": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
|
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
|
||||||
@@ -505,47 +812,21 @@
|
|||||||
],
|
],
|
||||||
"version": "==2.21"
|
"version": "==2.21"
|
||||||
},
|
},
|
||||||
"pydantic": {
|
"pyflakes": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42",
|
"sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2",
|
||||||
"sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624",
|
"sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"
|
||||||
"sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e",
|
],
|
||||||
"sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559",
|
"markers": "python_version >= '3.6'",
|
||||||
"sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709",
|
"version": "==2.5.0"
|
||||||
"sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9",
|
},
|
||||||
"sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d",
|
"pylint": {
|
||||||
"sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52",
|
"hashes": [
|
||||||
"sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda",
|
"sha256:3b120505e5af1d06a5ad76b55d8660d44bf0f2fc3c59c2bdd94e39188ee3a4df",
|
||||||
"sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912",
|
"sha256:c2108037eb074334d9e874dc3c783752cc03d0796c88c9a9af282d0f161a1004"
|
||||||
"sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c",
|
|
||||||
"sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525",
|
|
||||||
"sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe",
|
|
||||||
"sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41",
|
|
||||||
"sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b",
|
|
||||||
"sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283",
|
|
||||||
"sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965",
|
|
||||||
"sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c",
|
|
||||||
"sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410",
|
|
||||||
"sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5",
|
|
||||||
"sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116",
|
|
||||||
"sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98",
|
|
||||||
"sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f",
|
|
||||||
"sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644",
|
|
||||||
"sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13",
|
|
||||||
"sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd",
|
|
||||||
"sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254",
|
|
||||||
"sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6",
|
|
||||||
"sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488",
|
|
||||||
"sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5",
|
|
||||||
"sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c",
|
|
||||||
"sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1",
|
|
||||||
"sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a",
|
|
||||||
"sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2",
|
|
||||||
"sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d",
|
|
||||||
"sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"
|
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.10.2"
|
"version": "==2.15.5"
|
||||||
},
|
},
|
||||||
"pyparsing": {
|
"pyparsing": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -580,7 +861,7 @@
|
|||||||
"sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0",
|
"sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0",
|
||||||
"sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"
|
"sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==0.19.2"
|
"version": "==0.19.2"
|
||||||
},
|
},
|
||||||
"pytest": {
|
"pytest": {
|
||||||
@@ -665,7 +946,7 @@
|
|||||||
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
|
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
|
||||||
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
|
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
|
"markers": "python_version >= '3.7' and python_version < '4'",
|
||||||
"version": "==2.28.1"
|
"version": "==2.28.1"
|
||||||
},
|
},
|
||||||
"responses": {
|
"responses": {
|
||||||
@@ -673,7 +954,7 @@
|
|||||||
"sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e",
|
"sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e",
|
||||||
"sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"
|
"sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==0.22.0"
|
"version": "==0.22.0"
|
||||||
},
|
},
|
||||||
"ruamel.yaml": {
|
"ruamel.yaml": {
|
||||||
@@ -728,7 +1009,7 @@
|
|||||||
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
|
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
|
||||||
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
|
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==0.6.0"
|
"version": "==0.6.0"
|
||||||
},
|
},
|
||||||
"safety": {
|
"safety": {
|
||||||
@@ -744,16 +1025,9 @@
|
|||||||
"sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31",
|
"sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31",
|
||||||
"sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"
|
"sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==65.5.1"
|
"version": "==65.5.1"
|
||||||
},
|
},
|
||||||
"shodan": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:18bd2ae81114b70836e0e3315227325e14398275223998a8c235b099432f4b0b"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==1.28.0"
|
|
||||||
},
|
|
||||||
"six": {
|
"six": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||||
@@ -772,11 +1046,11 @@
|
|||||||
},
|
},
|
||||||
"stevedore": {
|
"stevedore": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6",
|
"sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a",
|
||||||
"sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"
|
"sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3.8'",
|
"markers": "python_version >= '3.8'",
|
||||||
"version": "==4.1.0"
|
"version": "==4.1.1"
|
||||||
},
|
},
|
||||||
"sure": {
|
"sure": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -798,22 +1072,30 @@
|
|||||||
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
|
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
|
||||||
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
|
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
|
||||||
],
|
],
|
||||||
"markers": "python_version < '3.11'",
|
"markers": "python_full_version < '3.11.0a7'",
|
||||||
"version": "==2.0.1"
|
"version": "==2.0.1"
|
||||||
},
|
},
|
||||||
|
"tomlkit": {
|
||||||
|
"hashes": [
|
||||||
|
"sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b",
|
||||||
|
"sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"
|
||||||
|
],
|
||||||
|
"markers": "python_version >= '3.6'",
|
||||||
|
"version": "==0.11.6"
|
||||||
|
},
|
||||||
"types-toml": {
|
"types-toml": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:8300fd093e5829eb9c1fba69cee38130347d4b74ddf32d0a7df650ae55c2b599",
|
"sha256:171bdb3163d79a520560f24ba916a9fc9bff81659c5448a9fea89240923722be",
|
||||||
"sha256:b7e7ea572308b1030dc86c3ba825c5210814c2825612ec679eb7814f8dd9295a"
|
"sha256:b7b5c4977f96ab7b5ac06d8a6590d17c0bf252a96efc03b109c2711fb3e0eafd"
|
||||||
],
|
],
|
||||||
"version": "==0.10.8"
|
"version": "==0.10.8.1"
|
||||||
},
|
},
|
||||||
"typing-extensions": {
|
"typing-extensions": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa",
|
"sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa",
|
||||||
"sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"
|
"sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==4.4.0"
|
"version": "==4.4.0"
|
||||||
},
|
},
|
||||||
"urllib3": {
|
"urllib3": {
|
||||||
@@ -821,7 +1103,7 @@
|
|||||||
"sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e",
|
"sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e",
|
||||||
"sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"
|
"sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_full_version < '4.0.0'",
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'",
|
||||||
"version": "==1.26.12"
|
"version": "==1.26.12"
|
||||||
},
|
},
|
||||||
"vulture": {
|
"vulture": {
|
||||||
@@ -837,7 +1119,7 @@
|
|||||||
"sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574",
|
"sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574",
|
||||||
"sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"
|
"sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==1.4.2"
|
"version": "==1.4.2"
|
||||||
},
|
},
|
||||||
"werkzeug": {
|
"werkzeug": {
|
||||||
@@ -845,16 +1127,78 @@
|
|||||||
"sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f",
|
"sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f",
|
||||||
"sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"
|
"sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"
|
||||||
],
|
],
|
||||||
"markers": "python_full_version >= '3.7.0'",
|
"markers": "python_version >= '3.7'",
|
||||||
"version": "==2.2.2"
|
"version": "==2.2.2"
|
||||||
},
|
},
|
||||||
"xlsxwriter": {
|
"wrapt": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:df0aefe5137478d206847eccf9f114715e42aaea077e6a48d0e8a2152e983010",
|
"sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3",
|
||||||
"sha256:e89f4a1d2fa2c9ea15cde77de95cd3fd8b0345d0efb3964623f395c8c4988b7f"
|
"sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b",
|
||||||
|
"sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4",
|
||||||
|
"sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2",
|
||||||
|
"sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656",
|
||||||
|
"sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3",
|
||||||
|
"sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff",
|
||||||
|
"sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310",
|
||||||
|
"sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a",
|
||||||
|
"sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57",
|
||||||
|
"sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069",
|
||||||
|
"sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383",
|
||||||
|
"sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe",
|
||||||
|
"sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87",
|
||||||
|
"sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d",
|
||||||
|
"sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b",
|
||||||
|
"sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907",
|
||||||
|
"sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f",
|
||||||
|
"sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0",
|
||||||
|
"sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28",
|
||||||
|
"sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1",
|
||||||
|
"sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853",
|
||||||
|
"sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc",
|
||||||
|
"sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3",
|
||||||
|
"sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3",
|
||||||
|
"sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164",
|
||||||
|
"sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1",
|
||||||
|
"sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c",
|
||||||
|
"sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1",
|
||||||
|
"sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7",
|
||||||
|
"sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1",
|
||||||
|
"sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320",
|
||||||
|
"sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed",
|
||||||
|
"sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1",
|
||||||
|
"sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248",
|
||||||
|
"sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c",
|
||||||
|
"sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456",
|
||||||
|
"sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77",
|
||||||
|
"sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef",
|
||||||
|
"sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1",
|
||||||
|
"sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7",
|
||||||
|
"sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86",
|
||||||
|
"sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4",
|
||||||
|
"sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d",
|
||||||
|
"sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d",
|
||||||
|
"sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8",
|
||||||
|
"sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5",
|
||||||
|
"sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471",
|
||||||
|
"sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00",
|
||||||
|
"sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68",
|
||||||
|
"sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3",
|
||||||
|
"sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d",
|
||||||
|
"sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735",
|
||||||
|
"sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d",
|
||||||
|
"sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569",
|
||||||
|
"sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7",
|
||||||
|
"sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59",
|
||||||
|
"sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5",
|
||||||
|
"sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb",
|
||||||
|
"sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b",
|
||||||
|
"sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f",
|
||||||
|
"sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462",
|
||||||
|
"sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015",
|
||||||
|
"sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3.4'",
|
"markers": "python_version < '3.11'",
|
||||||
"version": "==3.0.3"
|
"version": "==1.14.1"
|
||||||
},
|
},
|
||||||
"xmltodict": {
|
"xmltodict": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -872,6 +1216,5 @@
             "markers": "python_version < '3.10'",
             "version": "==3.10.0"
         }
-    },
-    "develop": {}
+    }
 }
@@ -32,17 +32,17 @@ import re
 ################################################################################
 # Constants
 ################################################################################
-WAZUH_PATH = open('/etc/ossec-init.conf').readline().split('"')[1]
+WAZUH_PATH = open("/etc/ossec-init.conf").readline().split('"')[1]
 DEBUG_LEVEL = 0 # Enable/disable debug mode
-PATH_TO_PROWLER = '{0}/integrations/prowler'.format(WAZUH_PATH) # No trailing slash
-TEMPLATE_CHECK = '''
+PATH_TO_PROWLER = "{0}/integrations/prowler".format(WAZUH_PATH) # No trailing slash
+TEMPLATE_CHECK = """
 {{
   "integration": "prowler",
   "prowler": {0}
 }}
-'''
-TEMPLATE_MSG = '1:Wazuh-Prowler:{0}'
-TEMPLATE_ERROR = '''{{
+"""
+TEMPLATE_MSG = "1:Wazuh-Prowler:{0}"
+TEMPLATE_ERROR = """{{
   "aws_account_id": {aws_account_id},
   "aws_profile": "{aws_profile}",
   "prowler_error": "{prowler_error}",
@@ -50,193 +50,225 @@ TEMPLATE_ERROR = '''{{
   "timestamp": "{timestamp}",
   "status": "Error"
 }}
-'''
-WAZUH_QUEUE = '{0}/queue/ossec/queue'.format(WAZUH_PATH)
+"""
+WAZUH_QUEUE = "{0}/queue/ossec/queue".format(WAZUH_PATH)
 FIELD_REMAP = {
     "Profile": "aws_profile",
     "Control": "control",
     "Account Number": "aws_account_id",
     "Level": "level",
     "Account Alias": "aws_account_alias",
     "Timestamp": "timestamp",
     "Region": "region",
     "Control ID": "control_id",
     "Status": "status",
     "Scored": "scored",
-    "Message": "message"
+    "Message": "message",
 }
-CHECKS_FILES_TO_IGNORE = [
-    'check_sample'
-]
+CHECKS_FILES_TO_IGNORE = ["check_sample"]
 
 
 ################################################################################
 # Functions
 ################################################################################
 def _send_msg(msg):
     try:
         _json_msg = json.dumps(_reformat_msg(msg))
         _debug("Sending Msg: {0}".format(_json_msg), 3)
         _socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
         _socket.connect(WAZUH_QUEUE)
         _socket.send(TEMPLATE_MSG.format(_json_msg).encode())
         _socket.close()
     except socket.error as e:
         if e.errno == 111:
-            print('ERROR: Wazuh must be running.')
+            print("ERROR: Wazuh must be running.")
             sys.exit(5)
         else:
             print("ERROR: Error sending message to wazuh: {}".format(e))
             sys.exit(6)
     except Exception as e:
         print("ERROR: Error sending message to wazuh: {}".format(e))
         sys.exit(6)
     return
 
 
 def _handler(signal, frame):
     print("ERROR: SIGINT received.")
     sys.exit(12)
 
 
 def _debug(msg, msg_level):
     if DEBUG_LEVEL >= msg_level:
-        print('DEBUG-{level}: {debug_msg}'.format(level=msg_level, debug_msg=msg))
+        print("DEBUG-{level}: {debug_msg}".format(level=msg_level, debug_msg=msg))
 
 
 def _get_script_arguments():
-    _parser = argparse.ArgumentParser(usage="usage: %(prog)s [options]",
-                                      description="Wazuh wodle for evaluating AWS security configuration",
-                                      formatter_class=argparse.RawTextHelpFormatter)
-    _parser.add_argument('-c', '--aws_account_id', dest='aws_account_id',
-                         help='AWS Account ID for logs',
-                         required=False)
-    _parser.add_argument('-d', '--debug', action='store', dest='debug', default=0, help='Enable debug')
-    _parser.add_argument('-p', '--aws_profile', dest='aws_profile', help='The name of credential profile to use',
-                         default=None)
-    _parser.add_argument('-n', '--aws_account_alias', dest='aws_account_alias',
-                         help='AWS Account ID Alias', default='')
-    _parser.add_argument('-e', '--skip_on_error', action='store_false', dest='skip_on_error',
-                         help='If check output is invalid json, error out instead of skipping the check', default=True)
-    return _parser.parse_args()
+    _parser = argparse.ArgumentParser(
+        usage="usage: %(prog)s [options]",
+        description="Wazuh wodle for evaluating AWS security configuration",
+        formatter_class=argparse.RawTextHelpFormatter,
+    )
+    _parser.add_argument(
+        "-c",
+        "--aws_account_id",
+        dest="aws_account_id",
+        help="AWS Account ID for logs",
+        required=False,
+    )
+    _parser.add_argument(
+        "-d", "--debug", action="store", dest="debug", default=0, help="Enable debug"
+    )
+    _parser.add_argument(
+        "-p",
+        "--aws_profile",
+        dest="aws_profile",
+        help="The name of credential profile to use",
+        default=None,
+    )
+    _parser.add_argument(
+        "-n",
+        "--aws_account_alias",
+        dest="aws_account_alias",
+        help="AWS Account ID Alias",
+        default="",
+    )
+    _parser.add_argument(
+        "-e",
+        "--skip_on_error",
+        action="store_false",
+        dest="skip_on_error",
+        help="If check output is invalid json, error out instead of skipping the check",
+        default=True,
+    )
+    return _parser.parse_args()
 
 
 def _run_prowler(prowler_args):
-    _debug('Running prowler with args: {0}'.format(prowler_args), 1)
-    _prowler_command = '{prowler}/prowler {args}'.format(prowler=PATH_TO_PROWLER, args=prowler_args)
-    _debug('Running command: {0}'.format(_prowler_command), 2)
-    _process = subprocess.Popen(_prowler_command, stdout=subprocess.PIPE, shell=True)
-    _output, _error = _process.communicate()
-    _debug('Raw prowler output: {0}'.format(_output), 3)
+    _debug("Running prowler with args: {0}".format(prowler_args), 1)
+    _prowler_command = "{prowler}/prowler {args}".format(
+        prowler=PATH_TO_PROWLER, args=prowler_args
+    )
+    _debug("Running command: {0}".format(_prowler_command), 2)
+    _process = subprocess.Popen(_prowler_command, stdout=subprocess.PIPE, shell=True)
|
||||||
_debug('Raw prowler error: {0}'.format(_error), 3)
|
_output, _error = _process.communicate()
|
||||||
if _error is not None:
|
_debug("Raw prowler output: {0}".format(_output), 3)
|
||||||
_debug('PROWLER ERROR: {0}'.format(_error), 1)
|
_debug("Raw prowler error: {0}".format(_error), 3)
|
||||||
exit(3)
|
if _error is not None:
|
||||||
return _output
|
_debug("PROWLER ERROR: {0}".format(_error), 1)
|
||||||
|
exit(3)
|
||||||
|
return _output
|
||||||
|
|
||||||
|
|
||||||
def _get_prowler_version(options):
|
def _get_prowler_version(options):
|
||||||
_debug('+++ Get Prowler Version', 1)
|
_debug("+++ Get Prowler Version", 1)
|
||||||
# Execute prowler, but only display the version and immediately exit
|
# Execute prowler, but only display the version and immediately exit
|
||||||
return _run_prowler('-p {0} -V'.format(options.aws_profile)).rstrip()
|
return _run_prowler("-p {0} -V".format(options.aws_profile)).rstrip()
|
||||||
|
|
||||||
|
|
||||||
def _get_prowler_results(options, prowler_check):
|
def _get_prowler_results(options, prowler_check):
|
||||||
_debug('+++ Get Prowler Results - {check}'.format(check=prowler_check), 1)
|
_debug("+++ Get Prowler Results - {check}".format(check=prowler_check), 1)
|
||||||
# Execute prowler with all checks
|
# Execute prowler with all checks
|
||||||
# -b = disable banner
|
# -b = disable banner
|
||||||
# -p = credential profile
|
# -p = credential profile
|
||||||
# -M = output json
|
# -M = output json
|
||||||
|
|
||||||
|
return _run_prowler(
|
||||||
|
"-b -c {check} -p {aws_profile} -M json".format(
|
||||||
|
check=prowler_check, aws_profile=options.aws_profile
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
return _run_prowler('-b -c {check} -p {aws_profile} -M json'.format(check=prowler_check,
|
|
||||||
aws_profile=options.aws_profile))
|
|
||||||
|
|
||||||
def _get_prowler_checks():
|
def _get_prowler_checks():
|
||||||
_prowler_checks = []
|
_prowler_checks = []
|
||||||
for _directory_path, _directories, _files in os.walk('{path}/checks'.format(path=PATH_TO_PROWLER)):
|
for _directory_path, _directories, _files in os.walk(
|
||||||
_debug('Checking in : {}'.format(_directory_path), 3)
|
"{path}/checks".format(path=PATH_TO_PROWLER)
|
||||||
for _file in _files:
|
):
|
||||||
if _file in CHECKS_FILES_TO_IGNORE:
|
_debug("Checking in : {}".format(_directory_path), 3)
|
||||||
_debug('Ignoring check - {}'.format(_directory_path, _file), 3)
|
for _file in _files:
|
||||||
elif re.match("check\d+", _file):
|
if _file in CHECKS_FILES_TO_IGNORE:
|
||||||
_prowler_checks.append(_file)
|
_debug("Ignoring check - {}".format(_directory_path, _file), 3)
|
||||||
elif re.match("check_extra(\d+)", _file):
|
elif re.match("check\d+", _file):
|
||||||
_prowler_checks.append(_file[6:])
|
_prowler_checks.append(_file)
|
||||||
else:
|
elif re.match("check_extra(\d+)", _file):
|
||||||
_debug('Unknown check file type- {}'.format(_directory_path, _file), 3)
|
_prowler_checks.append(_file[6:])
|
||||||
return _prowler_checks
|
else:
|
||||||
|
_debug("Unknown check file type- {}".format(_directory_path, _file), 3)
|
||||||
|
return _prowler_checks
|
||||||
|
|
||||||
|
|
||||||
def _send_prowler_results(prowler_results, _prowler_version, options):
|
def _send_prowler_results(prowler_results, _prowler_version, options):
|
||||||
_debug('+++ Send Prowler Results', 1)
|
_debug("+++ Send Prowler Results", 1)
|
||||||
for _check_result in prowler_results.splitlines():
|
for _check_result in prowler_results.splitlines():
|
||||||
# Empty row
|
# Empty row
|
||||||
if len(_check_result) < 1:
|
if len(_check_result) < 1:
|
||||||
continue
|
continue
|
||||||
# Something failed during prowler check
|
# Something failed during prowler check
|
||||||
elif _check_result[:17] == 'An error occurred':
|
elif _check_result[:17] == "An error occurred":
|
||||||
_debug('ERROR MSG --- {0}'.format(_check_result), 2)
|
_debug("ERROR MSG --- {0}".format(_check_result), 2)
|
||||||
_temp_msg = TEMPLATE_ERROR.format(
|
_temp_msg = TEMPLATE_ERROR.format(
|
||||||
aws_account_id=options.aws_account_id,
|
aws_account_id=options.aws_account_id,
|
||||||
aws_profile=options.aws_profile,
|
aws_profile=options.aws_profile,
|
||||||
prowler_error=_check_result.replace('"', '\"'),
|
prowler_error=_check_result.replace('"', '"'),
|
||||||
prowler_version=_prowler_version,
|
prowler_version=_prowler_version,
|
||||||
timestamp=datetime.now().isoformat()
|
timestamp=datetime.now().isoformat(),
|
||||||
)
|
)
|
||||||
_error_msg = json.loads(TEMPLATE_CHECK.format(_temp_msg))
|
_error_msg = json.loads(TEMPLATE_CHECK.format(_temp_msg))
|
||||||
_send_msg(_error_msg)
|
_send_msg(_error_msg)
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
_debug('RESULT MSG --- {0}'.format(_check_result), 2)
|
_debug("RESULT MSG --- {0}".format(_check_result), 2)
|
||||||
_check_result = json.loads(TEMPLATE_CHECK.format(_check_result))
|
_check_result = json.loads(TEMPLATE_CHECK.format(_check_result))
|
||||||
except:
|
except:
|
||||||
_debug('INVALID JSON --- {0}'.format(TEMPLATE_CHECK.format(_check_result)), 1)
|
_debug(
|
||||||
if not options.skip_on_error:
|
"INVALID JSON --- {0}".format(TEMPLATE_CHECK.format(_check_result)), 1
|
||||||
exit(4)
|
)
|
||||||
_check_result['prowler']['prowler_version'] = _prowler_version
|
if not options.skip_on_error:
|
||||||
_check_result['prowler']['aws_account_alias'] = options.aws_account_alias
|
exit(4)
|
||||||
_send_msg(_check_result)
|
_check_result["prowler"]["prowler_version"] = _prowler_version
|
||||||
|
_check_result["prowler"]["aws_account_alias"] = options.aws_account_alias
|
||||||
|
_send_msg(_check_result)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def _reformat_msg(msg):
|
def _reformat_msg(msg):
|
||||||
for field in FIELD_REMAP:
|
for field in FIELD_REMAP:
|
||||||
if field in msg['prowler']:
|
if field in msg["prowler"]:
|
||||||
msg['prowler'][FIELD_REMAP[field]] = msg['prowler'][field]
|
msg["prowler"][FIELD_REMAP[field]] = msg["prowler"][field]
|
||||||
del msg['prowler'][field]
|
del msg["prowler"][field]
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
|
|
||||||
# Main
|
# Main
|
||||||
###############################################################################
|
###############################################################################
|
||||||
def main(argv):
|
def main(argv):
|
||||||
_debug('+++ Begin script', 1)
|
_debug("+++ Begin script", 1)
|
||||||
# Parse arguments
|
# Parse arguments
|
||||||
_options = _get_script_arguments()
|
_options = _get_script_arguments()
|
||||||
|
|
||||||
if int(_options.debug) > 0:
|
if int(_options.debug) > 0:
|
||||||
global DEBUG_LEVEL
|
global DEBUG_LEVEL
|
||||||
DEBUG_LEVEL = int(_options.debug)
|
DEBUG_LEVEL = int(_options.debug)
|
||||||
_debug('+++ Debug mode on - Level: {debug}'.format(debug=_options.debug), 1)
|
_debug("+++ Debug mode on - Level: {debug}".format(debug=_options.debug), 1)
|
||||||
|
|
||||||
_prowler_version = _get_prowler_version(_options)
|
_prowler_version = _get_prowler_version(_options)
|
||||||
_prowler_checks = _get_prowler_checks()
|
_prowler_checks = _get_prowler_checks()
|
||||||
for _check in _prowler_checks:
|
for _check in _prowler_checks:
|
||||||
_prowler_results = _get_prowler_results(_options, _check)
|
_prowler_results = _get_prowler_results(_options, _check)
|
||||||
_send_prowler_results(_prowler_results, _prowler_version, _options)
|
_send_prowler_results(_prowler_results, _prowler_version, _options)
|
||||||
_debug('+++ Finished script', 1)
|
_debug("+++ Finished script", 1)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == "__main__":
|
||||||
try:
|
try:
|
||||||
_debug('Args: {args}'.format(args=str(sys.argv)), 2)
|
_debug("Args: {args}".format(args=str(sys.argv)), 2)
|
||||||
signal.signal(signal.SIGINT, _handler)
|
signal.signal(signal.SIGINT, _handler)
|
||||||
main(sys.argv[1:])
|
main(sys.argv[1:])
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("Unknown error: {}".format(e))
|
print("Unknown error: {}".format(e))
|
||||||
if DEBUG_LEVEL > 0:
|
if DEBUG_LEVEL > 0:
|
||||||
raise
|
raise
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|||||||
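The wodle above wraps every Prowler finding in the `prowler` integration envelope and prefixes it with the `1:Wazuh-Prowler:` queue header before writing it to the Wazuh UNIX datagram socket. A minimal sketch of that framing, assuming a hypothetical local socket path in place of the real `WAZUH_PATH` queue:

import json
import socket

TEMPLATE_CHECK = '{{"integration": "prowler", "prowler": {0}}}'
TEMPLATE_MSG = "1:Wazuh-Prowler:{0}"


def send_finding(finding: dict, queue_path: str = "/tmp/demo-ossec-queue") -> None:
    # Build the same envelope the wodle builds: {"integration": "prowler", "prowler": {...}}
    envelope = json.loads(TEMPLATE_CHECK.format(json.dumps(finding)))
    # Prefix with the Wazuh queue header and ship it as a single datagram
    payload = TEMPLATE_MSG.format(json.dumps(envelope))
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    sock.connect(queue_path)  # the real script uses WAZUH_PATH + "/queue/ossec/queue"
    sock.send(payload.encode())
    sock.close()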
@@ -117,7 +117,7 @@ def print_checks(provider: str, check_list: set, bulk_checks_metadata: dict):
# List available groups
def list_groups(provider: str):
    groups = parse_groups_from_file(groups_file)
-    print(f"Available Groups:")
+    print("Available Groups:")

    for group, value in groups[provider].items():
        group_description = value["description"]
@@ -219,8 +219,3 @@ def run_check(check, audit_info, output_options):
        report(findings, output_options, audit_info)
    finally:
        pass
-
-
-def import_check(check_path: str) -> ModuleType:
-    lib = importlib.import_module(f"{check_path}")
-    return lib
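The removed `import_check` helper relies on `importlib` to load a check module from its dotted path at runtime. A minimal sketch of that dynamic-import pattern, with a hypothetical module path in the usage comment:

import importlib
from types import ModuleType


def import_check(check_path: str) -> ModuleType:
    # check_path is a dotted module path to a check implementation
    return importlib.import_module(check_path)


# Hypothetical usage: load a check module by its dotted path
# module = import_check("providers.aws.services.acm.acm_certificates_expiration_check")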
@@ -273,7 +273,6 @@ def send_to_s3_bucket(
):
    try:
        # Get only last part of the path
-        output_directory = output_directory.split("/")[-1]
        if output_mode == "csv":
            filename = f"{output_filename}{csv_file_suffix}"
        elif output_mode == "json":
@@ -233,7 +233,7 @@ class Test_Outputs:

        expected = Check_Output_JSON_ASFF()
        expected.Id = f"prowler-{finding.check_metadata.CheckID}-123456789012-eu-west-1-{hash_sha512('test-resource')}"
-        expected.ProductArn = f"arn:aws:securityhub:eu-west-1::product/prowler/prowler"
+        expected.ProductArn = "arn:aws:securityhub:eu-west-1::product/prowler/prowler"
        expected.ProductFields = ProductFields(
            ProviderVersion=prowler_version, ProwlerResourceName="test-resource"
        )
@@ -291,15 +291,9 @@ class Test_Outputs:
        client = boto3.client("s3")
        client.create_bucket(Bucket=bucket_name)
        # Create mock csv output file
-        output_directory = "."
+        output_directory = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures"
        output_mode = "csv"
-        filename = (
-            f"prowler-output-{input_audit_info.audited_account}-{output_file_timestamp}"
-        )
-        file_descriptor = open_file(
-            f"{output_directory}/{filename}{csv_file_suffix}",
-            "a",
-        )
+        filename = f"prowler-output-{input_audit_info.audited_account}"
        # Send mock csv file to mock S3 Bucket
        send_to_s3_bucket(
            filename,
@@ -321,4 +315,3 @@ class Test_Outputs:
            )["ContentType"]
            == "binary/octet-stream"
        )
-        remove(f"{output_directory}/{filename}{csv_file_suffix}")
@@ -136,7 +136,7 @@ def provider_set_session(
            current_audit_info.organizations_metadata = get_organizations_metadata(
                current_audit_info.audited_account, assumed_credentials
            )
-            logger.info(f"Organizations metadata retrieved")
+            logger.info("Organizations metadata retrieved")

    logger.info("Checking if role assumption is needed ...")
    if input_role:
@@ -194,11 +194,11 @@ def print_audit_credentials(audit_info: AWS_Audit_Info):
    # Beautify audited regions, set "all" if there is no filter region
    regions = (
        ", ".join(audit_info.audited_regions)
-        if audit_info.audited_regions != None
+        if audit_info.audited_regions is not None
        else "all"
    )
    # Beautify audited profile, set "default" if there is no profile set
-    profile = audit_info.profile if audit_info.profile != None else "default"
+    profile = audit_info.profile if audit_info.profile is not None else "default"

    report = f"""
This report is being generated using credentials below:
@@ -208,7 +208,7 @@ AWS Account: {Fore.YELLOW}[{audit_info.audited_account}]{Style.RESET_ALL} UserId
Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESET_ALL}
"""
    # If -A is set, print Assumed Role ARN
-    if audit_info.assumed_role_info.role_arn != None:
+    if audit_info.assumed_role_info.role_arn is not None:
        report += f"Assumed Role ARN: {Fore.YELLOW}[{audit_info.assumed_role_info.role_arn}]{Style.RESET_ALL}"
    print(report)
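Several of the hunks above replace `!= None` / `== None` comparisons with `is not None` / `is None` (flake8 E711). Identity checks are the reliable way to test for `None`, because `==` can be redefined by the operand's class; a small illustrative sketch:

class AlwaysEqual:
    def __eq__(self, other):
        return True


value = AlwaysEqual()
print(value == None)  # True, because __eq__ lies (this is what E711 flags)
print(value is None)  # False, identity cannot be overridden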
@@ -91,19 +91,19 @@ class Test_AWS_Provider:
        # Recover credentials for the assume role operation
        credentials = assume_role_response["Credentials"]
        # Test the response
-        ## SessionToken
+        # SessionToken
        credentials["SessionToken"].should.have.length_of(356)
        credentials["SessionToken"].startswith("FQoGZXIvYXdzE")
-        ## AccessKeyId
+        # AccessKeyId
        credentials["AccessKeyId"].should.have.length_of(20)
        credentials["AccessKeyId"].startswith("ASIA")
-        ## SecretAccessKey
+        # SecretAccessKey
        credentials["SecretAccessKey"].should.have.length_of(40)
-        ## Assumed Role
+        # Assumed Role
        assume_role_response["AssumedRoleUser"]["Arn"].should.equal(
            f"arn:aws:sts::{ACCOUNT_ID}:assumed-role/{role_name}/{sessionName}"
        )
-        ## AssumedRoleUser
+        # AssumedRoleUser
        assert assume_role_response["AssumedRoleUser"]["AssumedRoleId"].startswith(
            "AROA"
        )
@@ -113,30 +113,20 @@ class Test_Allowlist:
            }
        }

-        assert (
-            is_allowlisted(
-                allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler"
-            )
-            == True
+        assert is_allowlisted(
+            allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler"
        )

-        assert (
-            is_allowlisted(
-                allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test"
-            )
-            == True
+        assert is_allowlisted(
+            allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test"
        )

-        assert (
-            is_allowlisted(
-                allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler"
-            )
-            == True
+        assert is_allowlisted(
+            allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler"
        )

-        assert (
+        assert not (
            is_allowlisted(
                allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test"
            )
-            == False
        )
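The allowlist test rewrite drops the `== True` / `== False` comparisons in favour of plain truthiness asserts, which is what flake8's E712 recommends. A minimal pytest-style sketch of the same pattern, using a stand-in predicate rather than the real allowlist helper:

def is_allowlisted(allowlist, account, check, region, resource):
    # stand-in predicate for the sketch; the real helper lives in the allowlist module
    return resource in allowlist


def test_is_allowlisted():
    allowlist = {"prowler", "prowler-test"}
    assert is_allowlisted(allowlist, "123456789012", "check_test", "eu-west-1", "prowler")
    assert not is_allowlisted(allowlist, "123456789012", "check_test", "us-east-2", "test")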
@@ -18,7 +18,7 @@ def arn_parsing(arn):
    else:
        arn_parsed = arnparse(arn)
        # First check if region is empty (in IAM arns region is always empty)
-        if arn_parsed.region != None:
+        if arn_parsed.region is not None:
            raise RoleArnParsingIAMRegionNotEmpty
        else:
            # check if needed fields are filled:
@@ -27,12 +27,12 @@ def arn_parsing(arn):
            # - account_id
            # - resource_type
            # - resource
-            if arn_parsed.partition == None:
+            if arn_parsed.partition is None:
                raise RoleArnParsingPartitionEmpty
            elif arn_parsed.service != "iam":
                raise RoleArnParsingServiceNotIAM
            elif (
-                arn_parsed.account_id == None
+                arn_parsed.account_id is None
                or len(arn_parsed.account_id) != 12
                or not arn_parsed.account_id.isnumeric()
            ):
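The `arn_parsing` hunks show the validation rules for an IAM role ARN: a non-empty partition, the `iam` service, an empty region, and a 12-digit numeric account id. A condensed sketch of those rules over a pre-split ARN, using plain exceptions instead of the module's custom exception classes:

def validate_iam_role_arn(arn: str) -> None:
    # ARN layout: arn:partition:service:region:account-id:resource-type/resource
    parts = arn.split(":", 5)
    if len(parts) != 6:
        raise ValueError("ARN must have six colon-separated fields")
    _, partition, service, region, account_id, _resource = parts
    if region:
        raise ValueError("IAM ARNs must have an empty region field")
    if not partition:
        raise ValueError("partition must not be empty")
    if service != "iam":
        raise ValueError("service must be iam")
    if len(account_id) != 12 or not account_id.isnumeric():
        raise ValueError("account id must be a 12-digit number")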
@@ -94,7 +94,7 @@ class Test_accessanalyzer_enabled_without_findings:
        assert result[1].status == "FAIL"
        assert (
            result[1].status_extended
-            == f"IAM Access Analyzer Test Analyzer has 10 active findings"
+            == "IAM Access Analyzer Test Analyzer has 10 active findings"
        )
        assert result[1].resource_id == "Test Analyzer"

@@ -128,7 +128,7 @@ class Test_accessanalyzer_enabled_without_findings:
        assert result[0].status == "PASS"
        assert (
            result[0].status_extended
-            == f"IAM Access Analyzer Test Analyzer has no active findings"
+            == "IAM Access Analyzer Test Analyzer has no active findings"
        )
        assert result[0].resource_id == "Test Analyzer"

@@ -162,6 +162,6 @@ class Test_accessanalyzer_enabled_without_findings:
        assert result[0].status == "FAIL"
        assert (
            result[0].status_extended
-            == f"IAM Access Analyzer Test Analyzer is not active"
+            == "IAM Access Analyzer Test Analyzer is not active"
        )
        assert result[0].resource_id == "Test Analyzer"
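These hunks, and many of the ones that follow, are the flake8 F541 fix: f-string literals with no placeholders become plain strings. A tiny illustration of the rule:

name = "test-filter"

# F541: nothing is interpolated, so the f prefix is noise
msg_bad = f"No CloudWatch log groups found with metric filters or alarms associated."

# Either drop the prefix or actually interpolate something
msg_plain = "No CloudWatch log groups found with metric filters or alarms associated."
msg_fmt = f"CloudWatch log group found with metric filter {name} but no alarms associated."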
@@ -11,12 +11,15 @@ AWS_REGION = "eu-west-1"
# Mocking Access Analyzer Calls
make_api_call = botocore.client.BaseClient._make_api_call

-# As you can see the operation_name has the list_analyzers snake_case form but
-# we are using the ListAnalyzers form.
-# Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
-#
-# We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+    """
+    Mock every AWS API call using Boto3
+
+    As you can see the operation_name has the list_analyzers snake_case form but
+    we are using the ListAnalyzers form.
+    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+    """
    if operation_name == "ListAnalyzers":
        return {
            "analyzers": [
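This mocking approach, repeated across the service tests below, swaps `botocore.client.BaseClient._make_api_call` for a function that answers specific operation names with canned payloads. A minimal self-contained sketch of the pattern; the operation name and payload here are illustrative:

from unittest.mock import patch

import boto3
import botocore

orig_make_api_call = botocore.client.BaseClient._make_api_call


def mock_make_api_call(self, operation_name, kwarg):
    # Operation names use the PascalCase API form, e.g. ListAnalyzers
    if operation_name == "ListAnalyzers":
        return {"analyzers": [{"name": "Test Analyzer", "status": "ACTIVE"}]}
    # Fall back to the real call for anything not stubbed
    return orig_make_api_call(self, operation_name, kwarg)


@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
def list_analyzers_stubbed():
    client = boto3.client("accessanalyzer", region_name="eu-west-1")
    return client.list_analyzers()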
@@ -2,7 +2,7 @@ from colorama import Fore, Style

from lib.check.models import Check

-### This check has no findings since it is manual
+# This check has no findings since it is manual


class account_maintain_current_contact_details(Check):
@@ -2,7 +2,7 @@ from colorama import Fore, Style

from lib.check.models import Check

-### This check has no findings since it is manual
+# This check has no findings since it is manual


class account_security_contact_information_is_registered(Check):
@@ -2,7 +2,7 @@ from colorama import Fore, Style

from lib.check.models import Check

-### This check has no findings since it is manual
+# This check has no findings since it is manual


class account_security_questions_are_registered_in_the_aws_account(Check):
@@ -44,8 +44,8 @@ class Test_ACM_Service:
        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
        acm = ACM(audit_info)
-        for client in acm.regional_clients.values():
-            assert client.__class__.__name__ == "ACM"
+        for regional_client in acm.regional_clients.values():
+            assert regional_client.__class__.__name__ == "ACM"

    # Test ACM Session
    @mock_acm
@@ -44,8 +44,8 @@ class Test_APIGateway_Service:
        # APIGateway client for this test class
        audit_info = self.set_mocked_audit_info()
        apigateway = APIGateway(audit_info)
-        for client in apigateway.regional_clients.values():
-            assert client.__class__.__name__ == "APIGateway"
+        for regional_client in apigateway.regional_clients.values():
+            assert regional_client.__class__.__name__ == "APIGateway"

    # Test APIGateway Session
    @mock_apigateway
@@ -97,7 +97,7 @@ class Test_APIGateway_Service:
        # APIGateway client for this test class
        audit_info = self.set_mocked_audit_info()
        apigateway = APIGateway(audit_info)
-        assert apigateway.rest_apis[0].authorizer == True
+        assert apigateway.rest_apis[0].authorizer is True

    # Test APIGateway Get Rest API
    @mock_apigateway
@@ -111,7 +111,7 @@ class Test_APIGateway_Service:
        # APIGateway client for this test class
        audit_info = self.set_mocked_audit_info()
        apigateway = APIGateway(audit_info)
-        assert apigateway.rest_apis[0].public_endpoint == False
+        assert apigateway.rest_apis[0].public_endpoint is False

    # Test APIGateway Get Stages
    @mock_apigateway
@@ -162,4 +162,4 @@ class Test_APIGateway_Service:
        )
        audit_info = self.set_mocked_audit_info()
        apigateway = APIGateway(audit_info)
-        assert apigateway.rest_apis[0].stages[0].logging == True
+        assert apigateway.rest_apis[0].stages[0].logging is True
@@ -9,10 +9,14 @@ AWS_REGION = "us-east-1"

# Mocking ApiGatewayV2 Calls
make_api_call = botocore.client.BaseClient._make_api_call
-# Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
-#
-# We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+    """
+    We have to mock every AWS API call using Boto3
+
+    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+    """
    if operation_name == "GetAuthorizers":
        return {"Items": [{"AuthorizerId": "authorizer-id", "Name": "test-authorizer"}]}
    elif operation_name == "GetStages":
@@ -9,10 +9,14 @@ AWS_REGION = "us-east-1"

# Mocking ApiGatewayV2 Calls
make_api_call = botocore.client.BaseClient._make_api_call
-# Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
-#
-# We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+    """
+    We have to mock every AWS API call using Boto3
+
+    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+    """
    if operation_name == "GetAuthorizers":
        return {"Items": [{"AuthorizerId": "authorizer-id", "Name": "test-authorizer"}]}
    elif operation_name == "GetStages":
@@ -11,10 +11,14 @@ AWS_REGION = "us-east-1"

# Mocking ApiGatewayV2 Calls
make_api_call = botocore.client.BaseClient._make_api_call
-# Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
-#
-# We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+    """
+    We have to mock every AWS API call using Boto3
+
+    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+    """
    if operation_name == "GetAuthorizers":
        return {"Items": [{"AuthorizerId": "authorizer-id", "Name": "test-authorizer"}]}
    elif operation_name == "GetStages":
@@ -69,8 +73,8 @@ class Test_ApiGatewayV2_Service:
        # ApiGatewayV2 client for this test class
        audit_info = self.set_mocked_audit_info()
        apigatewayv2 = ApiGatewayV2(audit_info)
-        for client in apigatewayv2.regional_clients.values():
-            assert client.__class__.__name__ == "ApiGatewayV2"
+        for regional_client in apigatewayv2.regional_clients.values():
+            assert regional_client.__class__.__name__ == "ApiGatewayV2"

    # Test ApiGatewayV2 Session
    @mock_apigatewayv2
@@ -118,7 +122,7 @@ class Test_ApiGatewayV2_Service:
        # ApiGatewayV2 client for this test class
        audit_info = self.set_mocked_audit_info()
        apigatewayv2 = ApiGatewayV2(audit_info)
-        assert apigatewayv2.apis[0].authorizer == True
+        assert apigatewayv2.apis[0].authorizer is True

    # Test ApiGatewayV2 Get Stages
    @mock_apigatewayv2
@@ -130,4 +134,4 @@ class Test_ApiGatewayV2_Service:

        audit_info = self.set_mocked_audit_info()
        apigatewayv2 = ApiGatewayV2(audit_info)
-        assert apigatewayv2.apis[0].stages[0].logging == True
+        assert apigatewayv2.apis[0].stages[0].logging is True
@@ -4,7 +4,8 @@ from providers.aws.services.appstream.appstream_client import appstream_client

max_session_duration_seconds = get_config_var("max_session_duration_seconds")
"""max_session_duration_seconds, default: 36000 seconds (10 hours)"""
-# Check if there are AppStream Fleets with the user maximum session duration no longer than 10 hours


class appstream_fleet_maximum_session_duration(Check):
    """Check if there are AppStream Fleets with the user maximum session duration no longer than 10 hours"""

@@ -5,7 +5,7 @@ from providers.aws.services.appstream.appstream_client import appstream_client
max_disconnect_timeout_in_seconds = get_config_var("max_disconnect_timeout_in_seconds")
"""max_disconnect_timeout_in_seconds, default: 300 seconds (5 minutes)"""

-# Check if there are AppStream Fleets with the session disconnect timeout set to 5 minutes or less
class appstream_fleet_session_disconnect_timeout(Check):
    """Check if there are AppStream Fleets with the session disconnect timeout set to 5 minutes or less"""

@@ -7,7 +7,7 @@ max_idle_disconnect_timeout_in_seconds = get_config_var(
)
"""max_idle_disconnect_timeout_in_seconds, default: 600 seconds (10 minutes)"""

-# Check if there are AppStream Fleets with the idle disconnect timeout set to 10 minutes or less
class appstream_fleet_session_idle_disconnect_timeout(Check):
    """Check if there are AppStream Fleets with the idle disconnect timeout set to 10 minutes or less"""
@@ -13,12 +13,15 @@ AWS_REGION = "eu-west-1"
# Mocking Access Analyzer Calls
make_api_call = botocore.client.BaseClient._make_api_call

-# As you can see the operation_name has the list_analyzers snake_case form but
-# we are using the ListAnalyzers form.
-# Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
-#
-# We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+    """
+    We have to mock every AWS API call using Boto3
+
+    As you can see the operation_name has the list_analyzers snake_case form but
+    we are using the ListAnalyzers form.
+    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+    """
    if operation_name == "DescribeFleets":
        return {
            "Fleets": [
@@ -86,7 +89,7 @@ class Test_AppStream_Service:
        assert appstream.fleets[0].max_user_duration_in_seconds == 100
        assert appstream.fleets[0].disconnect_timeout_in_seconds == 900
        assert appstream.fleets[0].idle_disconnect_timeout_in_seconds == 900
-        assert appstream.fleets[0].enable_default_internet_access == False
+        assert appstream.fleets[0].enable_default_internet_access is False
        assert appstream.fleets[0].region == AWS_REGION

        assert (
@@ -97,5 +100,5 @@ class Test_AppStream_Service:
        assert appstream.fleets[1].max_user_duration_in_seconds == 57600
        assert appstream.fleets[1].disconnect_timeout_in_seconds == 900
        assert appstream.fleets[1].idle_disconnect_timeout_in_seconds == 900
-        assert appstream.fleets[1].enable_default_internet_access == True
+        assert appstream.fleets[1].enable_default_internet_access is True
        assert appstream.fleets[1].region == AWS_REGION
@@ -46,8 +46,8 @@ class Test_AutoScaling_Service:
        # AutoScaling client for this test class
        audit_info = self.set_mocked_audit_info()
        autoscaling = AutoScaling(audit_info)
-        for client in autoscaling.regional_clients.values():
-            assert client.__class__.__name__ == "AutoScaling"
+        for regional_client in autoscaling.regional_clients.values():
+            assert regional_client.__class__.__name__ == "AutoScaling"

    # Test AutoScaling Session
    @mock_autoscaling
@@ -39,12 +39,15 @@ dummy_template = {
# Mocking Access Analyzer Calls
make_api_call = botocore.client.BaseClient._make_api_call

-# As you can see the operation_name has the list_analyzers snake_case form but
-# we are using the ListAnalyzers form.
-# Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
-#
-# We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+    """
+    As you can see the operation_name has the list_analyzers snake_case form but
+    we are using the ListAnalyzers form.
+    Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+
+    We have to mock every AWS API call using Boto3
+    """
    if operation_name == "CreateStack":
        return {
            "StackId": "arn:aws:cloudformation:eu-west-1:123456789012:stack/Test-Stack/796c8d26-b390-41d7-a23c-0702c4e78b60"
@@ -122,13 +125,6 @@ def mock_generate_regional_clients(service, audit_info):
    return {AWS_REGION: regional_client}


-# Mock generate_regional_clients()
-def mock_generate_regional_clients(service, audit_info):
-    regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
-    regional_client.region = AWS_REGION
-    return {AWS_REGION: regional_client}
-
-
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
@patch(
@@ -207,7 +203,7 @@ class Test_CloudFormation_Service:
        assert cloudformation.stacks[0].arn == stack_arn["StackId"]
        assert cloudformation.stacks[0].name == "Test-Stack"
        assert cloudformation.stacks[0].outputs == ["TestOutput1:TestValue1"]
-        assert cloudformation.stacks[0].enable_termination_protection == True
-        assert cloudformation.stacks[0].is_nested_stack == False
+        assert cloudformation.stacks[0].enable_termination_protection is True
+        assert cloudformation.stacks[0].is_nested_stack is False
        assert cloudformation.stacks[0].root_nested_stack == ""
        assert cloudformation.stacks[0].region == AWS_REGION
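The removed block above was a duplicate of the `mock_generate_regional_clients` helper defined a few lines earlier; the tests only need one copy of it to pin every service client to a single region. A sketch of what that helper does, with the region name as an assumption:

AWS_REGION = "eu-west-1"  # assumed test region


def mock_generate_regional_clients(service, audit_info):
    # Return a single regional client instead of one client per enabled region
    regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
    regional_client.region = AWS_REGION
    return {AWS_REGION: regional_client}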
@@ -21,8 +21,8 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
            for bucket in s3_client.buckets:
                # Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
                # (for example due to a SCP) we are going to try access an attribute from a None type
-                if trail_bucket == bucket.name and bucket.acl_grantee:
-                    for grant in bucket.acl_grantee:
+                if trail_bucket == bucket.name and bucket.acl_grantees:
+                    for grant in bucket.acl_grantees:
                        if (
                            grant.URI
                            == "http://acs.amazonaws.com/groups/global/AllUsers"
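Besides the attribute rename from `acl_grantee` to `acl_grantees` (presumably the name the S3 service wrapper actually defines, given that this is a fix), the public-ACL test reduces to matching a grantee URI against the global AllUsers group. A small sketch with hypothetical grant objects:

ALL_USERS_URI = "http://acs.amazonaws.com/groups/global/AllUsers"


def bucket_is_public(acl_grantees) -> bool:
    # acl_grantees: iterable of objects with a URI attribute (None when not a group grant)
    return any(getattr(grant, "URI", None) == ALL_USERS_URI for grant in acl_grantees)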
@@ -50,16 +50,10 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:

    @mock_cloudtrail
    @mock_s3
-    def test_trail_bucket_not_valid_acl(self):
-        cloudtrail_client = client("cloudtrail", region_name="us-east-1")
+    def test_trail_bucket_public_acl(self):
        s3_client = client("s3", region_name="us-east-1")
-        trail_name_us = "trail_test_us"
        bucket_name_us = "bucket_test_us"
        s3_client.create_bucket(Bucket=bucket_name_us)
-        trail_us = cloudtrail_client.create_trail(
-            Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
-        )

        s3_client.put_bucket_acl(
            AccessControlPolicy={
                "Grants": [
@@ -78,6 +72,13 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
            },
            Bucket=bucket_name_us,
        )

+        trail_name_us = "trail_test_us"
+        cloudtrail_client = client("cloudtrail", region_name="us-east-1")
+        trail_us = cloudtrail_client.create_trail(
+            Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
+        )
+
        from providers.aws.lib.audit_info.audit_info import current_audit_info
        from providers.aws.services.cloudtrail.cloudtrail_service import Cloudtrail
        from providers.aws.services.s3.s3_service import S3
@@ -89,7 +90,7 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
            new=Cloudtrail(current_audit_info),
        ):
            with mock.patch(
-                "providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
+                "providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.s3_client",
                new=S3(current_audit_info),
            ):
                # Test Check
@@ -111,7 +112,7 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:

    @mock_cloudtrail
    @mock_s3
-    def test_trail_bucket_not_valid_acl(self):
+    def test_trail_bucket_not_public_acl(self):
        cloudtrail_client = client("cloudtrail", region_name="us-east-1")
        s3_client = client("s3", region_name="us-east-1")
        trail_name_us = "trail_test_us"
@@ -30,7 +30,7 @@ class cloudtrail_multi_region_enabled(Check):
            else:
                report.status = "FAIL"
                report.status_extended = (
-                    f"No CloudTrail trails enabled and logging were found"
+                    "No CloudTrail trails enabled and logging were found"
                )
                report.region = cloudtrail_client.region
                report.resource_arn = "No trails"
@@ -54,10 +54,10 @@ class Test_cloudtrail_multi_region_enabled:
            Bucket=bucket_name_eu,
            CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
        )
-        trail_us = cloudtrail_client_us_east_1.create_trail(
+        _ = cloudtrail_client_us_east_1.create_trail(
            Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
        )
-        trail_eu = cloudtrail_client_eu_west_1.create_trail(
+        _ = cloudtrail_client_eu_west_1.create_trail(
            Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
        )

@@ -113,8 +113,8 @@ class Test_cloudtrail_multi_region_enabled:
        cloudtrail_client_eu_west_1.create_trail(
            Name=trail_name_eu, S3BucketName=bucket_name_eu, IsMultiRegionTrail=False
        )
-        response = cloudtrail_client_us_east_1.start_logging(Name=trail_name_us)
-        status = cloudtrail_client_us_east_1.get_trail_status(Name=trail_name_us)
+        _ = cloudtrail_client_us_east_1.start_logging(Name=trail_name_us)
+        _ = cloudtrail_client_us_east_1.get_trail_status(Name=trail_name_us)

        from providers.aws.lib.audit_info.audit_info import current_audit_info
        from providers.aws.services.cloudtrail.cloudtrail_service import Cloudtrail
@@ -10,7 +10,7 @@ class cloudtrail_s3_dataevents_enabled(Check):
        report.resource_id = "No trails"
        report.resource_arn = "No trails"
        report.status = "FAIL"
-        report.status_extended = f"No CloudTrail trails have a data event to record all S3 object-level API operations."
+        report.status_extended = "No CloudTrail trails have a data event to record all S3 object-level API operations."
        for trail in cloudtrail_client.trails:
            for data_event in trail.data_events:
                # Check if trail has a data event for all S3 Buckets for write/read
@@ -55,7 +55,7 @@ class Test_cloudtrail_s3_dataevents_enabled:
        cloudtrail_client_us_east_1.create_trail(
            Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
        )
-        data_events_response = cloudtrail_client_us_east_1.put_event_selectors(
+        _ = cloudtrail_client_us_east_1.put_event_selectors(
            TrailName=trail_name_us,
            EventSelectors=[
                {
@@ -104,7 +104,7 @@ class Test_cloudtrail_s3_dataevents_enabled:
        trail_us = cloudtrail_client_us_east_1.create_trail(
            Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
        )
-        data_events_response = cloudtrail_client_us_east_1.put_event_selectors(
+        _ = cloudtrail_client_us_east_1.put_event_selectors(
            TrailName=trail_name_us,
            EventSelectors=[
                {
@@ -41,8 +41,8 @@ class Test_Cloudtrail_Service:
    def test_client(self):
        audit_info = self.set_mocked_audit_info()
        cloudtrail = Cloudtrail(audit_info)
-        for client in cloudtrail.regional_clients.values():
-            assert client.__class__.__name__ == "CloudTrail"
+        for regional_client in cloudtrail.regional_clients.values():
+            assert regional_client.__class__.__name__ == "CloudTrail"

    # Test Cloudtrail session
    @mock_cloudtrail
@@ -13,7 +13,7 @@ class cloudwatch_changes_to_network_acls_alarm_configured(Check):
|
|||||||
report = Check_Report(self.metadata)
|
report = Check_Report(self.metadata)
|
||||||
report.status = "FAIL"
|
report.status = "FAIL"
|
||||||
report.status_extended = (
|
report.status_extended = (
|
||||||
f"No CloudWatch log groups found with metric filters or alarms associated."
|
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||||
)
|
)
|
||||||
report.region = "us-east-1"
|
report.region = "us-east-1"
|
||||||
report.resource_id = ""
|
report.resource_id = ""
|
||||||
|
|||||||
@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
|
|||||||
assert result[0].status == "FAIL"
|
assert result[0].status == "FAIL"
|
||||||
assert (
|
assert (
|
||||||
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_changes_to_network_gateways_alarm_configured(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_changes_to_network_route_tables_alarm_configured(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_changes_to_vpcs_alarm_configured(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@@ -15,7 +15,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_change
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -210,7 +210,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -287,6 +287,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -15,7 +15,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_change
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -210,7 +210,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -287,6 +287,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_authentication_failures(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk(Chec
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -210,7 +210,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -287,6 +287,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_for_s3_bucket_policy_changes(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_policy_changes(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_root_usage(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_security_group_changes(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_sign_in_without_mfa(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

@@ -13,7 +13,7 @@ class cloudwatch_log_metric_filter_unauthorized_api_calls(Check):
report = Check_Report(self.metadata)
report.status = "FAIL"
report.status_extended = (
- f"No CloudWatch log groups found with metric filters or alarms associated."
+ "No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = "us-east-1"
report.resource_id = ""

@@ -202,7 +202,7 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter but no alarms associated."
)
assert result[0].resource_id == "/log-group/test"

@@ -277,6 +277,6 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
assert result[0].status == "PASS"
assert (
result[0].status_extended
- == f"CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
+ == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
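The recurring edit in the CloudWatch hunks above is flake8's F541 rule: an f-string that contains no placeholders is reduced to a plain string literal. A minimal Python sketch of the pattern (the variable names in the last line are illustrative and not taken from any one check):

status_extended = f"No CloudWatch log groups found with metric filters or alarms associated."  # flagged: f-string without placeholders (F541)
status_extended = "No CloudWatch log groups found with metric filters or alarms associated."   # plain literal, same message
status_extended = f"CloudWatch log group {log_group} found with metric filter {metric_filter}."  # f-strings stay where interpolation is real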
@@ -12,7 +12,7 @@ class config_recorder_all_regions_enabled(Check):
# Check if Config is enabled in region
if not recorder.name:
report.status = "FAIL"
- report.status_extended = f"No AWS Config recorders in region."
+ report.status_extended = "No AWS Config recorders in region."
else:
if recorder.recording:
if recorder.last_status == "Failure":

@@ -62,7 +62,7 @@ class Test_config_recorder_all_regions_enabled:
assert recorder.status == "FAIL"
assert (
recorder.status_extended
- == f"AWS Config recorder default is disabled."
+ == "AWS Config recorder default is disabled."
)
assert recorder.resource_id == "default"

@@ -102,6 +102,6 @@ class Test_config_recorder_all_regions_enabled:
assert recorder.status == "PASS"
assert (
recorder.status_extended
- == f"AWS Config recorder default is enabled."
+ == "AWS Config recorder default is enabled."
)
assert recorder.resource_id == "default"

@@ -44,8 +44,8 @@ class Test_Config_Service:
# Config client for this test class
audit_info = self.set_mocked_audit_info()
config = Config(audit_info)
- for client in config.regional_clients.values():
-     assert client.__class__.__name__ == "ConfigService"
+ for regional_client in config.regional_clients.values():
+     assert regional_client.__class__.__name__ == "ConfigService"

# Test Config Session
@mock_config

@@ -86,4 +86,4 @@ class Test_Config_Service:
# Search for the recorder just created
for recorder in config.recorders:
if recorder.name == "default":
- assert recorder.recording == True
+ assert recorder.recording is True
@@ -41,6 +41,6 @@ class ec2_elastic_ip_shodan(Check):
findings.append(report)
else:
logger.error(
- f"ERROR: No Shodan API Key -- Please input a Shodan API Key with -N/--shodan or in config.yaml"
+ "ERROR: No Shodan API Key -- Please input a Shodan API Key with -N/--shodan or in config.yaml"
)
return findings

@@ -44,8 +44,8 @@ class Test_EC2_Service:
# EC2 client for this test class
audit_info = self.set_mocked_audit_info()
ec2 = EC2(audit_info)
- for client in ec2.regional_clients.values():
-     assert client.__class__.__name__ == "EC2"
+ for regional_client in ec2.regional_clients.values():
+     assert regional_client.__class__.__name__ == "EC2"

# Test EC2 Session
@mock_ec2

@@ -164,4 +164,4 @@ class Test_EC2_Service:
ec2 = EC2(audit_info)
for snapshot in ec2.snapshots:
if snapshot.id == snapshot_id:
- assert snapshot.public == True
+ assert snapshot.public is True
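The assertion rewrites above switch from equality to identity when comparing against booleans, the style pycodestyle reports as E712. A short standalone illustration (the attribute names mirror the tests but the snippet is only a sketch):

assert recorder.recording == True   # flagged by pycodestyle (E712)
assert recorder.recording is True   # identity against the True singleton
assert snapshot.public is True      # same pattern in the EC2 snapshot test

The loop-variable rename from client to regional_client in the service tests likely avoids shadowing the boto3 client helper those tests import at module level.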
@@ -1,4 +1,3 @@
- from re import T
from typing import Any


@@ -8,7 +7,8 @@ def check_network_acl(rules: Any, protocol: str, port: str) -> bool:

# Spliting IPv6 from IPv4 rules
rules_IPv6 = list(
- filter(lambda rule: rule.get("CidrBlock") is None and not rule["Egress"], rules))
+ filter(lambda rule: rule.get("CidrBlock") is None and not rule["Egress"], rules)
+ )

# For IPv6
# Rules must order by RuleNumber

@@ -18,11 +18,9 @@ def check_network_acl(rules: Any, protocol: str, port: str) -> bool:
and rule["RuleAction"] == "deny"
and (
rule["Protocol"] == "-1"
- or
- (
+ or (
rule["Protocol"] == protocol
- and
- rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
+ and rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
)
)
):

@@ -34,11 +32,9 @@ def check_network_acl(rules: Any, protocol: str, port: str) -> bool:
and rule["RuleAction"] == "allow"
and (
rule["Protocol"] == "-1"
- or
- (
+ or (
rule["Protocol"] == protocol
- and
- rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
+ and rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
)
)
):

@@ -48,7 +44,11 @@ def check_network_acl(rules: Any, protocol: str, port: str) -> bool:
# There are not IPv6 Public access here

# Spliting IPv4 from IPv6 rules
- rules_IPv4 = list(filter(lambda rule: rule.get("Ipv6CidrBlock") is None and not rule["Egress"], rules))
+ rules_IPv4 = list(
+     filter(
+         lambda rule: rule.get("Ipv6CidrBlock") is None and not rule["Egress"], rules
+     )
+ )

# For IPv4
# Rules must order by RuleNumber

@@ -58,11 +58,9 @@ def check_network_acl(rules: Any, protocol: str, port: str) -> bool:
and rule["RuleAction"] == "deny"
and (
rule["Protocol"] == "-1"
- or
- (
+ or (
rule["Protocol"] == protocol
- and
- rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
+ and rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
)
)
):

@@ -75,11 +73,9 @@ def check_network_acl(rules: Any, protocol: str, port: str) -> bool:
and rule["RuleAction"] == "allow"
and (
rule["Protocol"] == "-1"
- or
- (
+ or (
rule["Protocol"] == protocol
- and
- rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
+ and rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
)
)
):
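The check_network_acl hunks above are black reformatting one boolean expression rather than changing its logic. Pulled out as a standalone helper, the reformatted condition reads roughly as below (the function name is illustrative; rule dictionaries are assumed to follow the EC2 network ACL entry shape with Protocol and PortRange keys):

def rule_opens_port(rule: dict, protocol: str, port: int) -> bool:
    # A rule covers the port when it targets every protocol ("-1") or this
    # protocol with the port inside the rule's PortRange.
    return rule["Protocol"] == "-1" or (
        rule["Protocol"] == protocol
        and rule["PortRange"]["From"] <= port <= rule["PortRange"]["To"]
    )

Per the comments in the hunks, rules are ordered by RuleNumber, with deny entries evaluated before allow entries for both the IPv6 and IPv4 rule sets.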
File diff suppressed because it is too large
@@ -35,7 +35,7 @@ class Test_iam_no_root_access_key_test:
# raise Exception
assert result[0].status == "PASS"
assert search(
- f"User <root_account> has not access keys.",
+ "User <root_account> has not access keys.",
result[0].status_extended,
)
assert result[0].resource_id == "<root_account>"

@@ -73,7 +73,7 @@ class Test_iam_no_root_access_key_test:
# raise Exception
assert result[0].status == "FAIL"
assert search(
- f"User <root_account> has one active access key.",
+ "User <root_account> has one active access key.",
result[0].status_extended,
)
assert result[0].resource_id == "<root_account>"

@@ -111,7 +111,7 @@ class Test_iam_no_root_access_key_test:
# raise Exception
assert result[0].status == "FAIL"
assert search(
- f"User <root_account> has one active access key.",
+ "User <root_account> has one active access key.",
result[0].status_extended,
)
assert result[0].resource_id == "<root_account>"

@@ -149,7 +149,7 @@ class Test_iam_no_root_access_key_test:
# raise Exception
assert result[0].status == "FAIL"
assert search(
- f"User <root_account> has two active access key.",
+ "User <root_account> has two active access key.",
result[0].status_extended,
)
assert result[0].resource_id == "<root_account>"
@@ -21,6 +21,6 @@ class iam_password_policy_lowercase(Check):
)
else:
report.status = "FAIL"
- report.status_extended = f"Password policy cannot be found"
+ report.status_extended = "Password policy cannot be found"
findings.append(report)
return findings

@@ -25,9 +25,7 @@ class iam_policy_attached_only_to_group_or_roles(Check):
report = Check_Report(self.metadata)
report.region = iam_client.region
report.status = "FAIL"
- report.status_extended = (
-     f"User {user.name} has the following inline policy {policy}"
- )
+ report.status_extended = f"User {user.name} has the following inline policy {policy}"
report.resource_id = user.name
findings.append(report)

@@ -81,7 +81,10 @@ class Test_iam_policy_attached_only_to_group_or_roles:
f"User {user} has attached the following policy",
result[0].status_extended,
)
- assert search(f"User {user} has the following inline policy", result[1].status_extended)
+ assert search(
+     f"User {user} has the following inline policy",
+     result[1].status_extended,
+ )

@mock_iam
def test_iam_user_inline_policy(self):
@@ -382,10 +382,10 @@ class MFADevice:
class User:
name: str
arn: str
- mfa_devices: "list[MFADevice]"
+ mfa_devices: list[MFADevice]
password_last_used: str
- attached_policies: "list[dict]"
- inline_policies: "list[str]"
+ attached_policies: list[dict]
+ inline_policies: list[str]

def __init__(self, name, arn, password_last_used):
self.name = name

@@ -400,8 +400,8 @@ class User:
class Group:
name: str
arn: str
- attached_policies: "list[dict]"
- users: " list[User]"
+ attached_policies: list[dict]
+ users: list[User]

def __init__(self, name, arn):
self.name = name
@@ -230,7 +230,7 @@ class Test_IAM_Service:
assert iam.password_policy.uppercase == require_upper
assert iam.password_policy.lowercase == require_lower
assert iam.password_policy.allow_change == allow_users_to_change
- assert iam.password_policy.expiration == True
+ assert iam.password_policy.expiration is True
assert iam.password_policy.max_age == max_password_age
assert iam.password_policy.reuse_prevention == password_reuse_prevention
assert iam.password_policy.hard_expiry == hard_expiry

@@ -379,7 +379,7 @@ class Test_IAM_Service:
@mock_iam
def test__get_entities_attached_to_support_roles__no_roles(self):
iam_client = client("iam")
- support_roles = iam_client.list_entities_for_policy(
+ _ = iam_client.list_entities_for_policy(
PolicyArn="arn:aws:iam::aws:policy/aws-service-role/AWSSupportServiceRolePolicy",
EntityFilter="Role",
)["PolicyRoles"]

@@ -458,7 +458,7 @@ class Test_IAM_Service:
assert iam.list_policies_version[0]["Statement"][0]["Effect"] == "Allow"
assert iam.list_policies_version[0]["Statement"][0]["Action"] == "*"
assert iam.list_policies_version[0]["Statement"][0]["Resource"] == "*"

# Test IAM List SAML Providers
@mock_iam
def test__list_saml_providers__(self):

@@ -16,6 +16,6 @@ class iam_support_role_created(Check):
report.status_extended = f"Support policy attached to role {iam_client.entities_attached_to_support_roles[0]['RoleName']}"
else:
report.status = "FAIL"
- report.status_extended = f"Support policy is not attached to any role"
+ report.status_extended = "Support policy is not attached to any role"
findings.append(report)
return findings
@@ -17,7 +17,7 @@ class kms_key_not_publicly_accessible(Check):
report.region = key.region
# If the "Principal" element value is set to { "AWS": "*" } and the policy statement is not using any Condition clauses to filter the access, the selected AWS KMS master key is publicly accessible.
for statement in key.policy["Statement"]:
- if "*" == statement["Principal"] and not "Condition" in statement:
+ if "*" == statement["Principal"] and "Condition" not in statement:
report.status = "FAIL"
report.status_extended = (
f"KMS key {key.id} may be publicly accessible!"

@@ -28,7 +28,7 @@ class kms_key_not_publicly_accessible(Check):
else:
principals = statement["Principal"]["AWS"]
for principal_arn in principals:
- if principal_arn == "*" and not "Condition" in statement:
+ if principal_arn == "*" and "Condition" not in statement:
report.status = "FAIL"
report.status_extended = (
f"KMS key {key.id} may be publicly accessible!"

@@ -46,8 +46,8 @@ class Test_ACM_Service:
# KMS client for this test class
audit_info = self.set_mocked_audit_info()
kms = KMS(audit_info)
- for client in kms.regional_clients.values():
-     assert client.__class__.__name__ == "KMS"
+ for regional_client in kms.regional_clients.values():
+     assert regional_client.__class__.__name__ == "KMS"

# Test KMS Session
@mock_kms

@@ -110,9 +110,9 @@ class Test_ACM_Service:
kms = KMS(audit_info)
assert len(kms.keys) == 2
assert kms.keys[0].arn == key1["Arn"]
- assert kms.keys[0].rotation_enabled == False
+ assert kms.keys[0].rotation_enabled is False
assert kms.keys[1].arn == key2["Arn"]
- assert kms.keys[1].rotation_enabled == True
+ assert kms.keys[1].rotation_enabled is True

# Test KMS Key policy
@mock_kms
@@ -13,7 +13,7 @@ class s3_bucket_public_access(Check):
):
report = Check_Report(self.metadata)
report.status = "PASS"
- report.status_extended = f"All S3 public access blocked at account level."
+ report.status_extended = "All S3 public access blocked at account level."
report.region = s3control_client.region
report.resource_id = s3_client.audited_account
findings.append(report)

@@ -251,7 +251,6 @@ class Bucket:
policy: dict
encryption: str
region: str
- acl_grantee: list[ACL_Grantee]
logging_target_bucket: str
ownership: str

@@ -272,6 +271,5 @@ class Bucket:
self.policy = {}
self.encryption = None
self.region = region
- self.acl_grantee = None
self.logging_target_bucket = None
self.ownership = None
@@ -99,7 +99,7 @@ class Test_S3_Service:
s3 = S3(audit_info)
assert len(s3.buckets) == 1
assert s3.buckets[0].name == bucket_name
- assert s3.buckets[0].versioning == True
+ assert s3.buckets[0].versioning is True

# Test S3 Get Bucket ACL
@mock_s3

@@ -204,7 +204,7 @@ class Test_S3_Service:
s3 = S3(audit_info)
assert len(s3.buckets) == 1
assert s3.buckets[0].name == bucket_name
- assert s3.buckets[0].logging == True
+ assert s3.buckets[0].logging is True

# Test S3 Get Bucket Policy
@mock_s3

@@ -270,24 +270,6 @@ class Test_S3_Service:
assert s3.buckets[0].name == bucket_name
assert s3.buckets[0].ownership == "BucketOwnerEnforced"

- # Test S3 Get Bucket Ownership Controls
- @mock_s3
- def test__get_bucket_ownership_controls__(self):
-     # Generate S3 Client
-     s3_client = client("s3")
-     # Create S3 Bucket
-     bucket_name = "test-bucket"
-     s3_client.create_bucket(
-         Bucket=bucket_name, ObjectOwnership="BucketOwnerEnforced"
-     )
-
-     # S3 client for this test class
-     audit_info = self.set_mocked_audit_info()
-     s3 = S3(audit_info)
-     assert len(s3.buckets) == 1
-     assert s3.buckets[0].name == bucket_name
-     assert s3.buckets[0].ownership == "BucketOwnerEnforced"
-
# Test S3 Get Public Access Block
@mock_s3
def test__get_public_access_block__(self):

@@ -319,7 +301,7 @@ class Test_S3_Service:

# Test S3 Control Account Get Public Access Block
@mock_s3control
- def test__get_public_access_block__(self):
+ def test__get_public_access_block__s3_control(self):
# Generate S3Control Client
s3control_client = client("s3control", region_name=AWS_REGION)
s3control_client.put_public_access_block(
@@ -15,7 +15,7 @@ class securityhub_enabled(Check):
)
else:
report.status = "FAIL"
- report.status_extended = f"Security Hub is not enabled"
+ report.status_extended = "Security Hub is not enabled"
report.resource_id = securityhub.id
report.resource_arn = securityhub.arn
findings.append(report)

@@ -11,12 +11,14 @@ AWS_REGION = "eu-west-1"
# Mocking Access Analyzer Calls
make_api_call = botocore.client.BaseClient._make_api_call

- # As you can see the operation_name has the list_analyzers snake_case form but
- # we are using the ListAnalyzers form.
- # Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
- #
- # We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+ """
+ We have to mock every AWS API call using Boto3
+
+ As you can see the operation_name has the snake_case
+ Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+ """
if operation_name == "GetEnabledStandards":
return {
"StandardsSubscriptions": [
@@ -10,10 +10,14 @@ ACCOUNT_ID = "123456789012"

# Mocking VPC Calls
make_api_call = botocore.client.BaseClient._make_api_call
- # Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
- #
- # We have to mock every AWS API call using Boto3
def mock_make_api_call(self, operation_name, kwarg):
+ """
+ We have to mock every AWS API call using Boto3
+
+ Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
+ """
if operation_name == "DescribeVpcEndpointServices":
return {
"ServiceDetails": [

@@ -67,7 +71,7 @@ class Test_vpc_endpoint_services_allowed_principals_trust_boundaries:
AvailabilityZone=f"{AWS_REGION}a",
)
lb_name = "lb_vpce-test"
- lb_arn = elbv2_client.create_load_balancer(
+ _ = elbv2_client.create_load_balancer(
Name=lb_name,
Subnets=[subnet["Subnet"]["SubnetId"]],
Scheme="internal",
@@ -46,8 +46,8 @@ class Test_VPC_Service:
# VPC client for this test class
audit_info = self.set_mocked_audit_info()
vpc = VPC(audit_info)
- for client in vpc.regional_clients.values():
-     assert client.__class__.__name__ == "EC2"
+ for regional_client in vpc.regional_clients.values():
+     assert regional_client.__class__.__name__ == "EC2"

# Test VPC Session
@mock_ec2

@@ -102,29 +102,7 @@ class Test_VPC_Service:
# Search created VPC among default ones
for vpc in vpc.vpcs:
if vpc.id == new_vpc["VpcId"]:
- assert vpc.flow_log == True
+ assert vpc.flow_log is True

- # Test VPC Describe VPC Peering connections
- @mock_ec2
- def test__describe_vpc_peering_connections__(self):
-     # Generate VPC Client
-     ec2_client = client("ec2", region_name=AWS_REGION)
-     # Create VPCs peers
-     vpc = ec2_client.create_vpc(CidrBlock="10.0.0.0/16")
-     peer_vpc = ec2_client.create_vpc(CidrBlock="11.0.0.0/16")
-     vpc_pcx = ec2_client.create_vpc_peering_connection(
-         VpcId=vpc["Vpc"]["VpcId"], PeerVpcId=peer_vpc["Vpc"]["VpcId"]
-     )
-     vpc_pcx_id = vpc_pcx["VpcPeeringConnection"]["VpcPeeringConnectionId"]
-
-     vpc_pcx = ec2_client.accept_vpc_peering_connection(
-         VpcPeeringConnectionId=vpc_pcx_id
-     )
-     # VPC client for this test class
-     audit_info = self.set_mocked_audit_info()
-     vpc = VPC(audit_info)
-     assert len(vpc.vpc_peering_connections) == 1
-     assert vpc.vpc_peering_connections[0].id == vpc_pcx_id
-
# Test VPC Describe VPC Peering connections
@mock_ec2

@@ -153,7 +131,7 @@ class Test_VPC_Service:
def test__describe_route_tables__(self):
# Generate VPC Client
ec2_client = client("ec2", region_name=AWS_REGION)
- ec2_resource = resource("ec2", region_name=AWS_REGION)
+ _ = resource("ec2", region_name=AWS_REGION)

# Create VPCs peers as well as a route
vpc = ec2_client.create_vpc(CidrBlock="10.0.0.0/16")

@@ -247,7 +225,7 @@ class Test_VPC_Service:
Type="network",
)["LoadBalancers"][0]["LoadBalancerArn"]

- service = ec2_client.create_vpc_endpoint_service_configuration(
+ _ = ec2_client.create_vpc_endpoint_service_configuration(
NetworkLoadBalancerArns=[lb_arn]
)
# VPC client for this test class
@@ -63,7 +63,7 @@ for item in original_matrix_regions_aws["prices"]:
regions_dict["regions"] = regions

# Store final JSON
- logging.info(f"Storing final JSON")
+ logging.info("Storing final JSON")
regions_by_service["services"] = services

# Write to file