diff --git a/.gitignore b/.gitignore index f6e46e88..02937ce9 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,9 @@ junit-reports/ # VSCode files .vscode/ + +terraform-kickstarter/.terraform.lock.hcl + +terraform-kickstarter/.terraform/providers/registry.terraform.io/hashicorp/aws/3.56.0/darwin_amd64/terraform-provider-aws_v3.56.0_x5 + +terraform-kickstarter/terraform.tfstate diff --git a/LICENSE b/LICENSE index b9b7f40f..0903645a 100644 --- a/LICENSE +++ b/LICENSE @@ -1,2 +1,201 @@ -Apache License 2.0 as specified in each file. You may obtain a copy of the License at LICENSE-APACHE-2.0 and -http://www.apache.org/licenses/LICENSE-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright 2018 Netflix, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/LICENSE-APACHE-2.0 b/LICENSE-APACHE-2.0 deleted file mode 100644 index 46de5eb1..00000000 --- a/LICENSE-APACHE-2.0 +++ /dev/null @@ -1,201 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, -and distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by -the copyright owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all -other entities that control, are controlled by, or are under common -control with that entity. For the purposes of this definition, -"control" means (i) the power, direct or indirect, to cause the -direction or management of such entity, whether by contract or -otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity -exercising permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, -including but not limited to software source code, documentation -source, and configuration files. - -"Object" form shall mean any form resulting from mechanical -transformation or translation of a Source form, including but -not limited to compiled object code, generated documentation, -and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or -Object form, made available under the License, as indicated by a -copyright notice that is included in or attached to the work -(an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object -form, that is based on (or derived from) the Work and for which the -editorial revisions, annotations, elaborations, or other modifications -represent, as a whole, an original work of authorship. For the purposes -of this License, Derivative Works shall not include works that remain -separable from, or merely link (or bind by name) to the interfaces of, -the Work and Derivative Works thereof. 
- -"Contribution" shall mean any work of authorship, including -the original version of the Work and any modifications or additions -to that Work or Derivative Works thereof, that is intentionally -submitted to Licensor for inclusion in the Work by the copyright owner -or by an individual or Legal Entity authorized to submit on behalf of -the copyright owner. For the purposes of this definition, "submitted" -means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, -and issue tracking systems that are managed by, or on behalf of, the -Licensor for the purpose of discussing and improving the Work, but -excluding communication that is conspicuously marked or otherwise -designated in writing by the copyright owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity -on behalf of whom a Contribution has been received by Licensor and -subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of -this License, each Contributor hereby grants to You a perpetual, -worldwide, non-exclusive, no-charge, royalty-free, irrevocable -copyright license to reproduce, prepare Derivative Works of, -publicly display, publicly perform, sublicense, and distribute the -Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of -this License, each Contributor hereby grants to You a perpetual, -worldwide, non-exclusive, no-charge, royalty-free, irrevocable -(except as stated in this section) patent license to make, have made, -use, offer to sell, sell, import, and otherwise transfer the Work, -where such license applies only to those patent claims licensable -by such Contributor that are necessarily infringed by their -Contribution(s) alone or by combination of their Contribution(s) -with the Work to which such Contribution(s) was submitted. If You -institute patent litigation against any entity (including a -cross-claim or counterclaim in a lawsuit) alleging that the Work -or a Contribution incorporated within the Work constitutes direct -or contributory patent infringement, then any patent licenses -granted to You under this License for that Work shall terminate -as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the -Work or Derivative Works thereof in any medium, with or without -modifications, and in Source or Object form, provided that You -meet the following conditions: - -(a) You must give any other recipients of the Work or -Derivative Works a copy of this License; and - -(b) You must cause any modified files to carry prominent notices -stating that You changed the files; and - -(c) You must retain, in the Source form of any Derivative Works -that You distribute, all copyright, patent, trademark, and -attribution notices from the Source form of the Work, -excluding those notices that do not pertain to any part of -the Derivative Works; and - -(d) If the Work includes a "NOTICE" text file as part of its -distribution, then any Derivative Works that You distribute must -include a readable copy of the attribution notices contained -within such NOTICE file, excluding those notices that do not -pertain to any part of the Derivative Works, in at least one -of the following places: within a NOTICE text file distributed -as part of the Derivative Works; within the Source form or -documentation, if provided along with the Derivative Works; or, -within a display generated by the Derivative Works, if and -wherever such third-party notices normally appear. The contents -of the NOTICE file are for informational purposes only and -do not modify the License. You may add Your own attribution -notices within Derivative Works that You distribute, alongside -or as an addendum to the NOTICE text from the Work, provided -that such additional attribution notices cannot be construed -as modifying the License. - -You may add Your own copyright statement to Your modifications and -may provide additional or different license terms and conditions -for use, reproduction, or distribution of Your modifications, or -for any such Derivative Works as a whole, provided Your use, -reproduction, and distribution of the Work otherwise complies with -the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, -any Contribution intentionally submitted for inclusion in the Work -by You to the Licensor shall be under the terms and conditions of -this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify -the terms of any separate license agreement you may have executed -with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade -names, trademarks, service marks, or product names of the Licensor, -except as required for reasonable and customary use in describing the -origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or -agreed to in writing, Licensor provides the Work (and each -Contributor provides its Contributions) on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -implied, including, without limitation, any warranties or conditions -of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A -PARTICULAR PURPOSE. You are solely responsible for determining the -appropriateness of using or redistributing the Work and assume any -risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, -whether in tort (including negligence), contract, or otherwise, -unless required by applicable law (such as deliberate and grossly -negligent acts) or agreed to in writing, shall any Contributor be -liable to You for damages, including any direct, indirect, special, -incidental, or consequential damages of any character arising as a -result of this License or out of the use or inability to use the -Work (including but not limited to damages for loss of goodwill, -work stoppage, computer failure or malfunction, or any and all -other commercial damages or losses), even if such Contributor -has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing -the Work or Derivative Works thereof, You may choose to offer, -and charge a fee for, acceptance of support, warranty, indemnity, -or other liability obligations and/or rights consistent with this -License. However, in accepting such obligations, You may act only -on Your own behalf and on Your sole responsibility, not on behalf -of any other Contributor, and only if You agree to indemnify, -defend, and hold each Contributor harmless for any liability -incurred by, or claims asserted against, such Contributor by reason -of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - -To apply the Apache License to your work, attach the following -boilerplate notice, with the fields enclosed by brackets "[]" -replaced with your own identifying information. (Don't include -the brackets!) The text should be enclosed in the appropriate -comment syntax for the file format. We also recommend that a -file or class name and description of purpose be included on the -same "printed page" as the copyright notice for easier -identification within third-party archives. - -Copyright 2021 Toni de la Fuente - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/Pipfile b/Pipfile index fbd6dce0..ae5a1ba9 100644 --- a/Pipfile +++ b/Pipfile @@ -7,7 +7,7 @@ verify_ssl = true [packages] boto3 = ">=1.9.188" -detect-secrets = ">=0.12.4" +detect-secrets = "==1.0.3" [requires] python_version = "3.7" diff --git a/README.md b/README.md index 727b7242..5cb8afde 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,11 @@ # Prowler - AWS Security Tool +[![Discord Shield](https://discordapp.com/api/guilds/807208614288818196/widget.png?style=shield)](https://discord.gg/UjSMCVnxSB) +[![Docker Pulls](https://img.shields.io/docker/pulls/toniblyx/prowler)](https://hub.docker.com/r/toniblyx/prowler) +[![aws-ecr](https://user-images.githubusercontent.com/3985464/141164269-8cfeef0f-6b62-4c99-8fe9-4537986a1613.png)](https://gallery.ecr.aws/o4g1s5r6/prowler) + + ## Table of Contents - [Description](#description) @@ -39,7 +44,7 @@ Read more about [CIS Amazon Web Services Foundations Benchmark v1.2.0 - 05-23-20 ## Features -+180 checks covering security best practices across all AWS regions and most of AWS services and related to the next groups: ++200 checks covering security best practices across all AWS regions and most of AWS services and related to the next groups: - Identity and Access Management [group1] - Logging [group2] @@ -56,6 +61,7 @@ Read more about [CIS Amazon Web Services Foundations Benchmark v1.2.0 - 05-23-20 - Internet exposed resources - EKS-CIS - Also includes PCI-DSS, ISO-27001, FFIEC, SOC2, ENS (Esquema Nacional de Seguridad of Spain). +- AWS FTR [FTR] Read more [here](#aws-ftr-checks) With Prowler you can: @@ -78,17 +84,26 @@ Prowler has been written in bash using AWS-CLI and it works in Linux and OSX. - Make sure the latest version of AWS-CLI is installed on your workstation (it works with either v1 or v2), and other components needed, with Python pip already installed: ```sh - pip install awscli detect-secrets + pip install awscli ``` - AWS-CLI can be also installed it using "brew", "apt", "yum" or manually from , but `detect-secrets` has to be installed using `pip`. You will need to install `jq` to get the most from Prowler. + > NOTE: the Yelp version of detect-secrets is no longer supported; the one from IBM is maintained now. Use the one mentioned below or the specific Yelp version 1.0.3 to make sure it works as expected (`pip install detect-secrets==1.0.3`): + ```sh + pip install "git+https://github.com/ibm/detect-secrets.git@master#egg=detect-secrets" + ``` -- Make sure jq is installed (example below with "apt" but use a valid package manager for your OS): + AWS-CLI can also be installed using "brew", "apt", "yum" or manually from , but `detect-secrets` has to be installed using `pip` or `pip3`. You will need to install `jq` to get the most from Prowler. + +- Make sure jq is installed: examples below with "apt" for Debian-like and "yum" for RedHat-like distros (like Amazon Linux): ```sh sudo apt install jq ``` + ```sh + sudo yum install jq + ``` + - Previous steps, from your workstation: ```sh @@ -187,23 +202,27 @@ Prowler has been written in bash using AWS-CLI and it works in Linux and OSX. ### Regions -By default Prowler scans all opt-in regions available, that might take a long execution time depending on the number of resources and regions used. Same applies for GovCloud or China regions. See below Advance usage for examples. +By default, Prowler scans all opt-in regions available, which might take a long execution time depending on the number of resources and regions used. Same applies for GovCloud or China regions. See below Advanced usage for examples.
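As a quick, hedged illustration of the two region options detailed just below (`-r` and `-f`, as documented in this README; the region choices are only examples, with Ohio being `us-east-2`):

```sh
# Scan only Dublin, keeping the default us-east-1 API endpoint region
./prowler -f eu-west-1

# Scan Dublin and Ohio together; note the single quotes and the space between regions
./prowler -f 'eu-west-1 us-east-2'

# GovCloud: point -r at a GovCloud region so the right API endpoints are queried
./prowler -r us-gov-west-1
```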
-Prowler has to parameters related to regions: `-r` that is used query AWS services API endpoints (it uses `us-east-1` by default and required for GovCloud or China) and the option `-f` that is to filter those regions you only want to scan. For example if you want to scan Dublin only use `-f eu-west-1` and if you want to scan Dublin and Ohio `-f 'eu-west-1 us-east-s'`, note the single quotes and space between regions. +Prowler has two parameters related to regions: `-r` that is used to query AWS services API endpoints (it uses `us-east-1` by default and is required for GovCloud or China) and the option `-f` that is used to filter only those regions you want to scan. For example, if you want to scan Dublin only use `-f eu-west-1` and if you want to scan Dublin and Ohio `-f 'eu-west-1 us-east-2'`, note the single quotes and space between regions. ## Screenshots -- Sample screenshot of report first lines: - +- Sample screenshot of default console report first lines of command `./prowler`: + - Sample screenshot of the html output `-M html`: - Prowler html + Prowler html + +- Sample screenshot of the Quicksight dashboard, see [https://quicksight-security-dashboard.workshop.aws](https://quicksight-security-dashboard.workshop.aws/): + + Prowler with Quicksight - Sample screenshot of the junit-xml output in CodeBuild `-M junit-xml`: - + ### Save your reports @@ -323,7 +342,7 @@ Usig the same for loop it can be scanned a list of accounts with a variable like ### GovCloud Prowler runs in GovCloud regions as well. To make sure it points to the right API endpoint use `-r` to either `us-gov-west-1` or `us-gov-east-1`. If not filter region is used it will look for resources in both GovCloud regions by default: -``` +```sh ./prowler -r us-gov-west-1 ``` > For Security Hub integration see below in Security Hub section. @@ -334,9 +353,12 @@ Flag `-x /my/own/checks` will include any check in that particular directory. To ### Show or log only FAILs -In order to remove noise and get only FAIL findings there is a `-q` flag that makes Prowler to show and log only FAILs. It can be combined with any other option. +In order to remove noise and get only FAIL findings there is a `-q` flag that makes Prowler show and log only FAILs. +It can be combined with any other option. +It will show WARNINGS when a resource is excluded, just to take into consideration. ```sh +# -q option combined with -M csv -b ./prowler -q -M csv -b ``` @@ -503,7 +525,7 @@ The `aws iam create-access-key` command will output the secret access key and th ## Extras -We are adding additional checks to improve the information gather from each account, these checks are out of the scope of the CIS benchmark for AWS but we consider them very helpful to get to know each AWS account set up and find issues on it. +We are adding additional checks to improve the information gathered from each account; these checks are out of the scope of the CIS benchmark for AWS, but we consider them very helpful to get to know each AWS account set up and find issues on it. Some of these checks look for publicly facing resources may not actually be fully public due to other layered controls like S3 Bucket Policies, Security Groups or Network ACLs. @@ -558,6 +580,18 @@ The `gdpr` group of checks uses existing and extra checks.
To get a GDPR report, ./prowler -g gdpr ``` +## AWS FTR Checks + +With this group of checks, Prowler shows results of checks related to the AWS Foundational Technical Review; more information [here](https://apn-checklists.s3.amazonaws.com/foundational/partner-hosted/partner-hosted/CVLHEC5X7.html). The list of checks can be seen in the group file at: + +[groups/group25_ftr](groups/group25_ftr) + +The `ftr` group of checks uses existing and extra checks. To get an AWS FTR report, run this command: + +```sh +./prowler -g ftr +``` + ## HIPAA Checks With this group of checks, Prowler shows results of controls related to the "Security Rule" of the Health Insurance Portability and Accountability Act aka [HIPAA](https://www.hhs.gov/hipaa/for-professionals/security/index.html) as defined in [45 CFR Subpart C - Security Standards for the Protection of Electronic Protected Health Information](https://www.law.cornell.edu/cfr/text/45/part-164/subpart-C) within [PART 160 - GENERAL ADMINISTRATIVE REQUIREMENTS](https://www.law.cornell.edu/cfr/text/45/part-160) and [Subpart A](https://www.law.cornell.edu/cfr/text/45/part-164/subpart-A) and [Subpart C](https://www.law.cornell.edu/cfr/text/45/part-164/subpart-C) of PART 164 - SECURITY AND PRIVACY @@ -601,7 +635,7 @@ To give it a quick shot just call: ### Scenarios -Currently this check group supports two different scenarios: +Currently, this check group supports two different scenarios: 1. Single account environment: no action required, the configuration is happening automatically for you. 2. Multi account environment: in case you environment has multiple trusted and known AWS accounts you maybe want to append them manually to [groups/group16_trustboundaries](groups/group16_trustboundaries) as a space separated list into `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable, then just run prowler. @@ -623,7 +657,7 @@ Every circle represents one AWS account. The dashed line represents the trust boundary, that separates trust and untrusted AWS accounts. The arrow simply describes the direction of the trust, however the data can potentially flow in both directions. -Single Account environment assumes that only the AWS account subject to this analysis is trusted. However there is a chance that two VPCs are existing within that one AWS account which are still trusted as a self reference. +Single Account environment assumes that only the AWS account subject to this analysis is trusted. However, there is a chance that two VPCs are existing within that one AWS account which are still trusted as a self reference. ![single-account-environment](/docs/images/prowler-single-account-environment.png) Multi Account environments assumes a minimum of two trusted or known accounts. For this particular example all trusted and known accounts will be tested. Therefore `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable in [groups/group16_trustboundaries](groups/group16_trustboundaries) should include all trusted accounts Account #A, Account #B, Account #C, and Account #D in order to finally raise Account #E and Account #F for being untrusted or unknown. diff --git a/checklist.txt b/checklist.txt new file mode 100644 index 00000000..83e556a2 --- /dev/null +++ b/checklist.txt @@ -0,0 +1,6 @@ +# You can add a comma-separated list of checks like this: +check11,check12 +extra72 # You can also use newlines for each check +check13 # This way allows you to add inline comments +# Both of these can be combined if you have a standard list and want to add +# inline comments for other checks.
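For reference, a minimal sketch of a custom check list written in the format the new checklist.txt above demonstrates (the file name and the particular checks chosen here are hypothetical, and this diff does not show which Prowler option consumes such a file):

```sh
# Hypothetical list following the checklist.txt format shown above:
# comma-separated checks, one check per line, and inline comments are all allowed.
cat > my-checklist.txt <<'EOF'
check11,check12
extra72  # one check per line also works
check13  # inline comments can explain why a check is listed
EOF
```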
\ No newline at end of file diff --git a/checks/check11 b/checks/check11 index 6162db56..f5d6a742 100644 --- a/checks/check11 +++ b/checks/check11 @@ -14,7 +14,7 @@ CHECK_ID_check11="1.1" CHECK_TITLE_check11="[check11] Avoid the use of the root account" CHECK_SCORED_check11="SCORED" -CHECK_TYPE_check11="LEVEL1" +CHECK_CIS_LEVEL_check11="LEVEL1" CHECK_SEVERITY_check11="High" CHECK_ASFF_TYPE_check11="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check101="check11" diff --git a/checks/check110 b/checks/check110 index 2e60a65e..7054ea8d 100644 --- a/checks/check110 +++ b/checks/check110 @@ -14,7 +14,7 @@ CHECK_ID_check110="1.10" CHECK_TITLE_check110="[check110] Ensure IAM password policy prevents password reuse: 24 or greater" CHECK_SCORED_check110="SCORED" -CHECK_TYPE_check110="LEVEL1" +CHECK_CIS_LEVEL_check110="LEVEL1" CHECK_SEVERITY_check110="Medium" CHECK_ASFF_TYPE_check110="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check110="check110" diff --git a/checks/check111 b/checks/check111 index 1a696f0b..9fbb90a4 100644 --- a/checks/check111 +++ b/checks/check111 @@ -14,7 +14,7 @@ CHECK_ID_check111="1.11" CHECK_TITLE_check111="[check111] Ensure IAM password policy expires passwords within 90 days or less" CHECK_SCORED_check111="SCORED" -CHECK_TYPE_check111="LEVEL1" +CHECK_CIS_LEVEL_check111="LEVEL1" CHECK_SEVERITY_check111="Medium" CHECK_ASFF_TYPE_check111="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check111="check111" diff --git a/checks/check112 b/checks/check112 index f2f6c422..16635494 100644 --- a/checks/check112 +++ b/checks/check112 @@ -14,7 +14,7 @@ CHECK_ID_check112="1.12" CHECK_TITLE_check112="[check112] Ensure no root account access key exists" CHECK_SCORED_check112="SCORED" -CHECK_TYPE_check112="LEVEL1" +CHECK_CIS_LEVEL_check112="LEVEL1" CHECK_SEVERITY_check112="Critical" CHECK_ASFF_TYPE_check112="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check112="check112" diff --git a/checks/check113 b/checks/check113 index 657c9b0a..b98a4f51 100644 --- a/checks/check113 +++ b/checks/check113 @@ -14,7 +14,7 @@ CHECK_ID_check113="1.13" CHECK_TITLE_check113="[check113] Ensure MFA is enabled for the root account" CHECK_SCORED_check113="SCORED" -CHECK_TYPE_check113="LEVEL1" +CHECK_CIS_LEVEL_check113="LEVEL1" CHECK_SEVERITY_check113="Critical" CHECK_ASFF_TYPE_check113="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check113="check113" diff --git a/checks/check114 b/checks/check114 index 7872583f..aaa9dc0b 100644 --- a/checks/check114 +++ b/checks/check114 @@ -14,7 +14,7 @@ CHECK_ID_check114="1.14" CHECK_TITLE_check114="[check114] Ensure hardware MFA is enabled for the root account" CHECK_SCORED_check114="SCORED" -CHECK_TYPE_check114="LEVEL2" +CHECK_CIS_LEVEL_check114="LEVEL2" CHECK_SEVERITY_check114="Critical" CHECK_ASFF_TYPE_check114="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check114="check114" diff --git a/checks/check115 b/checks/check115 index 356ba6d7..d7c7603a 100644 --- a/checks/check115 +++ b/checks/check115 @@ -14,7 +14,7 @@ CHECK_ID_check115="1.15" CHECK_TITLE_check115="[check115] Ensure security questions are registered in the AWS account" 
CHECK_SCORED_check115="NOT_SCORED" -CHECK_TYPE_check115="LEVEL1" +CHECK_CIS_LEVEL_check115="LEVEL1" CHECK_SEVERITY_check115="Medium" CHECK_ASFF_TYPE_check115="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check115="check115" diff --git a/checks/check116 b/checks/check116 index 18a0cbc3..0cc78432 100644 --- a/checks/check116 +++ b/checks/check116 @@ -14,7 +14,7 @@ CHECK_ID_check116="1.16" CHECK_TITLE_check116="[check116] Ensure IAM policies are attached only to groups or roles" CHECK_SCORED_check116="SCORED" -CHECK_TYPE_check116="LEVEL1" +CHECK_CIS_LEVEL_check116="LEVEL1" CHECK_SEVERITY_check116="Low" CHECK_ASFF_TYPE_check116="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check116="AwsIamUser" diff --git a/checks/check117 b/checks/check117 index e9854cd0..96658c5b 100644 --- a/checks/check117 +++ b/checks/check117 @@ -14,7 +14,7 @@ CHECK_ID_check117="1.17" CHECK_TITLE_check117="[check117] Maintain current contact details" CHECK_SCORED_check117="NOT_SCORED" -CHECK_TYPE_check117="LEVEL1" +CHECK_CIS_LEVEL_check117="LEVEL1" CHECK_SEVERITY_check117="Medium" CHECK_ASFF_TYPE_check117="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check117="check117" diff --git a/checks/check118 b/checks/check118 index 736bb594..f2e31c9d 100644 --- a/checks/check118 +++ b/checks/check118 @@ -14,7 +14,7 @@ CHECK_ID_check118="1.18" CHECK_TITLE_check118="[check118] Ensure security contact information is registered" CHECK_SCORED_check118="NOT_SCORED" -CHECK_TYPE_check118="LEVEL1" +CHECK_CIS_LEVEL_check118="LEVEL1" CHECK_SEVERITY_check118="Medium" CHECK_ASFF_TYPE_check118="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check118="check118" diff --git a/checks/check119 b/checks/check119 index e9d148dc..c1ebe209 100644 --- a/checks/check119 +++ b/checks/check119 @@ -14,7 +14,7 @@ CHECK_ID_check119="1.19" CHECK_TITLE_check119="[check119] Ensure IAM instance roles are used for AWS resource access from instances" CHECK_SCORED_check119="NOT_SCORED" -CHECK_TYPE_check119="LEVEL2" +CHECK_CIS_LEVEL_check119="LEVEL2" CHECK_SEVERITY_check119="Medium" CHECK_ASFF_TYPE_check119="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check119="AwsEc2Instance" diff --git a/checks/check12 b/checks/check12 index deca5af2..f2b5d920 100644 --- a/checks/check12 +++ b/checks/check12 @@ -14,7 +14,7 @@ CHECK_ID_check12="1.2" CHECK_TITLE_check12="[check12] Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password" CHECK_SCORED_check12="SCORED" -CHECK_TYPE_check12="LEVEL1" +CHECK_CIS_LEVEL_check12="LEVEL1" CHECK_SEVERITY_check12="High" CHECK_ASFF_TYPE_check12="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check12="AwsIamUser" diff --git a/checks/check120 b/checks/check120 index e2935a5b..3223345a 100644 --- a/checks/check120 +++ b/checks/check120 @@ -14,7 +14,7 @@ CHECK_ID_check120="1.20" CHECK_TITLE_check120="[check120] Ensure a support role has been created to manage incidents with AWS Support" CHECK_SCORED_check120="SCORED" -CHECK_TYPE_check120="LEVEL1" +CHECK_CIS_LEVEL_check120="LEVEL1" CHECK_SEVERITY_check120="Medium" CHECK_ASFF_TYPE_check120="Software and Configuration 
Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check120="AwsIamRole" @@ -31,7 +31,7 @@ check120(){ SUPPORTPOLICYARN=$($AWSCLI iam list-policies --query "Policies[?PolicyName == 'AWSSupportAccess'].Arn" $PROFILE_OPT --region $REGION --output text) if [[ $SUPPORTPOLICYARN ]];then for policyarn in $SUPPORTPOLICYARN;do - POLICYROLES=$($AWSCLI iam list-entities-for-policy --policy-arn $SUPPORTPOLICYARN $PROFILE_OPT --region $REGION --output text | awk -F$'\t' '{ print $3 }') + POLICYROLES=$($AWSCLI iam list-entities-for-policy --policy-arn $policyarn $PROFILE_OPT --region $REGION --output text | awk -F$'\t' '{ print $3 }') if [[ $POLICYROLES ]];then for name in $POLICYROLES; do textPass "$REGION: Support Policy attached to $name" "$REGION" "$name" diff --git a/checks/check121 b/checks/check121 index 64dd729c..140fb1a3 100644 --- a/checks/check121 +++ b/checks/check121 @@ -14,7 +14,7 @@ CHECK_ID_check121="1.21" CHECK_TITLE_check121="[check121] Do not setup access keys during initial user setup for all IAM users that have a console password" CHECK_SCORED_check121="NOT_SCORED" -CHECK_TYPE_check121="LEVEL1" +CHECK_CIS_LEVEL_check121="LEVEL1" CHECK_SEVERITY_check121="Medium" CHECK_ASFF_TYPE_check121="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check121="AwsIamUser" diff --git a/checks/check122 b/checks/check122 index 70423199..81ea3e61 100644 --- a/checks/check122 +++ b/checks/check122 @@ -14,7 +14,7 @@ CHECK_ID_check122="1.22" CHECK_TITLE_check122="[check122] Ensure IAM policies that allow full \"*:*\" administrative privileges are not created" CHECK_SCORED_check122="SCORED" -CHECK_TYPE_check122="LEVEL1" +CHECK_CIS_LEVEL_check122="LEVEL1" CHECK_SEVERITY_check122="Medium" CHECK_ASFF_TYPE_check122="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check122="AwsIamPolicy" diff --git a/checks/check13 b/checks/check13 index 81b2c52b..050ff84b 100644 --- a/checks/check13 +++ b/checks/check13 @@ -14,7 +14,7 @@ CHECK_ID_check13="1.3" CHECK_TITLE_check13="[check13] Ensure credentials unused for 90 days or greater are disabled" CHECK_SCORED_check13="SCORED" -CHECK_TYPE_check13="LEVEL1" +CHECK_CIS_LEVEL_check13="LEVEL1" CHECK_SEVERITY_check13="Medium" CHECK_ASFF_TYPE_check13="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check13="AwsIamUser" diff --git a/checks/check14 b/checks/check14 index 0d9d1cc7..fd669860 100644 --- a/checks/check14 +++ b/checks/check14 @@ -14,7 +14,7 @@ CHECK_ID_check14="1.4" CHECK_TITLE_check14="[check14] Ensure access keys are rotated every 90 days or less" CHECK_SCORED_check14="SCORED" -CHECK_TYPE_check14="LEVEL1" +CHECK_CIS_LEVEL_check14="LEVEL1" CHECK_SEVERITY_check14="Medium" CHECK_ASFF_TYPE_check14="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check14="AwsIamUser" diff --git a/checks/check15 b/checks/check15 index 079245d0..a5d0d749 100644 --- a/checks/check15 +++ b/checks/check15 @@ -14,7 +14,7 @@ CHECK_ID_check15="1.5" CHECK_TITLE_check15="[check15] Ensure IAM password policy requires at least one uppercase letter" CHECK_SCORED_check15="SCORED" -CHECK_TYPE_check15="LEVEL1" +CHECK_CIS_LEVEL_check15="LEVEL1" CHECK_SEVERITY_check15="Medium" CHECK_ASFF_TYPE_check15="Software and Configuration Checks/Industry and Regulatory 
Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check105="check15" diff --git a/checks/check16 b/checks/check16 index 719811d9..4ba1c855 100644 --- a/checks/check16 +++ b/checks/check16 @@ -14,7 +14,7 @@ CHECK_ID_check16="1.6" CHECK_TITLE_check16="[check16] Ensure IAM password policy require at least one lowercase letter" CHECK_SCORED_check16="SCORED" -CHECK_TYPE_check16="LEVEL1" +CHECK_CIS_LEVEL_check16="LEVEL1" CHECK_SEVERITY_check16="Medium" CHECK_ASFF_TYPE_check16="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check106="check16" diff --git a/checks/check17 b/checks/check17 index 72fdd247..92a97039 100644 --- a/checks/check17 +++ b/checks/check17 @@ -14,7 +14,7 @@ CHECK_ID_check17="1.7" CHECK_TITLE_check17="[check17] Ensure IAM password policy require at least one symbol" CHECK_SCORED_check17="SCORED" -CHECK_TYPE_check17="LEVEL1" +CHECK_CIS_LEVEL_check17="LEVEL1" CHECK_SEVERITY_check17="Medium" CHECK_ASFF_TYPE_check17="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check107="check17" diff --git a/checks/check18 b/checks/check18 index c13e101b..a4e27d24 100644 --- a/checks/check18 +++ b/checks/check18 @@ -14,7 +14,7 @@ CHECK_ID_check18="1.8" CHECK_TITLE_check18="[check18] Ensure IAM password policy require at least one number" CHECK_SCORED_check18="SCORED" -CHECK_TYPE_check18="LEVEL1" +CHECK_CIS_LEVEL_check18="LEVEL1" CHECK_SEVERITY_check18="Medium" CHECK_ASFF_TYPE_check18="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check108="check18" diff --git a/checks/check19 b/checks/check19 index e8b92818..49656b59 100644 --- a/checks/check19 +++ b/checks/check19 @@ -14,7 +14,7 @@ CHECK_ID_check19="1.9" CHECK_TITLE_check19="[check19] Ensure IAM password policy requires minimum length of 14 or greater" CHECK_SCORED_check19="SCORED" -CHECK_TYPE_check19="LEVEL1" +CHECK_CIS_LEVEL_check19="LEVEL1" CHECK_SEVERITY_check19="Medium" CHECK_ASFF_TYPE_check19="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check109="check19" diff --git a/checks/check21 b/checks/check21 index 0446243e..968f6c19 100644 --- a/checks/check21 +++ b/checks/check21 @@ -14,7 +14,7 @@ CHECK_ID_check21="2.1" CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions" CHECK_SCORED_check21="SCORED" -CHECK_TYPE_check21="LEVEL1" +CHECK_CIS_LEVEL_check21="LEVEL1" CHECK_SEVERITY_check21="High" CHECK_ASFF_TYPE_check21="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check21="AwsCloudTrailTrail" @@ -55,11 +55,14 @@ check21(){ textPass "$regx: Trail $trail is enabled for all regions" "$regx" "$trail" fi fi - done fi done if [[ $trail_count == 0 ]]; then - textFail "$regx: No CloudTrail trails were found in the account" "$regx" "$trail" + if [[ $FILTERREGION ]]; then + textFail "$regx: No CloudTrail trails were found in the filtered region" "$regx" "$trail" + else + textFail "$regx: No CloudTrail trails were found in the account" "$regx" "$trail" + fi fi -} +} \ No newline at end of file diff --git a/checks/check22 b/checks/check22 index 3ae3e775..cca1a8da 100644 --- a/checks/check22 +++ b/checks/check22 @@ -14,7 +14,7 @@ CHECK_ID_check22="2.2" CHECK_TITLE_check22="[check22] Ensure CloudTrail log file validation is enabled" CHECK_SCORED_check22="SCORED" 
-CHECK_TYPE_check22="LEVEL2" +CHECK_CIS_LEVEL_check22="LEVEL2" CHECK_SEVERITY_check22="Medium" CHECK_ASFF_TYPE_check22="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check22="AwsCloudTrailTrail" diff --git a/checks/check23 b/checks/check23 index 56984176..48b9bf44 100644 --- a/checks/check23 +++ b/checks/check23 @@ -14,7 +14,7 @@ CHECK_ID_check23="2.3" CHECK_TITLE_check23="[check23] Ensure the S3 bucket CloudTrail logs to is not publicly accessible" CHECK_SCORED_check23="SCORED" -CHECK_TYPE_check23="LEVEL1" +CHECK_CIS_LEVEL_check23="LEVEL1" CHECK_SEVERITY_check23="Critical" CHECK_ASFF_TYPE_check23="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check23="AwsS3Bucket" @@ -23,7 +23,7 @@ CHECK_ASFF_COMPLIANCE_TYPE_check23="ens-op.exp.10.aws.trail.3 ens-op.exp.10.aws. CHECK_SERVICENAME_check23="cloudtrail" CHECK_RISK_check23='Allowing public access to CloudTrail log content may aid an adversary in identifying weaknesses in the affected accounts use or configuration.' CHECK_REMEDIATION_check23='Analyze Bucket policy to validate appropriate permissions. Ensure the AllUsers principal is not granted privileges. Ensure the AuthenticatedUsers principal is not granted privileges.' -CHECK_DOC_check23='https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_ principal.html ' +CHECK_DOC_check23='https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html' CHECK_CAF_EPIC_check23='Logging and Monitoring' check23(){ diff --git a/checks/check24 b/checks/check24 index 57691f3b..68cabd67 100644 --- a/checks/check24 +++ b/checks/check24 @@ -14,7 +14,7 @@ CHECK_ID_check24="2.4" CHECK_TITLE_check24="[check24] Ensure CloudTrail trails are integrated with CloudWatch Logs" CHECK_SCORED_check24="SCORED" -CHECK_TYPE_check24="LEVEL1" +CHECK_CIS_LEVEL_check24="LEVEL1" CHECK_SEVERITY_check24="Low" CHECK_ASFF_TYPE_check24="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check24="AwsCloudTrailTrail" diff --git a/checks/check25 b/checks/check25 index c853cde5..752235fc 100644 --- a/checks/check25 +++ b/checks/check25 @@ -14,7 +14,7 @@ CHECK_ID_check25="2.5" CHECK_TITLE_check25="[check25] Ensure AWS Config is enabled in all regions" CHECK_SCORED_check25="SCORED" -CHECK_TYPE_check25="LEVEL1" +CHECK_CIS_LEVEL_check25="LEVEL1" CHECK_SEVERITY_check25="Medium" CHECK_ASFF_TYPE_check25="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check205="check25" diff --git a/checks/check26 b/checks/check26 index a6663a22..166fbea5 100644 --- a/checks/check26 +++ b/checks/check26 @@ -14,7 +14,7 @@ CHECK_ID_check26="2.6" CHECK_TITLE_check26="[check26] Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket" CHECK_SCORED_check26="SCORED" -CHECK_TYPE_check26="LEVEL1" +CHECK_CIS_LEVEL_check26="LEVEL1" CHECK_SEVERITY_check26="Medium" CHECK_ASFF_TYPE_check26="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check26="AwsS3Bucket" diff --git a/checks/check27 b/checks/check27 index fa6a432d..c5304c74 100644 --- a/checks/check27 +++ b/checks/check27 @@ -14,7 +14,7 @@ CHECK_ID_check27="2.7" CHECK_TITLE_check27="[check27] Ensure CloudTrail logs are encrypted at rest using KMS CMKs" CHECK_SCORED_check27="SCORED" 
-CHECK_TYPE_check27="LEVEL2" +CHECK_CIS_LEVEL_check27="LEVEL2" CHECK_SEVERITY_check27="Medium" CHECK_ASFF_TYPE_check27="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check27="AwsCloudTrailTrail" diff --git a/checks/check28 b/checks/check28 index aef746b1..1008a36d 100644 --- a/checks/check28 +++ b/checks/check28 @@ -14,7 +14,7 @@ CHECK_ID_check28="2.8" CHECK_TITLE_check28="[check28] Ensure rotation for customer created KMS CMKs is enabled" CHECK_SCORED_check28="SCORED" -CHECK_TYPE_check28="LEVEL2" +CHECK_CIS_LEVEL_check28="LEVEL2" CHECK_SEVERITY_check28="Medium" CHECK_ASFF_TYPE_check28="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check28="AwsKmsKey" diff --git a/checks/check29 b/checks/check29 index b5023894..dc9f37b4 100644 --- a/checks/check29 +++ b/checks/check29 @@ -14,14 +14,14 @@ CHECK_ID_check29="2.9" CHECK_TITLE_check29="[check29] Ensure VPC Flow Logging is Enabled in all VPCs" CHECK_SCORED_check29="SCORED" -CHECK_TYPE_check29="LEVEL2" +CHECK_CIS_LEVEL_check29="LEVEL2" CHECK_SEVERITY_check29="Medium" CHECK_ASFF_TYPE_check29="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check29="AwsEc2Vpc" CHECK_ALTERNATE_check209="check29" CHECK_ASFF_COMPLIANCE_TYPE_check29="ens-op.mon.1.aws.flow.1" CHECK_SERVICENAME_check29="vpc" -CHECK_RISK_check29='PC Flow Logs provide visibility into network traffic that traverses the VPC and can be used to detect anomalous traffic or insight during security workflows.' +CHECK_RISK_check29='VPC Flow Logs provide visibility into network traffic that traverses the VPC and can be used to detect anomalous traffic or insight during security workflows.' CHECK_REMEDIATION_check29='It is recommended that VPC Flow Logs be enabled for packet "Rejects" for VPCs. 
' CHECK_DOC_check29='http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/flow-logs.html ' CHECK_CAF_EPIC_check29='Logging and Monitoring' diff --git a/checks/check31 b/checks/check31 index 4411c6fa..b568027d 100644 --- a/checks/check31 +++ b/checks/check31 @@ -39,7 +39,7 @@ CHECK_ID_check31="3.1" CHECK_TITLE_check31="[check31] Ensure a log metric filter and alarm exist for unauthorized API calls" CHECK_SCORED_check31="SCORED" -CHECK_TYPE_check31="LEVEL1" +CHECK_CIS_LEVEL_check31="LEVEL1" CHECK_SEVERITY_check31="Medium" CHECK_ASFF_TYPE_check31="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check31="AwsCloudTrailTrail" diff --git a/checks/check310 b/checks/check310 index 0a2d53d9..0b303701 100644 --- a/checks/check310 +++ b/checks/check310 @@ -39,7 +39,7 @@ CHECK_ID_check310="3.10" CHECK_TITLE_check310="[check310] Ensure a log metric filter and alarm exist for security group changes" CHECK_SCORED_check310="SCORED" -CHECK_TYPE_check310="LEVEL2" +CHECK_CIS_LEVEL_check310="LEVEL2" CHECK_SEVERITY_check310="Medium" CHECK_ASFF_TYPE_check310="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check310="AwsCloudTrailTrail" diff --git a/checks/check311 b/checks/check311 index fb66edb6..21f0c612 100644 --- a/checks/check311 +++ b/checks/check311 @@ -39,7 +39,7 @@ CHECK_ID_check311="3.11" CHECK_TITLE_check311="[check311] Ensure a log metric filter and alarm exist for changes to Network Access Control Lists (NACL)" CHECK_SCORED_check311="SCORED" -CHECK_TYPE_check311="LEVEL2" +CHECK_CIS_LEVEL_check311="LEVEL2" CHECK_SEVERITY_check311="Medium" CHECK_ASFF_TYPE_check311="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check311="AwsCloudTrailTrail" diff --git a/checks/check312 b/checks/check312 index 1de26238..4391b8eb 100644 --- a/checks/check312 +++ b/checks/check312 @@ -39,7 +39,7 @@ CHECK_ID_check312="3.12" CHECK_TITLE_check312="[check312] Ensure a log metric filter and alarm exist for changes to network gateways" CHECK_SCORED_check312="SCORED" -CHECK_TYPE_check312="LEVEL1" +CHECK_CIS_LEVEL_check312="LEVEL1" CHECK_SEVERITY_check312="Medium" CHECK_ASFF_TYPE_check312="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check312="AwsCloudTrailTrail" diff --git a/checks/check313 b/checks/check313 index 2ce23a51..abf64ad8 100644 --- a/checks/check313 +++ b/checks/check313 @@ -39,7 +39,7 @@ CHECK_ID_check313="3.13" CHECK_TITLE_check313="[check313] Ensure a log metric filter and alarm exist for route table changes" CHECK_SCORED_check313="SCORED" -CHECK_TYPE_check313="LEVEL1" +CHECK_CIS_LEVEL_check313="LEVEL1" CHECK_SEVERITY_check313="Medium" CHECK_ASFF_TYPE_check313="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check313="AwsCloudTrailTrail" diff --git a/checks/check314 b/checks/check314 index a0d728bb..b7bdf533 100644 --- a/checks/check314 +++ b/checks/check314 @@ -39,7 +39,7 @@ CHECK_ID_check314="3.14" CHECK_TITLE_check314="[check314] Ensure a log metric filter and alarm exist for VPC changes" CHECK_SCORED_check314="SCORED" -CHECK_TYPE_check314="LEVEL1" +CHECK_CIS_LEVEL_check314="LEVEL1" CHECK_SEVERITY_check314="Medium" CHECK_ASFF_TYPE_check314="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations 
Benchmark" CHECK_ASFF_RESOURCE_TYPE_check314="AwsCloudTrailTrail" diff --git a/checks/check32 b/checks/check32 index b932b13a..9cc24aaa 100644 --- a/checks/check32 +++ b/checks/check32 @@ -39,7 +39,7 @@ CHECK_ID_check32="3.2" CHECK_TITLE_check32="[check32] Ensure a log metric filter and alarm exist for Management Console sign-in without MFA" CHECK_SCORED_check32="SCORED" -CHECK_TYPE_check32="LEVEL1" +CHECK_CIS_LEVEL_check32="LEVEL1" CHECK_SEVERITY_check32="Medium" CHECK_ASFF_TYPE_check32="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check32="AwsCloudTrailTrail" diff --git a/checks/check33 b/checks/check33 index 1cd54328..26b94710 100644 --- a/checks/check33 +++ b/checks/check33 @@ -39,7 +39,7 @@ CHECK_ID_check33="3.3" CHECK_TITLE_check33="[check33] Ensure a log metric filter and alarm exist for usage of root account" CHECK_SCORED_check33="SCORED" -CHECK_TYPE_check33="LEVEL1" +CHECK_CIS_LEVEL_check33="LEVEL1" CHECK_SEVERITY_check33="Medium" CHECK_ASFF_TYPE_check33="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check33="AwsCloudTrailTrail" diff --git a/checks/check34 b/checks/check34 index 250044e0..beb53bf1 100644 --- a/checks/check34 +++ b/checks/check34 @@ -39,7 +39,7 @@ CHECK_ID_check34="3.4" CHECK_TITLE_check34="[check34] Ensure a log metric filter and alarm exist for IAM policy changes" CHECK_SCORED_check34="SCORED" -CHECK_TYPE_check34="LEVEL1" +CHECK_CIS_LEVEL_check34="LEVEL1" CHECK_SEVERITY_check34="Medium" CHECK_ASFF_TYPE_check34="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check34="AwsCloudTrailTrail" diff --git a/checks/check35 b/checks/check35 index bae1f254..089bd5ab 100644 --- a/checks/check35 +++ b/checks/check35 @@ -39,7 +39,7 @@ CHECK_ID_check35="3.5" CHECK_TITLE_check35="[check35] Ensure a log metric filter and alarm exist for CloudTrail configuration changes" CHECK_SCORED_check35="SCORED" -CHECK_TYPE_check35="LEVEL1" +CHECK_CIS_LEVEL_check35="LEVEL1" CHECK_SEVERITY_check35="Medium" CHECK_ASFF_TYPE_check35="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check35="AwsCloudTrailTrail" diff --git a/checks/check36 b/checks/check36 index fc9e4c39..631ed607 100644 --- a/checks/check36 +++ b/checks/check36 @@ -39,7 +39,7 @@ CHECK_ID_check36="3.6" CHECK_TITLE_check36="[check36] Ensure a log metric filter and alarm exist for AWS Management Console authentication failures" CHECK_SCORED_check36="SCORED" -CHECK_TYPE_check36="LEVEL2" +CHECK_CIS_LEVEL_check36="LEVEL2" CHECK_SEVERITY_check36="Medium" CHECK_ASFF_TYPE_check36="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check36="AwsCloudTrailTrail" diff --git a/checks/check37 b/checks/check37 index 03f593ea..d7e8668b 100644 --- a/checks/check37 +++ b/checks/check37 @@ -39,7 +39,7 @@ CHECK_ID_check37="3.7" CHECK_TITLE_check37="[check37] Ensure a log metric filter and alarm exist for disabling or scheduled deletion of customer created KMS CMKs" CHECK_SCORED_check37="SCORED" -CHECK_TYPE_check37="LEVEL2" +CHECK_CIS_LEVEL_check37="LEVEL2" CHECK_SEVERITY_check37="Medium" CHECK_ASFF_TYPE_check37="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check37="AwsCloudTrailTrail" diff --git 
a/checks/check38 b/checks/check38 index 9d81443c..ce34e64d 100644 --- a/checks/check38 +++ b/checks/check38 @@ -39,7 +39,7 @@ CHECK_ID_check38="3.8" CHECK_TITLE_check38="[check38] Ensure a log metric filter and alarm exist for S3 bucket policy changes" CHECK_SCORED_check38="SCORED" -CHECK_TYPE_check38="LEVEL1" +CHECK_CIS_LEVEL_check38="LEVEL1" CHECK_SEVERITY_check38="Medium" CHECK_ASFF_TYPE_check38="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check38="AwsCloudTrailTrail" diff --git a/checks/check39 b/checks/check39 index aabbd359..15be0316 100644 --- a/checks/check39 +++ b/checks/check39 @@ -39,7 +39,7 @@ CHECK_ID_check39="3.9" CHECK_TITLE_check39="[check39] Ensure a log metric filter and alarm exist for AWS Config configuration changes" CHECK_SCORED_check39="SCORED" -CHECK_TYPE_check39="LEVEL2" +CHECK_CIS_LEVEL_check39="LEVEL2" CHECK_SEVERITY_check39="Medium" CHECK_ASFF_TYPE_check39="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check39="AwsCloudTrailTrail" diff --git a/checks/check41 b/checks/check41 index 02f0fbf5..f8af5e9b 100644 --- a/checks/check41 +++ b/checks/check41 @@ -14,7 +14,7 @@ CHECK_ID_check41="4.1" CHECK_TITLE_check41="[check41] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 22" CHECK_SCORED_check41="SCORED" -CHECK_TYPE_check41="LEVEL2" +CHECK_CIS_LEVEL_check41="LEVEL2" CHECK_SEVERITY_check41="High" CHECK_ASFF_TYPE_check41="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check41="AwsEc2SecurityGroup" diff --git a/checks/check42 b/checks/check42 index a2bf70fd..cf4b3cf2 100644 --- a/checks/check42 +++ b/checks/check42 @@ -14,7 +14,7 @@ CHECK_ID_check42="4.2" CHECK_TITLE_check42="[check42] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 3389" CHECK_SCORED_check42="SCORED" -CHECK_TYPE_check42="LEVEL2" +CHECK_CIS_LEVEL_check42="LEVEL2" CHECK_SEVERITY_check42="High" CHECK_ASFF_TYPE_check42="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check42="AwsEc2SecurityGroup" diff --git a/checks/check43 b/checks/check43 index 205f4eb3..c3a57e12 100644 --- a/checks/check43 +++ b/checks/check43 @@ -14,7 +14,7 @@ CHECK_ID_check43="4.3" CHECK_TITLE_check43="[check43] Ensure the default security group of every VPC restricts all traffic" CHECK_SCORED_check43="SCORED" -CHECK_TYPE_check43="LEVEL2" +CHECK_CIS_LEVEL_check43="LEVEL2" CHECK_SEVERITY_check43="High" CHECK_ASFF_TYPE_check43="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check43="AwsEc2SecurityGroup" diff --git a/checks/check44 b/checks/check44 index e5328c29..e6b8aee9 100644 --- a/checks/check44 +++ b/checks/check44 @@ -14,7 +14,7 @@ CHECK_ID_check44="4.4" CHECK_TITLE_check44="[check44] Ensure routing tables for VPC peering are \"least access\"" CHECK_SCORED_check44="NOT_SCORED" -CHECK_TYPE_check44="LEVEL2" +CHECK_CIS_LEVEL_check44="LEVEL2" CHECK_SEVERITY_check44="Medium" CHECK_ASFF_TYPE_check44="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check44="AwsEc2Vpc" diff --git a/checks/check45 b/checks/check45 index d68fc140..c9a461e9 100644 --- a/checks/check45 +++ b/checks/check45 @@ -14,7 +14,7 @@ CHECK_ID_check45="4.5" 
CHECK_TITLE_check45="[check45] Ensure no Network ACLs allow ingress from 0.0.0.0/0 to SSH port 22" CHECK_SCORED_check45="SCORED" -CHECK_TYPE_check45="LEVEL2" +CHECK_CIS_LEVEL_check45="LEVEL2" CHECK_SEVERITY_check45="High" CHECK_ASFF_TYPE_check45="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check45="AwsEc2NetworkAcl" diff --git a/checks/check46 b/checks/check46 index 02c2101b..72395991 100644 --- a/checks/check46 +++ b/checks/check46 @@ -14,7 +14,7 @@ CHECK_ID_check46="4.6" CHECK_TITLE_check46="[check46] Ensure no Network ACLs allow ingress from 0.0.0.0/0 to Microsoft RDP port 3389" CHECK_SCORED_check46="SCORED" -CHECK_TYPE_check46="LEVEL2" +CHECK_CIS_LEVEL_check46="LEVEL2" CHECK_SEVERITY_check46="High" CHECK_ASFF_TYPE_check46="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_check46="AwsEc2NetworkAcl" diff --git a/checks/check_extra71 b/checks/check_extra71 index 4bf1706c..ca1ecb50 100644 --- a/checks/check_extra71 +++ b/checks/check_extra71 @@ -13,7 +13,7 @@ CHECK_ID_extra71="7.1" CHECK_TITLE_extra71="[extra71] Ensure users of groups with AdministratorAccess policy have MFA tokens enabled" CHECK_SCORED_extra71="NOT_SCORED" -CHECK_TYPE_extra71="EXTRA" +CHECK_CIS_LEVEL_extra71="EXTRA" CHECK_SEVERITY_extra71="High" CHECK_ASFF_RESOURCE_TYPE_extra71="AwsIamUser" CHECK_ALTERNATE_extra701="extra71" diff --git a/checks/check_extra710 b/checks/check_extra710 index a1c10252..ffd3b035 100644 --- a/checks/check_extra710 +++ b/checks/check_extra710 @@ -13,7 +13,7 @@ CHECK_ID_extra710="7.10" CHECK_TITLE_extra710="[extra710] Check for internet facing EC2 Instances" CHECK_SCORED_extra710="NOT_SCORED" -CHECK_TYPE_extra710="EXTRA" +CHECK_CIS_LEVEL_extra710="EXTRA" CHECK_SEVERITY_extra710="Medium" CHECK_ASFF_RESOURCE_TYPE_extra710="AwsEc2Instance" CHECK_ALTERNATE_check710="extra710" diff --git a/checks/check_extra7100 b/checks/check_extra7100 index 8e2a3807..cd5d8e01 100644 --- a/checks/check_extra7100 +++ b/checks/check_extra7100 @@ -17,7 +17,7 @@ CHECK_ID_extra7100="7.100" CHECK_TITLE_extra7100="[extra7100] Ensure that no custom IAM policies exist which allow permissive role assumption (e.g. 
sts:AssumeRole on *)" CHECK_SCORED_extra7100="NOT_SCORED" -CHECK_TYPE_extra7100="EXTRA" +CHECK_CIS_LEVEL_extra7100="EXTRA" CHECK_SEVERITY_extra7100="Critical" CHECK_ASFF_RESOURCE_TYPE_extra7100="AwsIamPolicy" CHECK_ALTERNATE_check7100="extra7100" diff --git a/checks/check_extra7101 b/checks/check_extra7101 index e1ba8dbb..a4fd714c 100644 --- a/checks/check_extra7101 +++ b/checks/check_extra7101 @@ -14,7 +14,7 @@ CHECK_ID_extra7101="7.101" CHECK_TITLE_extra7101="[extra7101] Check if Amazon Elasticsearch Service (ES) domains have audit logging enabled" CHECK_SCORED_extra7101="NOT_SCORED" -CHECK_TYPE_extra7101="EXTRA" +CHECK_CIS_LEVEL_extra7101="EXTRA" CHECK_SEVERITY_extra7101="Low" CHECK_ASFF_RESOURCE_TYPE_extra7101="AwsElasticsearchDomain" CHECK_ALTERNATE_check7101="extra7101" diff --git a/checks/check_extra7102 b/checks/check_extra7102 index b9efcc29..f16c3908 100644 --- a/checks/check_extra7102 +++ b/checks/check_extra7102 @@ -13,7 +13,7 @@ CHECK_ID_extra7102="7.102" CHECK_TITLE_extra7102="[extra7102] Check if any of the Elastic or Public IP are in Shodan (requires Shodan API KEY)" CHECK_SCORED_extra7102="NOT_SCORED" -CHECK_TYPE_extra7102="EXTRA" +CHECK_CIS_LEVEL_extra7102="EXTRA" CHECK_SEVERITY_extra7102="High" CHECK_ASFF_RESOURCE_TYPE_extra7102="AwsEc2Eip" CHECK_ALTERNATE_check7102="extra7102" diff --git a/checks/check_extra7103 b/checks/check_extra7103 index 558a1d94..12ace203 100644 --- a/checks/check_extra7103 +++ b/checks/check_extra7103 @@ -14,7 +14,7 @@ CHECK_ID_extra7103="7.103" CHECK_TITLE_extra7103="[extra7103] Check if Amazon SageMaker Notebook instances have root access disabled" CHECK_SCORED_extra7103="NOT_SCORED" -CHECK_TYPE_extra7103="EXTRA" +CHECK_CIS_LEVEL_extra7103="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7103="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7103="extra7103" CHECK_SEVERITY_extra7103="Medium" diff --git a/checks/check_extra7104 b/checks/check_extra7104 index 00b9b065..7697ad50 100644 --- a/checks/check_extra7104 +++ b/checks/check_extra7104 @@ -14,7 +14,7 @@ CHECK_ID_extra7104="7.104" CHECK_TITLE_extra7104="[extra7104] Check if Amazon SageMaker Notebook instances have VPC settings configured" CHECK_SCORED_extra7104="NOT_SCORED" -CHECK_TYPE_extra7104="EXTRA" +CHECK_CIS_LEVEL_extra7104="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7104="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7104="extra7104" CHECK_SEVERITY_extra7104="Medium" diff --git a/checks/check_extra7105 b/checks/check_extra7105 index 1316a431..1b2d2c89 100644 --- a/checks/check_extra7105 +++ b/checks/check_extra7105 @@ -14,7 +14,7 @@ CHECK_ID_extra7105="7.105" CHECK_TITLE_extra7105="[extra7105] Check if Amazon SageMaker Models have network isolation enabled" CHECK_SCORED_extra7105="NOT_SCORED" -CHECK_TYPE_extra7105="EXTRA" +CHECK_CIS_LEVEL_extra7105="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7105="AwsSageMakerModel" CHECK_ALTERNATE_check7105="extra7105" CHECK_SEVERITY_extra7105="Medium" diff --git a/checks/check_extra7106 b/checks/check_extra7106 index e49b8a50..af09f269 100644 --- a/checks/check_extra7106 +++ b/checks/check_extra7106 @@ -14,7 +14,7 @@ CHECK_ID_extra7106="7.106" CHECK_TITLE_extra7106="[extra7106] Check if Amazon SageMaker Models have VPC settings configured" CHECK_SCORED_extra7106="NOT_SCORED" -CHECK_TYPE_extra7106="EXTRA" +CHECK_CIS_LEVEL_extra7106="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7106="AwsSageMakerModel" CHECK_ALTERNATE_check7106="extra7106" CHECK_SEVERITY_extra7106="Medium" diff --git a/checks/check_extra7107 b/checks/check_extra7107 index 2f8c70a6..e5536e87 100644 --- 
a/checks/check_extra7107 +++ b/checks/check_extra7107 @@ -14,7 +14,7 @@ CHECK_ID_extra7107="7.107" CHECK_TITLE_extra7107="[extra7107] Check if Amazon SageMaker Training jobs have intercontainer encryption enabled" CHECK_SCORED_extra7107="NOT_SCORED" -CHECK_TYPE_extra7107="EXTRA" +CHECK_CIS_LEVEL_extra7107="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7107="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7107="extra7107" CHECK_SEVERITY_extra7107="Medium" diff --git a/checks/check_extra7108 b/checks/check_extra7108 index f84f6997..2956afe8 100644 --- a/checks/check_extra7108 +++ b/checks/check_extra7108 @@ -14,7 +14,7 @@ CHECK_ID_extra7108="7.108" CHECK_TITLE_extra7108="[extra7108] Check if Amazon SageMaker Training jobs have volume and output with KMS encryption enabled" CHECK_SCORED_extra7108="NOT_SCORED" -CHECK_TYPE_extra7108="EXTRA" +CHECK_CIS_LEVEL_extra7108="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7108="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7108="extra7108" CHECK_SEVERITY_extra7108="Medium" diff --git a/checks/check_extra7109 b/checks/check_extra7109 index 80778fd2..90d036e0 100644 --- a/checks/check_extra7109 +++ b/checks/check_extra7109 @@ -14,7 +14,7 @@ CHECK_ID_extra7109="7.109" CHECK_TITLE_extra7109="[extra7109] Check if Amazon SageMaker Training jobs have network isolation enabled" CHECK_SCORED_extra7109="NOT_SCORED" -CHECK_TYPE_extra7109="EXTRA" +CHECK_CIS_LEVEL_extra7109="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7109="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7109="extra7109" CHECK_SEVERITY_extra7109="Medium" diff --git a/checks/check_extra711 b/checks/check_extra711 index 4a0b5d66..34e2947a 100644 --- a/checks/check_extra711 +++ b/checks/check_extra711 @@ -13,7 +13,7 @@ CHECK_ID_extra711="7.11" CHECK_TITLE_extra711="[extra711] Check for Publicly Accessible Redshift Clusters" CHECK_SCORED_extra711="NOT_SCORED" -CHECK_TYPE_extra711="EXTRA" +CHECK_CIS_LEVEL_extra711="EXTRA" CHECK_SEVERITY_extra711="High" CHECK_ASFF_RESOURCE_TYPE_extra711="AwsRedshiftCluster" CHECK_ALTERNATE_check711="extra711" diff --git a/checks/check_extra7110 b/checks/check_extra7110 index 5a6ebefc..448e2308 100644 --- a/checks/check_extra7110 +++ b/checks/check_extra7110 @@ -14,7 +14,7 @@ CHECK_ID_extra7110="7.110" CHECK_TITLE_extra7110="[extra7110] Check if Amazon SageMaker Training job have VPC settings configured." 
CHECK_SCORED_extra7110="NOT_SCORED" -CHECK_TYPE_extra7110="EXTRA" +CHECK_CIS_LEVEL_extra7110="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7110="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7110="extra7110" CHECK_SEVERITY_extra7110="Medium" diff --git a/checks/check_extra7111 b/checks/check_extra7111 index 965c6048..a6669a09 100644 --- a/checks/check_extra7111 +++ b/checks/check_extra7111 @@ -14,7 +14,7 @@ CHECK_ID_extra7111="7.111" CHECK_TITLE_extra7111="[extra7111] Check if Amazon SageMaker Notebook instances have direct internet access" CHECK_SCORED_extra7111="NOT_SCORED" -CHECK_TYPE_extra7111="EXTRA" +CHECK_CIS_LEVEL_extra7111="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7111="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7111="extra7111" CHECK_SEVERITY_extra7111="Medium" diff --git a/checks/check_extra7112 b/checks/check_extra7112 index ed954a9d..e031065d 100644 --- a/checks/check_extra7112 +++ b/checks/check_extra7112 @@ -14,7 +14,7 @@ CHECK_ID_extra7112="7.112" CHECK_TITLE_extra7112="[extra7112] Check if Amazon SageMaker Notebook instances have data encryption enabled" CHECK_SCORED_extra7112="NOT_SCORED" -CHECK_TYPE_extra7112="EXTRA" +CHECK_CIS_LEVEL_extra7112="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra7112="AwsSageMakerNotebookInstance" CHECK_ALTERNATE_check7112="extra7112" CHECK_SEVERITY_extra7112="Medium" diff --git a/checks/check_extra7113 b/checks/check_extra7113 index 3412a56b..4afcfdc3 100644 --- a/checks/check_extra7113 +++ b/checks/check_extra7113 @@ -25,7 +25,7 @@ CHECK_ID_extra7113="7.113" CHECK_TITLE_extra7113="[extra7113] Check if RDS instances have deletion protection enabled " CHECK_SCORED_extra7113="NOT_SCORED" -CHECK_TYPE_extra7113="EXTRA" +CHECK_CIS_LEVEL_extra7113="EXTRA" CHECK_SEVERITY_extra7113="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7113="AwsRdsDbInstance" CHECK_ALTERNATE_check7113="extra7113" diff --git a/checks/check_extra7114 b/checks/check_extra7114 index 93c7906a..91a640bb 100644 --- a/checks/check_extra7114 +++ b/checks/check_extra7114 @@ -14,7 +14,7 @@ CHECK_ID_extra7114="7.114" CHECK_TITLE_extra7114="[extra7114] Check if Glue development endpoints have S3 encryption enabled." CHECK_SCORED_extra7114="NOT_SCORED" -CHECK_TYPE_extra7114="EXTRA" +CHECK_CIS_LEVEL_extra7114="EXTRA" CHECK_SEVERITY_extra7114="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7114="AwsGlue" CHECK_ALTERNATE_check7114="extra7114" diff --git a/checks/check_extra7115 b/checks/check_extra7115 index 14e4e9b8..e09e6590 100644 --- a/checks/check_extra7115 +++ b/checks/check_extra7115 @@ -13,7 +13,7 @@ CHECK_ID_extra7115="7.115" CHECK_TITLE_extra7115="[extra7115] Check if Glue database connection has SSL connection enabled." CHECK_SCORED_extra7115="NOT_SCORED" -CHECK_TYPE_extra7115="EXTRA" +CHECK_CIS_LEVEL_extra7115="EXTRA" CHECK_SEVERITY_extra7115="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7115="AwsGlue" CHECK_ALTERNATE_check7115="extra7115" diff --git a/checks/check_extra7116 b/checks/check_extra7116 index 43136f76..77f4eee8 100644 --- a/checks/check_extra7116 +++ b/checks/check_extra7116 @@ -13,7 +13,7 @@ CHECK_ID_extra7116="7.116" CHECK_TITLE_extra7116="[extra7116] Check if Glue data catalog settings have metadata encryption enabled." 
CHECK_SCORED_extra7116="NOT_SCORED" -CHECK_TYPE_extra7116="EXTRA" +CHECK_CIS_LEVEL_extra7116="EXTRA" CHECK_SEVERITY_extra7116="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7116="AwsGlue" CHECK_ALTERNATE_check7116="extra7116" diff --git a/checks/check_extra7117 b/checks/check_extra7117 index f0eeb03e..ce2ebcab 100644 --- a/checks/check_extra7117 +++ b/checks/check_extra7117 @@ -13,7 +13,7 @@ CHECK_ID_extra7117="7.117" CHECK_TITLE_extra7117="[extra7117] Check if Glue data catalog settings have encrypt connection password enabled." CHECK_SCORED_extra7117="NOT_SCORED" -CHECK_TYPE_extra7117="EXTRA" +CHECK_CIS_LEVEL_extra7117="EXTRA" CHECK_SEVERITY_extra7117="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7117="AwsGlue" CHECK_ALTERNATE_check7117="extra7117" diff --git a/checks/check_extra7118 b/checks/check_extra7118 index 12cc7b08..a55996ec 100644 --- a/checks/check_extra7118 +++ b/checks/check_extra7118 @@ -13,7 +13,7 @@ CHECK_ID_extra7118="7.118" CHECK_TITLE_extra7118="[extra7118] Check if Glue ETL Jobs have S3 encryption enabled." CHECK_SCORED_extra7118="NOT_SCORED" -CHECK_TYPE_extra7118="EXTRA" +CHECK_CIS_LEVEL_extra7118="EXTRA" CHECK_SEVERITY_extra7118="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7118="AwsGlue" CHECK_ALTERNATE_check7118="extra7118" diff --git a/checks/check_extra7119 b/checks/check_extra7119 index 4f6e904b..a6a0a4f2 100644 --- a/checks/check_extra7119 +++ b/checks/check_extra7119 @@ -14,7 +14,7 @@ CHECK_ID_extra7119="7.119" CHECK_TITLE_extra7119="[extra7119] Check if Glue development endpoints have CloudWatch logs encryption enabled." CHECK_SCORED_extra7119="NOT_SCORED" -CHECK_TYPE_extra7119="EXTRA" +CHECK_CIS_LEVEL_extra7119="EXTRA" CHECK_SEVERITY_extra7119="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7119="AwsGlue" CHECK_ALTERNATE_check7119="extra7119" diff --git a/checks/check_extra712 b/checks/check_extra712 index 754c3559..d14d60ba 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -13,7 +13,7 @@ CHECK_ID_extra712="7.12" CHECK_TITLE_extra712="[extra712] Check if Amazon Macie is enabled" CHECK_SCORED_extra712="NOT_SCORED" -CHECK_TYPE_extra712="EXTRA" +CHECK_CIS_LEVEL_extra712="EXTRA" CHECK_SEVERITY_extra712="Low" CHECK_ALTERNATE_check712="extra712" CHECK_ASFF_RESOURCE_TYPE_extra712="AwsMacieSession" diff --git a/checks/check_extra7120 b/checks/check_extra7120 index d4217ed2..37cd3094 100644 --- a/checks/check_extra7120 +++ b/checks/check_extra7120 @@ -13,7 +13,7 @@ CHECK_ID_extra7120="7.120" CHECK_TITLE_extra7120="[extra7120] Check if Glue ETL Jobs have CloudWatch Logs encryption enabled." CHECK_SCORED_extra7120="NOT_SCORED" -CHECK_TYPE_extra7120="EXTRA" +CHECK_CIS_LEVEL_extra7120="EXTRA" CHECK_SEVERITY_extra7120="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7120="AwsGlue" CHECK_ALTERNATE_check7120="extra7120" diff --git a/checks/check_extra7121 b/checks/check_extra7121 index 032bcfa5..26087e05 100644 --- a/checks/check_extra7121 +++ b/checks/check_extra7121 @@ -14,7 +14,7 @@ CHECK_ID_extra7121="7.121" CHECK_TITLE_extra7121="[extra7121] Check if Glue development endpoints have Job bookmark encryption enabled." 
CHECK_SCORED_extra7121="NOT_SCORED" -CHECK_TYPE_extra7121="EXTRA" +CHECK_CIS_LEVEL_extra7121="EXTRA" CHECK_SEVERITY_extra7121="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7121="AwsGlue" CHECK_ALTERNATE_check7121="extra7121" diff --git a/checks/check_extra7122 b/checks/check_extra7122 index 1b4f8d27..ac163833 100644 --- a/checks/check_extra7122 +++ b/checks/check_extra7122 @@ -13,7 +13,7 @@ CHECK_ID_extra7122="7.122" CHECK_TITLE_extra7122="[extra7122] Check if Glue ETL Jobs have Job bookmark encryption enabled." CHECK_SCORED_extra7122="NOT_SCORED" -CHECK_TYPE_extra7122="EXTRA" +CHECK_CIS_LEVEL_extra7122="EXTRA" CHECK_SEVERITY_extra7122="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7122="AwsGlue" CHECK_ALTERNATE_check7122="extra7122" diff --git a/checks/check_extra7123 b/checks/check_extra7123 index c462f749..fecf5185 100644 --- a/checks/check_extra7123 +++ b/checks/check_extra7123 @@ -13,7 +13,7 @@ CHECK_ID_extra7123="7.123" CHECK_TITLE_extra7123="[extra7123] Check if IAM users have two active access keys" CHECK_SCORED_extra7123="NOT_SCORED" -CHECK_TYPE_extra7123="EXTRA" +CHECK_CIS_LEVEL_extra7123="EXTRA" CHECK_SEVERITY_extra7123="Medium" CHECK_ASFF_TYPE_extra7123="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_extra7123="AwsIamUser" diff --git a/checks/check_extra7124 b/checks/check_extra7124 index df02efaf..7a7e8b1f 100644 --- a/checks/check_extra7124 +++ b/checks/check_extra7124 @@ -13,7 +13,7 @@ CHECK_ID_extra7124="7.124" CHECK_TITLE_extra7124="[extra7124] Check if EC2 instances are managed by Systems Manager." CHECK_SCORED_extra7124="NOT_SCORED" -CHECK_TYPE_extra7124="EXTRA" +CHECK_CIS_LEVEL_extra7124="EXTRA" CHECK_SEVERITY_extra7124="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7124="AwsEc2Instance" CHECK_ALTERNATE_check7124="extra7124" diff --git a/checks/check_extra7125 b/checks/check_extra7125 index 545d6d8b..d41b8475 100644 --- a/checks/check_extra7125 +++ b/checks/check_extra7125 @@ -13,7 +13,7 @@ CHECK_ID_extra7125="7.125" CHECK_TITLE_extra7125="[extra7125] Check if IAM users have Hardware MFA enabled." 
CHECK_SCORED_extra7125="NOT_SCORED" -CHECK_TYPE_extra7125="EXTRA" +CHECK_CIS_LEVEL_extra7125="EXTRA" CHECK_SEVERITY_extra7125="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7125="AwsIamUser" CHECK_ALTERNATE_check7125="extra7125" diff --git a/checks/check_extra7126 b/checks/check_extra7126 index 6017afa4..31d9fdeb 100644 --- a/checks/check_extra7126 +++ b/checks/check_extra7126 @@ -13,7 +13,7 @@ CHECK_ID_extra7126="7.126" CHECK_TITLE_extra7126="[extra7126] Check if there are CMK KMS keys not used" CHECK_SCORED_extra7126="NOT_SCORED" -CHECK_TYPE_extra7126="EXTRA" +CHECK_CIS_LEVEL_extra7126="EXTRA" CHECK_SEVERITY_extra7126="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7126="AwsKmsKey" CHECK_ALTERNATE_check7126="extra7126" diff --git a/checks/check_extra7127 b/checks/check_extra7127 index cc23e4b6..9c12009a 100644 --- a/checks/check_extra7127 +++ b/checks/check_extra7127 @@ -13,7 +13,7 @@ CHECK_ID_extra7127="7.127" CHECK_TITLE_extra7127="[extra7127] Check if EC2 instances managed by Systems Manager are compliant with patching requirements" CHECK_SCORED_extra7127="NOT_SCORED" -CHECK_TYPE_extra7127="EXTRA" +CHECK_CIS_LEVEL_extra7127="EXTRA" CHECK_SEVERITY_extra7127="High" CHECK_ASFF_RESOURCE_TYPE_extra7127="AwsEc2Instance" CHECK_ASFF_TYPE_extra7127="Software and Configuration Checks/ENS op.exp.4.aws.sys.1" diff --git a/checks/check_extra7128 b/checks/check_extra7128 index 20182e8c..f9ad2cda 100644 --- a/checks/check_extra7128 +++ b/checks/check_extra7128 @@ -13,7 +13,7 @@ CHECK_ID_extra7128="7.128" CHECK_TITLE_extra7128="[extra7128] Check if DynamoDB table has encryption at rest enabled using CMK KMS" CHECK_SCORED_extra7128="NOT_SCORED" -CHECK_TYPE_extra7128="EXTRA" +CHECK_CIS_LEVEL_extra7128="EXTRA" CHECK_SEVERITY_extra7128="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7128="AwsDynamoDBTable" CHECK_ALTERNATE_check7128="extra7128" diff --git a/checks/check_extra7129 b/checks/check_extra7129 index ddeb1c77..caa6fefa 100644 --- a/checks/check_extra7129 +++ b/checks/check_extra7129 @@ -13,7 +13,7 @@ CHECK_ID_extra7129="7.129" CHECK_TITLE_extra7129="[extra7129] Check if Application Load Balancer has a WAF ACL attached" CHECK_SCORED_extra7129="NOT_SCORED" -CHECK_TYPE_extra7129="EXTRA" +CHECK_CIS_LEVEL_extra7129="EXTRA" CHECK_SEVERITY_extra7129="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7129="AwsElasticLoadBalancingV2LoadBalancer" CHECK_ALTERNATE_check7129="extra7129" @@ -24,49 +24,54 @@ CHECK_REMEDIATION_extra7129='Using the AWS Management Console open the AWS WAF c CHECK_DOC_extra7129='https://docs.aws.amazon.com/waf/latest/developerguide/web-acl-associating-aws-resource.html' CHECK_CAF_EPIC_extra7129='Infrastructure Security' +PARALLEL_REGIONS="50" + extra7129(){ for regx in $REGIONS; do - LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers $PROFILE_OPT --region $regx --query 'LoadBalancers[?Scheme == `internet-facing` && Type == `application`].[LoadBalancerName]' --output text) - LIST_OF_WAFV2_WEBACL_ARN=$($AWSCLI wafv2 list-web-acls $PROFILE_OPT --region=$regx --scope=REGIONAL --query WebACLs[*].ARN --output text) - LIST_OF_WAFV1_WEBACL_WEBACLID=$($AWSCLI waf-regional list-web-acls $PROFILE_OPT --region $regx --query WebACLs[*].[WebACLId] --output text) + # ( + LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers $PROFILE_OPT --region $regx --query 'LoadBalancers[?Scheme == `internet-facing` && Type == `application`].[LoadBalancerName]' --output text) + LIST_OF_WAFV2_WEBACL_ARN=$($AWSCLI wafv2 list-web-acls $PROFILE_OPT --region=$regx --scope=REGIONAL --query WebACLs[*].ARN --output text) + 
LIST_OF_WAFV1_WEBACL_WEBACLID=$($AWSCLI waf-regional list-web-acls $PROFILE_OPT --region $regx --query WebACLs[*].[WebACLId] --output text) - if [[ $LIST_OF_ELBSV2 ]]; then - for alb in $LIST_OF_ELBSV2; do - if [[ ${#LIST_OF_WAFV2_WEBACL_ARN[@]} -gt 0 || ${#LIST_OF_WAFV1_WEBACL_WEBACLID[@]} -gt 0 ]]; then - WAF_PROTECTED_ALBS=() - for wafaclarn in $LIST_OF_WAFV2_WEBACL_ARN; do - ALB_RESOURCES_IN_WEBACL=$($AWSCLI wafv2 list-resources-for-web-acl $PROFILE_OPT --web-acl-arn $wafaclarn --region=$regx --resource-type APPLICATION_LOAD_BALANCER --query ResourceArns --output text | xargs -n1 | awk -F'/' '{ print $3 }'| grep $alb) - if [[ $ALB_RESOURCES_IN_WEBACL ]]; then - WAF_PROTECTED_ALBS+=($wafaclarn) + if [[ $LIST_OF_ELBSV2 ]]; then + for alb in $LIST_OF_ELBSV2; do + if [[ ${#LIST_OF_WAFV2_WEBACL_ARN[@]} -gt 0 || ${#LIST_OF_WAFV1_WEBACL_WEBACLID[@]} -gt 0 ]]; then + WAF_PROTECTED_ALBS=() + for wafaclarn in $LIST_OF_WAFV2_WEBACL_ARN; do + ALB_RESOURCES_IN_WEBACL=$($AWSCLI wafv2 list-resources-for-web-acl $PROFILE_OPT --web-acl-arn $wafaclarn --region=$regx --resource-type APPLICATION_LOAD_BALANCER --query ResourceArns --output text | xargs -n1 | awk -F'/' '{ print $3 }'| grep $alb) + if [[ $ALB_RESOURCES_IN_WEBACL ]]; then + WAF_PROTECTED_ALBS+=($wafaclarn) + fi + done + for wafv1aclid in $LIST_OF_WAFV1_WEBACL_WEBACLID; do + ALB_RESOURCES_IN_WEBACL=$($AWSCLI waf-regional list-resources-for-web-acl $PROFILE_OPT --web-acl-id $wafv1aclid --region=$regx --resource-type APPLICATION_LOAD_BALANCER --output text --query "[ResourceArns]"| grep $alb) + if [[ $ALB_RESOURCES_IN_WEBACL ]]; then + WAFv1_PROTECTED_ALBS+=($wafv1aclid) + fi + done + if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 || ${#WAFv1_PROTECTED_ALBS[@]} -gt 0 ]]; then + if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 ]]; then + for wafaclarn in "${WAF_PROTECTED_ALBS[@]}"; do + WAFV2_WEBACL_ARN_SHORT=$(echo $wafaclarn | awk -F'/' '{ print $3 }') + textPass "$regx: Application Load Balancer $alb is protected by WAFv2 ACL $WAFV2_WEBACL_ARN_SHORT" "$regx" "$alb" + done + fi + if [[ ${#WAFv1_PROTECTED_ALBS[@]} -gt 0 ]]; then + for wafv1aclid in "${WAFv1_PROTECTED_ALBS[@]}"; do + textPass "$regx: Application Load Balancer $alb is protected by WAFv1 ACL $wafv1aclid" "$regx" "$alb" + done + fi + else + textFail "$regx: Application Load Balancer $alb is not protected by WAF ACL" "$regx" "$alb" + fi + else + textFail "$regx: Application Load Balancer $alb is not protected no WAF ACL found" "$regx" "$alb" fi done - for wafv1aclid in $LIST_OF_WAFV1_WEBACL_WEBACLID; do - ALB_RESOURCES_IN_WEBACL=$($AWSCLI waf-regional list-resources-for-web-acl $PROFILE_OPT --web-acl-id $wafv1aclid --region=$regx --resource-type APPLICATION_LOAD_BALANCER --output text --query "[ResourceArns]"| grep $alb) - if [[ $ALB_RESOURCES_IN_WEBACL ]]; then - WAFv1_PROTECTED_ALBS+=($wafv1aclid) - fi - done - if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 || ${#WAFv1_PROTECTED_ALBS[@]} -gt 0 ]]; then - if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 ]]; then - for wafaclarn in "${WAF_PROTECTED_ALBS[@]}"; do - WAFV2_WEBACL_ARN_SHORT=$(echo $wafaclarn | awk -F'/' '{ print $3 }') - textPass "$regx: Application Load Balancer $alb is protected by WAFv2 ACL $WAFV2_WEBACL_ARN_SHORT" "$regx" "$alb" - done - fi - if [[ ${#WAFv1_PROTECTED_ALBS[@]} -gt 0 ]]; then - for wafv1aclid in "${WAFv1_PROTECTED_ALBS[@]}"; do - textPass "$regx: Application Load Balancer $alb is protected by WAFv1 ACL $wafv1aclid" "$regx" "$alb" - done - fi - else - textFail "$regx: Application Load Balancer $alb is not protected by WAF ACL" "$regx" "$alb" 
- fi else - textFail "$regx: Application Load Balancer $alb is not protected no WAF ACL found" "$regx" "$alb" + textInfo "$regx: No Application Load Balancers found" "$regx" fi - done - else - textInfo "$regx: No Application Load Balancers found" "$regx" - fi + # ) & done + # wait } \ No newline at end of file diff --git a/checks/check_extra713 b/checks/check_extra713 index 7a83b9bb..01f96d58 100644 --- a/checks/check_extra713 +++ b/checks/check_extra713 @@ -13,7 +13,7 @@ CHECK_ID_extra713="7.13" CHECK_TITLE_extra713="[extra713] Check if GuardDuty is enabled" CHECK_SCORED_extra713="NOT_SCORED" -CHECK_TYPE_extra713="EXTRA" +CHECK_CIS_LEVEL_extra713="EXTRA" CHECK_SEVERITY_extra713="High" CHECK_ALTERNATE_check713="extra713" CHECK_ASFF_COMPLIANCE_TYPE_extra713="ens-op.mon.1.aws.duty.1" diff --git a/checks/check_extra7130 b/checks/check_extra7130 index a302f0d4..251f6c5d 100644 --- a/checks/check_extra7130 +++ b/checks/check_extra7130 @@ -14,7 +14,7 @@ CHECK_ID_extra7130="7.130" CHECK_TITLE_extra7130="[extra7130] Ensure there are no SNS Topics unencrypted" CHECK_SCORED_extra7130="NOT_SCORED" -CHECK_TYPE_extra7130="EXTRA" +CHECK_CIS_LEVEL_extra7130="EXTRA" CHECK_SEVERITY_extra7130="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7130="AwsSnsTopic" CHECK_ALTERNATE_check7130="extra7130" diff --git a/checks/check_extra7131 b/checks/check_extra7131 index 946f1682..dbd22bd9 100644 --- a/checks/check_extra7131 +++ b/checks/check_extra7131 @@ -13,7 +13,7 @@ CHECK_ID_extra7131="7.131" CHECK_TITLE_extra7131="[extra7131] Ensure RDS instances have minor version upgrade enabled" CHECK_SCORED_extra7131="NOT_SCORED" -CHECK_TYPE_extra7131="EXTRA" +CHECK_CIS_LEVEL_extra7131="EXTRA" CHECK_SEVERITY_extra7131="Low" CHECK_ASFF_RESOURCE_TYPE_extra7131="AwsRdsDbInstance" CHECK_ALTERNATE_check7131="extra7131" diff --git a/checks/check_extra7132 b/checks/check_extra7132 index 5eefb58f..4d3af561 100644 --- a/checks/check_extra7132 +++ b/checks/check_extra7132 @@ -13,7 +13,7 @@ CHECK_ID_extra7132="7.132" CHECK_TITLE_extra7132="[extra7132] Check if RDS instances has enhanced monitoring enabled" CHECK_SCORED_extra7132="NOT_SCORED" -CHECK_TYPE_extra7132="EXTRA" +CHECK_CIS_LEVEL_extra7132="EXTRA" CHECK_SEVERITY_extra7132="Low" CHECK_ASFF_RESOURCE_TYPE_extra7132="AwsRdsDbInstance" CHECK_ALTERNATE_check7132="extra7132" diff --git a/checks/check_extra7133 b/checks/check_extra7133 index c2eefd5e..ee20f261 100644 --- a/checks/check_extra7133 +++ b/checks/check_extra7133 @@ -13,7 +13,7 @@ CHECK_ID_extra7133="7.133" CHECK_TITLE_extra7133="[extra7133] Check if RDS instances have multi-AZ enabled" CHECK_SCORED_extra7133="NOT_SCORED" -CHECK_TYPE_extra7133="EXTRA" +CHECK_CIS_LEVEL_extra7133="EXTRA" CHECK_SEVERITY_extra7133="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7133="AwsRdsDbInstance" CHECK_ALTERNATE_check7133="extra7133" diff --git a/checks/check_extra7134 b/checks/check_extra7134 index 4d649f83..14f2b957 100644 --- a/checks/check_extra7134 +++ b/checks/check_extra7134 @@ -13,7 +13,7 @@ CHECK_ID_extra7134="7.134" CHECK_TITLE_extra7134="[extra7134] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to FTP ports 20 or 21 " CHECK_SCORED_extra7134="NOT_SCORED" -CHECK_TYPE_extra7134="EXTRA" +CHECK_CIS_LEVEL_extra7134="EXTRA" CHECK_SEVERITY_extra7134="High" CHECK_ASFF_RESOURCE_TYPE_extra7134="AwsEc2SecurityGroup" CHECK_ALTERNATE_check7134="extra7134" diff --git a/checks/check_extra7135 b/checks/check_extra7135 index 42a27bfb..2788b54e 100644 --- a/checks/check_extra7135 +++ b/checks/check_extra7135 @@ -13,7 +13,7 @@ 
CHECK_ID_extra7135="7.135" CHECK_TITLE_extra7135="[extra7135] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Kafka port 9092 " CHECK_SCORED_extra7135="NOT_SCORED" -CHECK_TYPE_extra7135="EXTRA" +CHECK_CIS_LEVEL_extra7135="EXTRA" CHECK_SEVERITY_extra7135="High" CHECK_ASFF_RESOURCE_TYPE_extra7135="AwsEc2SecurityGroup" CHECK_ALTERNATE_check7135="extra7135" diff --git a/checks/check_extra7136 b/checks/check_extra7136 index 7b440031..b7779b6f 100644 --- a/checks/check_extra7136 +++ b/checks/check_extra7136 @@ -13,7 +13,7 @@ CHECK_ID_extra7136="7.136" CHECK_TITLE_extra7136="[extra7136] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Telnet port 23 " CHECK_SCORED_extra7136="NOT_SCORED" -CHECK_TYPE_extra7136="EXTRA" +CHECK_CIS_LEVEL_extra7136="EXTRA" CHECK_SEVERITY_extra7136="High" CHECK_ASFF_RESOURCE_TYPE_extra7136="AwsEc2SecurityGroup" CHECK_ALTERNATE_check7136="extra7136" diff --git a/checks/check_extra7137 b/checks/check_extra7137 index 754acc5f..5759927e 100644 --- a/checks/check_extra7137 +++ b/checks/check_extra7137 @@ -13,7 +13,7 @@ CHECK_ID_extra7137="7.137" CHECK_TITLE_extra7137="[extra7137] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Windows SQL Server ports 1433 or 1434 " CHECK_SCORED_extra7137="NOT_SCORED" -CHECK_TYPE_extra7137="EXTRA" +CHECK_CIS_LEVEL_extra7137="EXTRA" CHECK_SEVERITY_extra7137="High" CHECK_ASFF_RESOURCE_TYPE_extra7137="AwsEc2SecurityGroup" CHECK_ALTERNATE_check7137="extra7137" diff --git a/checks/check_extra7138 b/checks/check_extra7138 index c1704c67..f164aa62 100644 --- a/checks/check_extra7138 +++ b/checks/check_extra7138 @@ -13,7 +13,7 @@ CHECK_ID_extra7138="7.138" CHECK_TITLE_extra7138="[extra7138] Ensure no Network ACLs allow ingress from 0.0.0.0/0 to any port" CHECK_SCORED_extra7138="NOT SCORED" -CHECK_TYPE_extra7138="LEVEL2" +CHECK_CIS_LEVEL_extra7138="LEVEL2" CHECK_SEVERITY_extra7138="High" CHECK_ASFF_TYPE_extra7138="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ASFF_RESOURCE_TYPE_extra7138="AwsEc2NetworkAcl" diff --git a/checks/check_extra7139 b/checks/check_extra7139 index 0b635a10..094037ee 100644 --- a/checks/check_extra7139 +++ b/checks/check_extra7139 @@ -12,7 +12,7 @@ CHECK_ID_extra7139="7.139" CHECK_TITLE_extra7139="[extra7139] There are High severity GuardDuty findings " CHECK_SCORED_extra7139="NOT_SCORED" -CHECK_TYPE_extra7139="EXTRA" +CHECK_CIS_LEVEL_extra7139="EXTRA" CHECK_SEVERITY_extra7139="High" CHECK_ASFF_RESOURCE_TYPE_extra7139="AwsGuardDutyDetector" CHECK_ALTERNATE_check7139="extra7139" @@ -29,7 +29,7 @@ extra7139(){ if [[ $DETECTORS_LIST ]];then for DETECTOR in $DETECTORS_LIST;do FINDINGS_COUNT="" - FINDINGS_COUNT=$($AWSCLI $PROFILE_OPT --region $regx --output text guardduty list-findings --detector-id $DETECTOR --finding-criteria '{"Criterion":{"severity": {"Eq":["8"]}}}' 2> /dev/null | wc -l | xargs) # Severity LOW=2, MED=4, HIGH=8 + FINDINGS_COUNT=$($AWSCLI $PROFILE_OPT --region $regx --output text guardduty list-findings --detector-id $DETECTOR --finding-criteria '{"Criterion":{"severity": {"Eq":["8"]}, "service.archived": {"Eq": ["false"]}}}' 2> /dev/null | wc -l | xargs) # Severity LOW=2, MED=4, HIGH=8 if [[ $FINDINGS_COUNT -gt 0 ]];then textFail "$regx: GuardDuty has $FINDINGS_COUNT high severity findings." 
"$regx" else diff --git a/checks/check_extra714 b/checks/check_extra714 index 27681e1f..40d2c679 100644 --- a/checks/check_extra714 +++ b/checks/check_extra714 @@ -13,7 +13,7 @@ CHECK_ID_extra714="7.14" CHECK_TITLE_extra714="[extra714] Check if CloudFront distributions have logging enabled" CHECK_SCORED_extra714="NOT_SCORED" -CHECK_TYPE_extra714="EXTRA" +CHECK_CIS_LEVEL_extra714="EXTRA" CHECK_SEVERITY_extra714="Medium" CHECK_ASFF_RESOURCE_TYPE_extra714="AwsCloudFrontDistribution" CHECK_ALTERNATE_check714="extra714" diff --git a/checks/check_extra7140 b/checks/check_extra7140 index 4b34c7a5..42f93e72 100644 --- a/checks/check_extra7140 +++ b/checks/check_extra7140 @@ -12,7 +12,7 @@ CHECK_ID_extra7140="7.140" CHECK_TITLE_extra7140="[extra7140] Check if there are SSM Documents set as public" CHECK_SCORED_extra7140="NOT_SCORED" -CHECK_TYPE_extra7140="EXTRA" +CHECK_CIS_LEVEL_extra7140="EXTRA" CHECK_SEVERITY_extra7140="High" CHECK_ASFF_RESOURCE_TYPE_extra7140="AwsSsmDocument" CHECK_ALTERNATE_check7140="extra7140" diff --git a/checks/check_extra7141 b/checks/check_extra7141 index ff4ce69c..3b828cdd 100644 --- a/checks/check_extra7141 +++ b/checks/check_extra7141 @@ -13,7 +13,7 @@ CHECK_ID_extra7141="7.141" CHECK_TITLE_extra7141="[extra7141] Find secrets in SSM Documents" CHECK_SCORED_extra7141="NOT_SCORED" -CHECK_TYPE_extra7141="EXTRA" +CHECK_CIS_LEVEL_extra7141="EXTRA" CHECK_SEVERITY_extra7141="Critical" CHECK_ASFF_RESOURCE_TYPE_extra7141="AwsSsmDocument" CHECK_ALTERNATE_check7141="extra7141" diff --git a/checks/check_extra7142 b/checks/check_extra7142 index 5fbd6922..9900d46d 100644 --- a/checks/check_extra7142 +++ b/checks/check_extra7142 @@ -11,15 +11,15 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra7142="7.142" -CHECK_TITLE_extra7142="[extra7142] Check if Application Load Balancer is dropping invalid packets to prevent header based http request smuggling" +CHECK_TITLE_extra7142="[extra7142] Check if Application Load Balancer is dropping invalid packets to prevent header based HTTP request smuggling" CHECK_SCORED_extra7142="NOT_SCORED" -CHECK_TYPE_extra7142="EXTRA" +CHECK_CIS_LEVEL_extra7142="EXTRA" CHECK_SEVERITY_extra7142="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7142="AwsElasticLoadBalancingV2LoadBalancer" CHECK_ALTERNATE_check7142="extra7142" CHECK_ASFF_COMPLIANCE_TYPE_extra7142="" CHECK_SERVICENAME_extra7142="elb" -CHECK_RISK_extra7142='ALB can be target of actors sendingn bad http headers' +CHECK_RISK_extra7142='ALB can be target of actors sending bad HTTP headers' CHECK_REMEDIATION_extra7142='Ensure Application Load Balancer is configured for HTTP headers with header fields that are not valid are removed by the load balancer (true)' CHECK_DOC_extra7142='https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#desync-mitigation-mode' CHECK_CAF_EPIC_extra7142='Data Protection' diff --git a/checks/check_extra7143 b/checks/check_extra7143 index 09f24131..a4bdac62 100644 --- a/checks/check_extra7143 +++ b/checks/check_extra7143 @@ -13,7 +13,7 @@ CHECK_ID_extra7143="7.143" CHECK_TITLE_extra7143="[extra7143] Check if EFS have policies which allow access to everyone" CHECK_SCORED_extra7143="NOT_SCORED" -CHECK_TYPE_extra7143="EXTRA" +CHECK_CIS_LEVEL_extra7143="EXTRA" CHECK_SEVERITY_extra7143="Critical" CHECK_ASFF_RESOURCE_TYPE_extra7143="AwsEFS" CHECK_ALTERNATE_check7143="extra7143" diff --git a/checks/check_extra7144 b/checks/check_extra7144 index 5fc9a270..2bdc0dc7 100644 --- a/checks/check_extra7144 +++ b/checks/check_extra7144 @@ -13,7 +13,7 @@ CHECK_ID_extra7144="7.144" CHECK_TITLE_extra7144="[extra7144] Check if CloudWatch has allowed cross-account sharing" CHECK_SCORED_extra7144="NOT_SCORED" -CHECK_TYPE_extra7144="EXTRA" +CHECK_CIS_LEVEL_extra7144="EXTRA" CHECK_SEVERITY_extra7144="Medium" CHECK_ASFF_RESOURCE_TYPE_extra7144="AwsCloudWatch" CHECK_ALTERNATE_check7144="extra7144" diff --git a/checks/check_extra7145 b/checks/check_extra7145 index fa74b27b..8ef77601 100644 --- a/checks/check_extra7145 +++ b/checks/check_extra7145 @@ -13,7 +13,7 @@ CHECK_ID_extra7145="7.145" CHECK_TITLE_extra7145="[extra7145] Check if Lambda functions have policies which allow access to any AWS account" CHECK_SCORED_extra7145="NOT_SCORED" -CHECK_TYPE_extra7145="EXTRA" +CHECK_CIS_LEVEL_extra7145="EXTRA" CHECK_SEVERITY_extra7145="Critical" CHECK_ASFF_RESOURCE_TYPE_extra7145="AwsLambda" CHECK_ALTERNATE_check7145="extra7145" diff --git a/checks/check_extra7146 b/checks/check_extra7146 index 92ffb813..78e56683 100644 --- a/checks/check_extra7146 +++ b/checks/check_extra7146 @@ -13,7 +13,7 @@ CHECK_ID_extra7146="7.146" CHECK_TITLE_extra7146="[extra7146] Check if there is any unassigned Elastic IP" CHECK_SCORED_extra7146="NOT_SCORED" -CHECK_TYPE_extra7146="EXTRA" +CHECK_CIS_LEVEL_extra7146="EXTRA" CHECK_SEVERITY_extra7146="Low" CHECK_ASFF_RESOURCE_TYPE_extra7146="AwsElasticIPs" CHECK_ALTERNATE_check7146="extra7146" diff --git a/checks/check_extra7147 b/checks/check_extra7147 index cff62c59..f14e1949 100644 --- a/checks/check_extra7147 +++ b/checks/check_extra7147 @@ -13,7 +13,7 @@ CHECK_ID_extra7147="7.147" CHECK_TITLE_extra7147="[extra7147] Check if S3 Glacier vaults have policies which allow access to everyone" CHECK_SCORED_extra7147="NOT_SCORED" 
-CHECK_TYPE_extra7147="EXTRA"
+CHECK_CIS_LEVEL_extra7147="EXTRA"
 CHECK_SEVERITY_extra7147="Critical"
 CHECK_ASFF_RESOURCE_TYPE_extra7147="AwsGlacierVault"
 CHECK_ALTERNATE_check7147="extra7142"
diff --git a/checks/check_extra7148 b/checks/check_extra7148
new file mode 100644
index 00000000..c69805b5
--- /dev/null
+++ b/checks/check_extra7148
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+CHECK_ID_extra7148="7.148"
+CHECK_TITLE_extra7148="[extra7148] Check if EFS File systems have backup enabled"
+CHECK_SCORED_extra7148="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7148="EXTRA"
+CHECK_SEVERITY_extra7148="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7148="AwsEfsFileSystem"
+CHECK_ALTERNATE_check7148="extra7148"
+CHECK_SERVICENAME_extra7148="efs"
+CHECK_RISK_extra7148='If backup is not enabled; data is vulnerable. Human error or bad actors could erase or modify data.'
+CHECK_REMEDIATION_extra7148='Enable automated backup for production data. Define a retention period and periodically test backup restoration. A Disaster Recovery process should be in place to govern Data Protection approach.'
+CHECK_DOC_extra7148='https://docs.aws.amazon.com/efs/latest/ug/whatisefs.html'
+CHECK_CAF_EPIC_extra7148='Data Protection'
+
+extra7148() {
+  for regx in $REGIONS; do
+    LIST_OF_EFS_SYSTEMS=$($AWSCLI efs describe-file-systems $PROFILE_OPT --region $regx --query 'FileSystems[*].FileSystemId' --output text)
+    if [[ $LIST_OF_EFS_SYSTEMS ]]; then
+      for filesystem in $LIST_OF_EFS_SYSTEMS; do
+        # if the backup policy status is DISABLED, backups are not enabled
+        BACKUP_POLICY=$($AWSCLI efs describe-backup-policy $PROFILE_OPT --region $regx --file-system-id $filesystem --query BackupPolicy --output text)
+        if [[ $BACKUP_POLICY == "DISABLED" ]]; then
+          textFail "$regx: File system $filesystem does not have backup enabled!" "$regx" "$filesystem"
+        else
+          textPass "$regx: EFS File system $filesystem has backup enabled" "$regx" "$filesystem"
+        fi
+      done
+    else
+      textInfo "$regx: No EFS File systems found" "$regx"
+    fi
+  done
+}
diff --git a/checks/check_extra7149 b/checks/check_extra7149
new file mode 100644
index 00000000..259947d8
--- /dev/null
+++ b/checks/check_extra7149
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+CHECK_ID_extra7149="7.149"
+CHECK_TITLE_extra7149="[extra7149] Check if Redshift Clusters have automated snapshots enabled"
+CHECK_SCORED_extra7149="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7149="EXTRA"
+CHECK_SEVERITY_extra7149="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7149="AwsRedshiftCluster"
+CHECK_ALTERNATE_check7149="extra7149"
+CHECK_SERVICENAME_extra7149="redshift"
+CHECK_RISK_extra7149='If backup is not enabled; data is vulnerable. Human error or bad actors could erase or modify data.'
+CHECK_REMEDIATION_extra7149='Enable automated backup for production data. Define a retention period and periodically test backup restoration. A Disaster Recovery process should be in place to govern Data Protection approach.'
+CHECK_DOC_extra7149='https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/AWS_Redshift.html'
+CHECK_CAF_EPIC_extra7149='Data Protection'
+
+extra7149() {
+  # "Check if Redshift Clusters have automated snapshots enabled"
+  for regx in $REGIONS; do
+    LIST_OF_REDSHIFT_CLUSTERS=$($AWSCLI redshift describe-clusters $PROFILE_OPT --region $regx --query 'Clusters[*].ClusterIdentifier' --output text)
+    if [[ $LIST_OF_REDSHIFT_CLUSTERS ]]; then
+      for redshiftcluster in $LIST_OF_REDSHIFT_CLUSTERS; do
+        REDSHIFT_SNAPSHOT_ENABLED=$($AWSCLI redshift describe-cluster-snapshots $PROFILE_OPT --region $regx --cluster-identifier $redshiftcluster --snapshot-type automated)
+        if [[ $REDSHIFT_SNAPSHOT_ENABLED ]]; then
+          textPass "$regx: Redshift cluster $redshiftcluster has automated snapshots enabled" "$regx" "$redshiftcluster"
+        else
+          textFail "$regx: Redshift cluster $redshiftcluster has automated snapshots disabled!" "$regx" "$redshiftcluster"
+        fi
+      done
+    else
+      textInfo "$regx: No Redshift cluster configured" "$regx"
+    fi
+  done
+}
diff --git a/checks/check_extra715 b/checks/check_extra715
index 2ce8d287..b5ccdb0e 100644
--- a/checks/check_extra715
+++ b/checks/check_extra715
@@ -13,7 +13,7 @@
 CHECK_ID_extra715="7.15"
 CHECK_TITLE_extra715="[extra715] Check if Amazon Elasticsearch Service (ES) domains have logging enabled"
 CHECK_SCORED_extra715="NOT_SCORED"
-CHECK_TYPE_extra715="EXTRA"
+CHECK_CIS_LEVEL_extra715="EXTRA"
 CHECK_SEVERITY_extra715="Medium"
 CHECK_ASFF_RESOURCE_TYPE_extra715="AwsElasticsearchDomain"
 CHECK_ALTERNATE_check715="extra715"
diff --git a/checks/check_extra7150 b/checks/check_extra7150
new file mode 100644
index 00000000..673a3da8
--- /dev/null
+++ b/checks/check_extra7150
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+CHECK_ID_extra7150="7.150" +CHECK_TITLE_extra7150="[extra7150] Check if Elastic Load Balancers have deletion protection enabled" +CHECK_SCORED_extra7150="NOT_SCORED" +CHECK_CIS_LEVEL_extra7150="EXTRA" +CHECK_SEVERITY_extra7150="Medium" +CHECK_ASFF_RESOURCE_TYPE_extra7150="AwsElbLoadBalancer" +CHECK_ALTERNATE_check7150="extra7150" +CHECK_SERVICENAME_extra7150="elb" +CHECK_RISK_extra7150='If deletion protection is not enabled; the resource is not protected against deletion.' +CHECK_REMEDIATION_extra7150='Enable deletion protection attribute; this is not enabled by default.' +CHECK_DOC_extra7150='https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#deletion-protection' +CHECK_CAF_EPIC_extra7150='Data Protection' + +extra7150(){ + # "Check if Elastic Load Balancers have delete protection enabled." + for regx in $REGIONS; do + LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers $PROFILE_OPT --region $regx --query 'LoadBalancers[*].LoadBalancerArn' --output text|xargs -n1) + if [[ $LIST_OF_ELBSV2 ]]; then + for elb in $LIST_OF_ELBSV2; do + CHECK_DELETION_PROTECTION_ENABLED=$($AWSCLI elbv2 describe-load-balancer-attributes $PROFILE_OPT --region $regx --load-balancer-arn $elb --query Attributes[*] --output text|grep "deletion_protection.enabled"|grep true ) + ELBV2_NAME=$(echo $elb|cut -d\/ -f3) + if [[ $CHECK_DELETION_PROTECTION_ENABLED ]]; then + textPass "$regx: $ELBV2_NAME has the attribute deletion protection enabled" "$regx" "$elb" + else + textFail "$regx: $ELBV2_NAME does not have deletion protection enabled." "$regx" "$elb" + fi + done + else + textInfo "$regx: No ELBs found" "$regx" + fi + done +} diff --git a/checks/check_extra7151 b/checks/check_extra7151 new file mode 100644 index 00000000..a17a3673 --- /dev/null +++ b/checks/check_extra7151 @@ -0,0 +1,44 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +CHECK_ID_extra7151="7.151" +CHECK_TITLE_extra7151="[extra7151] Check if DynamoDB tables point-in-time recovery (PITR) is enabled" +CHECK_SCORED_extra7151="NOT_SCORED" +CHECK_CIS_LEVEL_extra7151="EXTRA" +CHECK_SEVERITY_extra7151="Medium" +CHECK_ASFF_RESOURCE_TYPE_extra7151="AwsDynamoDbTable" +CHECK_ALTERNATE_check7151="extra7151" +CHECK_SERVICENAME_extra7151="dynamodb" +CHECK_RISK_extra7151='If the DynamoDB Table does not have point-in-time recovery enabled; it is vulnerable to accidental write or delete operations.' +CHECK_REMEDIATION_extra7151='Enable point-in-time recovery; this is not enabled by default.' 
+CHECK_DOC_extra7151='https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/PointInTimeRecovery_Howitworks.html' +CHECK_CAF_EPIC_extra7151='Data Protection' + +extra7151(){ + # "Check if DynamoDB tables point-in-time recovery (PITR) is enabled" + for regx in $REGIONS; do + LIST_OF_DYNAMODB_TABLES=$($AWSCLI dynamodb list-tables $PROFILE_OPT --region $regx --query 'TableNames[*]' --output text) + if [[ $LIST_OF_DYNAMODB_TABLES ]]; then + for dynamodb_table in $LIST_OF_DYNAMODB_TABLES; do + POINT_IN_TIME_RECOVERY_ENABLED=$($AWSCLI dynamodb describe-continuous-backups $PROFILE_OPT --region $regx --table-name $dynamodb_table | jq '.[].PointInTimeRecoveryDescription | select(.PointInTimeRecoveryStatus=="ENABLED") | .PointInTimeRecoveryStatus') + if [[ $POINT_IN_TIME_RECOVERY_ENABLED ]]; then + textPass "$regx: $dynamodb_table has point-in-time recovery enabled." "$regx" "$dynamodb_table" + else + textFail "$regx: $dynamodb_table does not have point-in-time recovery enabled." "$regx" "$dynamodb_table" + fi + done + else + textInfo "$regx: No DynamoDB tables found" "$regx" + fi + done +} diff --git a/checks/check_extra7152 b/checks/check_extra7152 new file mode 100644 index 00000000..e205de31 --- /dev/null +++ b/checks/check_extra7152 @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+
+# Remediation:
+#
+# https://docs.aws.amazon.com/cli/latest/reference/route53domains/update-domain-contact-privacy.html
+#
+# update-domain-contact-privacy \
+# --region us-east-1 \
+# --domain-name example.com \
+# --admin-privacy \
+# --registrant-privacy \
+# --tech-privacy
+
+CHECK_ID_extra7152="7.152"
+CHECK_TITLE_extra7152="[extra7152] Enable Privacy Protection for a Route53 Domain"
+CHECK_SCORED_extra7152="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7152="EXTRA"
+CHECK_SEVERITY_extra7152="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7152="AwsRoute53Domain"
+CHECK_ALTERNATE_check7152="extra7152"
+CHECK_SERVICENAME_extra7152="route53"
+CHECK_RISK_extra7152='Without privacy protection enabled, personal contact information is published to the public WHOIS database'
+CHECK_REMEDIATION_extra7152='Ensure default Privacy is enabled'
+CHECK_DOC_extra7152='https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-privacy-protection.html'
+CHECK_CAF_EPIC_extra7152='Data Protection'
+
+extra7152(){
+  echo "Route53 is a global service, looking for domains in US-EAST-1"
+  DOMAIN_NAMES=$($AWSCLI route53domains list-domains $PROFILE_OPT --region us-east-1 --query 'Domains[*].DomainName' --output text )
+  if [[ $DOMAIN_NAMES ]];then
+    for domain_name in $DOMAIN_NAMES;do
+      DOMAIN_DETAIL=$($AWSCLI route53domains get-domain-detail $PROFILE_OPT --region us-east-1 --query 'AdminPrivacy' --domain-name $domain_name)
+      if [[ $DOMAIN_DETAIL == false ]]; then
+        textFail "$regx: Contact information public for: $domain_name" "$regx" "$domain_name"
+      else
+        textPass "$regx: All contact information is private for: $domain_name" "$regx" "$domain_name"
+      fi
+    done
+  else
+    textPass "$regx: No Domain Names found" "$regx"
+  fi
+}
diff --git a/checks/check_extra7153 b/checks/check_extra7153
new file mode 100644
index 00000000..eee485c5
--- /dev/null
+++ b/checks/check_extra7153
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+# Remediation:
+#
+# https://docs.aws.amazon.com/cli/latest/reference/route53domains/enable-domain-transfer-lock.html
+#
+# enable-domain-transfer-lock \
+# --domain-name example.com
+
+
+CHECK_ID_extra7153="7.153"
+CHECK_TITLE_extra7153="[extra7153] Enable Transfer Lock for a Route53 Domain"
+CHECK_SCORED_extra7153="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7153="EXTRA"
+CHECK_SEVERITY_extra7153="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7153="AwsRoute53Domain"
+CHECK_ALTERNATE_check7153="extra7153"
+CHECK_SERVICENAME_extra7153="route53"
+CHECK_RISK_extra7153='Without transfer lock enabled, a domain name could be incorrectly moved to a new registrar'
+CHECK_REMEDIATION_extra7153='Ensure transfer lock is enabled'
+CHECK_DOC_extra7153='https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-lock.html'
+CHECK_CAF_EPIC_extra7153='Data Protection'
+
+extra7153(){
+  # Route53 is a global service, looking for domains in US-EAST-1
+  DOMAIN_NAMES=$($AWSCLI route53domains list-domains $PROFILE_OPT --region us-east-1 --query 'Domains[*].DomainName' --output text )
+  if [[ $DOMAIN_NAMES ]];then
+    for domain_name in $DOMAIN_NAMES;do
+      DOMAIN_DETAIL=$($AWSCLI route53domains get-domain-detail $PROFILE_OPT --region us-east-1 --query 'StatusList' --domain-name $domain_name)
+      HAS_TRANSFER_LOCK=$( grep -o 'clientTransferProhibited' <<< $DOMAIN_DETAIL)
+      if [[ $HAS_TRANSFER_LOCK ]]; then
+        textPass "$regx: clientTransferProhibited found for: $domain_name" "$regx" "$domain_name"
+      else
+        textFail "$regx: clientTransferProhibited not found for: $domain_name" "$regx" "$domain_name"
+      fi
+    done
+  else
+    textPass "$regx: No Domain Names found" "$regx"
+  fi
+}
diff --git a/checks/check_extra7154 b/checks/check_extra7154
new file mode 100644
index 00000000..2fc74a6f
--- /dev/null
+++ b/checks/check_extra7154
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+#
+# Remediation:
+#
+# https://docs.aws.amazon.com/cli/latest/reference/cloudformation/update-termination-protection.html
+#
+# aws cloudformation update-termination-protection \
+# --stack-name my-stack \
+# --enable-termination-protection
+
+CHECK_ID_extra7154="7.154"
+CHECK_TITLE_extra7154="[extra7154] Enable termination protection for CloudFormation Stacks"
+CHECK_SCORED_extra7154="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7154="EXTRA"
+CHECK_SEVERITY_extra7154="MEDIUM"
+CHECK_ASFF_RESOURCE_TYPE_extra7154="AwsCloudFormationStack"
+CHECK_ALTERNATE_check7154="extra7154"
+CHECK_SERVICENAME_extra7154="cloudformation"
+CHECK_RISK_extra7154='Without termination protection enabled; a critical CloudFormation stack can be accidentally deleted.'
+CHECK_REMEDIATION_extra7154='Ensure termination protection is enabled for the cloudformation stacks' +CHECK_DOC_extra7154='https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-protect-stacks.html' +CHECK_CAF_EPIC_extra7154='Infrastructure Protection' + +extra7154() { + for regx in $REGIONS; do + CFN_STACKS=$($AWSCLI cloudformation describe-stacks $PROFILE_OPT --region $regx --output json) + LIST_OF_CFN_STACKS=$(echo $CFN_STACKS | jq -r '.Stacks[].StackName') + if [[ $LIST_OF_CFN_STACKS ]];then + for stack in $LIST_OF_CFN_STACKS; do + CFN_STACK_DETAILS=$($AWSCLI cloudformation describe-stacks $PROFILE_OPT --region $regx --stack-name $stack --output json) + TERMINATION_ENABLED=$(echo $CFN_STACK_DETAILS | jq -r '.Stacks[].EnableTerminationProtection') + ROOT_ID=$(echo $CFN_STACK_DETAILS | jq -r '.Stacks[].RootId') + if [[ $ROOT_ID != null && $TERMINATION_ENABLED == "false" ]]; then + textInfo "$regx: $stack is a nested stack. Enable termination protection on the root stack $ROOT_ID" "$regx" "$stack" "$ROOT_ID" + elif [[ $TERMINATION_ENABLED == "true" ]]; then + textPass "$regx: Cloudformation stack $stack has termination protection enabled" "$regx" "$stack" + else + textFail "$regx: Cloudformation stack $stack has termination protection disabled" "$regx" "$stack" + fi + done + else + textInfo "$regx: No Cloudformation stacks found" "$regx" + fi + done +} diff --git a/checks/check_extra7155 b/checks/check_extra7155 new file mode 100644 index 00000000..c51d6669 --- /dev/null +++ b/checks/check_extra7155 @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +# Remediation: +# +# https://docs.aws.amazon.com/cli/latest/reference/elbv2/modify-load-balancer-attributes.html +# +# aws elbv2 modify-load-balancer-attributes +# --load-balancer-arn \ +# --attributes Key=routing.http.desync_mitigation_mode,Value= + +CHECK_ID_extra7155="7.155" +CHECK_TITLE_extra7155="[extra7155] Check whether the Application Load Balancer is configured with defensive or strictest desync mitigation mode" +CHECK_SCORED_extra7155="NOT_SCORED" +CHECK_CIS_LEVEL_extra7155="EXTRA" +CHECK_SEVERITY_extra7155="MEDIUM" +CHECK_ASFF_RESOURCE_TYPE_extra7155="AwsElasticLoadBalancingV2LoadBalancer" +CHECK_ALTERNATE_check7155="extra7155" +CHECK_SERVICENAME_extra7155="elb" +CHECK_RISK_extra7155='HTTP Desync issues can lead to request smuggling and make your applications vulnerable to request queue or cache poisoning; which could lead to credential hijacking or execution of unauthorized commands.' 
+CHECK_ID_extra7155="7.155"
+CHECK_TITLE_extra7155="[extra7155] Check whether the Application Load Balancer is configured with defensive or strictest desync mitigation mode"
+CHECK_SCORED_extra7155="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7155="EXTRA"
+CHECK_SEVERITY_extra7155="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7155="AwsElasticLoadBalancingV2LoadBalancer"
+CHECK_ALTERNATE_check7155="extra7155"
+CHECK_SERVICENAME_extra7155="elb"
+CHECK_RISK_extra7155='HTTP Desync issues can lead to request smuggling and make your applications vulnerable to request queue or cache poisoning; which could lead to credential hijacking or execution of unauthorized commands.'
+CHECK_REMEDIATION_extra7155='Ensure Application Load Balancer is configured with defensive or strictest desync mitigation mode'
+CHECK_DOC_extra7155='https://aws.amazon.com/about-aws/whats-new/2020/08/application-and-classic-load-balancers-adding-defense-in-depth-with-introduction-of-desync-mitigation-mode/'
+CHECK_CAF_EPIC_extra7155='Data Protection'
+
+extra7155() {
+  for regx in $REGIONS; do
+    LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers $PROFILE_OPT --region $regx --query 'LoadBalancers[?Type == `application`].[LoadBalancerArn]' --output text)
+    if [[ $LIST_OF_ELBSV2 ]];then
+      for alb in $LIST_OF_ELBSV2;do
+        CHECK_DESYNC_MITIGATION_MODE=$($AWSCLI elbv2 describe-load-balancer-attributes $PROFILE_OPT --region $regx --load-balancer-arn $alb --query 'Attributes[?Key==`routing.http.desync_mitigation_mode`]|[0].Value' --output text)
+        if [[ $CHECK_DESYNC_MITIGATION_MODE == "monitor" ]]; then
+          textFail "$regx: Application load balancer $alb does not have desync mitigation mode set as defensive or strictest." "$regx" "$alb"
+        else
+          textPass "$regx: Application load balancer $alb is configured with correct desync mitigation mode." "$regx" "$alb"
+        fi
+      done
+    else
+      textInfo "$regx: No Application Load Balancers found" "$regx"
+    fi
+  done
+}
diff --git a/checks/check_extra7156 b/checks/check_extra7156
new file mode 100644
index 00000000..529c0616
--- /dev/null
+++ b/checks/check_extra7156
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+CHECK_ID_extra7156="7.156"
+CHECK_TITLE_extra7156="[extra7156] Check if API Gateway V2 has Access Logging enabled"
+CHECK_SCORED_extra7156="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7156="EXTRA"
+CHECK_SEVERITY_extra7156="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7156="AwsApiGatewayV2Api"
+CHECK_ALTERNATE_check7156="extra7156"
+CHECK_SERVICENAME_extra7156="apigateway"
+CHECK_RISK_extra7156="If not enabled; the logging of API calls is not possible. This information is important for monitoring API access."
+CHECK_REMEDIATION_extra7156="Enable Access Logging in the API stage."
+CHECK_DOC_extra7156="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigatewayv2-stage-accesslogsettings.html"
+CHECK_CAF_EPIC_extra7156="Logging and Monitoring"
+
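This check carries no "# Remediation:" block like the other new checks; a possible CLI remediation sketch, assuming an existing CloudWatch Logs group (the API id, stage name and log group ARN are placeholders, and the format string is a minimal example that keeps the $context.requestId variable access logs require):

aws apigatewayv2 update-stage \
  --api-id <api-id> \
  --stage-name <stage-name> \
  --access-log-settings 'DestinationArn=<log-group-arn>,Format=$context.requestId $context.status'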
+extra7156(){
+
+  # Check if API Gateway V2 has Access Logging enabled
+  for regx in $REGIONS; do
+    LIST_OF_API_GW=$($AWSCLI apigatewayv2 get-apis $PROFILE_OPT --region $regx --query Items[*].ApiId --output text)
+    if [[ $LIST_OF_API_GW ]];then
+      for apigwid in $LIST_OF_API_GW;do
+        API_GW_NAME=$($AWSCLI apigatewayv2 get-apis $PROFILE_OPT --region $regx --query "Items[?ApiId==\`$apigwid\`].Name" --output text)
+        CHECK_STAGES_NAME=$($AWSCLI apigatewayv2 get-stages $PROFILE_OPT --region $regx --api-id $apigwid --query "Items[*].StageName" --output text)
+        if [[ $CHECK_STAGES_NAME ]];then
+          for stagename in $CHECK_STAGES_NAME;do
+            CHECK_STAGE_METHOD_LOGGING=$($AWSCLI apigatewayv2 get-stages $PROFILE_OPT --region $regx --api-id $apigwid --query "Items[?StageName == \`$stagename\` ].AccessLogSettings.DestinationArn" --output text)
+            if [[ $CHECK_STAGE_METHOD_LOGGING ]];then
+              textPass "$regx: API Gateway V2 $API_GW_NAME ID: $apigwid, stage: $stagename, has access logging enabled to $CHECK_STAGE_METHOD_LOGGING" "$regx" "$API_GW_NAME"
+            else
+              textFail "$regx: API Gateway V2 $API_GW_NAME ID: $apigwid, stage: $stagename, has access logging disabled" "$regx" "$API_GW_NAME"
+            fi
+          done
+        else
+          textFail "$regx: No Stage name found for $API_GW_NAME" "$regx" "$API_GW_NAME"
+        fi
+      done
+    else
+      textInfo "$regx: No API Gateway found" "$regx"
+    fi
+  done
+}
\ No newline at end of file
diff --git a/checks/check_extra7157 b/checks/check_extra7157
new file mode 100644
index 00000000..ec62f4a8
--- /dev/null
+++ b/checks/check_extra7157
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+CHECK_ID_extra7157="7.157"
+CHECK_TITLE_extra7157="[extra7157] Check if API Gateway V2 has configured authorizers"
+CHECK_SCORED_extra7157="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7157="EXTRA"
+CHECK_SEVERITY_extra7157="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7157="AwsApiGatewayV2Api"
+CHECK_ALTERNATE_check7157="extra7157"
+CHECK_SERVICENAME_extra7157="apigateway"
+CHECK_RISK_extra7157='If no authorizer is configured; anyone can invoke the API.'
+CHECK_REMEDIATION_extra7157='Implement a JWT or Lambda authorizer to control access to your API.'
+CHECK_DOC_extra7157='https://docs.aws.amazon.com/apigatewayv2/latest/api-reference/apis-apiid-authorizers.html'
+CHECK_CAF_EPIC_extra7157='IAM'
+
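A hedged sketch of attaching a JWT authorizer to an HTTP API from the CLI; the API id, authorizer name, audience and issuer URL are placeholders:

aws apigatewayv2 create-authorizer \
  --api-id <api-id> \
  --name <authorizer-name> \
  --authorizer-type JWT \
  --identity-source '$request.header.Authorization' \
  --jwt-configuration Audience=<audience>,Issuer=<issuer-url>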
+extra7157(){
+  for regx in $REGIONS; do
+    LIST_OF_API_GW=$($AWSCLI apigatewayv2 get-apis $PROFILE_OPT --region $regx --query "Items[*].ApiId" --output text)
+    if [[ $LIST_OF_API_GW ]];then
+      for api in $LIST_OF_API_GW; do
+        API_GW_NAME=$($AWSCLI apigatewayv2 get-apis $PROFILE_OPT --region $regx --query "Items[?ApiId==\`$api\`].Name" --output text)
+        AUTHORIZER_CONFIGURED=$($AWSCLI apigatewayv2 get-authorizers $PROFILE_OPT --region $regx --api-id $api --query "Items[*].AuthorizerType" --output text)
+        if [[ $AUTHORIZER_CONFIGURED ]]; then
+          textPass "$regx: API Gateway V2 $API_GW_NAME ID $api has authorizer configured" "$regx" "$API_GW_NAME"
+        else
+          textFail "$regx: API Gateway V2 $API_GW_NAME ID $api has no authorizer configured" "$regx" "$API_GW_NAME"
+        fi
+      done
+    else
+      textInfo "$regx: No API Gateways found" "$regx"
+    fi
+  done
+}
diff --git a/checks/check_extra7158 b/checks/check_extra7158
new file mode 100644
index 00000000..3aaf74ec
--- /dev/null
+++ b/checks/check_extra7158
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+CHECK_ID_extra7158="7.158"
+CHECK_TITLE_extra7158="[extra7158] Check if ELBV2 has listeners underneath"
+CHECK_SCORED_extra7158="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7158="EXTRA"
+CHECK_SEVERITY_extra7158="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7158="AwsElbv2LoadBalancer"
+CHECK_ALTERNATE_check7158="extra7158"
+CHECK_SERVICENAME_extra7158="elb"
+CHECK_RISK_extra7158='The rules that are defined for a listener determine how the load balancer routes requests to its registered targets.'
+CHECK_REMEDIATION_extra7158='Add listeners to Elastic Load Balancers V2.'
+CHECK_DOC_extra7158='https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-listeners.html'
+CHECK_CAF_EPIC_extra7158='Data Protection'
+
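A hedged remediation sketch for adding an HTTPS listener to a load balancer without listeners; the ARNs are placeholders and the certificate is only required for TLS/HTTPS listeners:

aws elbv2 create-listener \
  --load-balancer-arn <load-balancer-arn> \
  --protocol HTTPS \
  --port 443 \
  --certificates CertificateArn=<certificate-arn> \
  --default-actions Type=forward,TargetGroupArn=<target-group-arn>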
+extra7158(){
+  for regx in $REGIONS; do
+    LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers --query 'LoadBalancers[*].LoadBalancerArn' $PROFILE_OPT --region $regx --output text)
+    if [[ $LIST_OF_ELBSV2 ]]; then
+      for elb in $LIST_OF_ELBSV2; do
+        LIST_OF_LISTENERS=$($AWSCLI elbv2 describe-listeners $PROFILE_OPT --region $regx --load-balancer-arn $elb --query 'Listeners[*]' --output text)
+        ELBV2_NAME=$(echo $elb|cut -d\/ -f3)
+        if [[ $LIST_OF_LISTENERS ]]; then
+          textPass "$regx: $ELBV2_NAME has listeners underneath" "$regx" "$elb"
+        else
+          textFail "$regx: $ELBV2_NAME has no listeners underneath" "$regx" "$elb"
+        fi
+      done
+    else
+      textInfo "$regx: No ELBs found" "$regx"
+    fi
+  done
+}
\ No newline at end of file
diff --git a/checks/check_extra7159 b/checks/check_extra7159
new file mode 100644
index 00000000..58437519
--- /dev/null
+++ b/checks/check_extra7159
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+CHECK_ID_extra7159="7.159"
+CHECK_TITLE_extra7159="[extra7159] Check if ELB has listeners underneath"
+CHECK_SCORED_extra7159="NOT_SCORED"
+CHECK_CIS_LEVEL_extra7159="EXTRA"
+CHECK_SEVERITY_extra7159="Medium"
+CHECK_ASFF_RESOURCE_TYPE_extra7159="AwsElbLoadBalancer"
+CHECK_ALTERNATE_check7159="extra7159"
+CHECK_SERVICENAME_extra7159="elb"
+CHECK_RISK_extra7159='The rules that are defined for a listener determine how the load balancer routes requests to its registered targets.'
+CHECK_REMEDIATION_extra7159='Add listeners to Elastic Load Balancers.'
+CHECK_DOC_extra7159='https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html' +CHECK_CAF_EPIC_extra7159='Data Protection' + +extra7159(){ + for regx in $REGIONS; do + LIST_OF_ELBS=$($AWSCLI elb describe-load-balancers --query 'LoadBalancerDescriptions[*].LoadBalancerName' $PROFILE_OPT --region $regx --output text) + if [[ $LIST_OF_ELBS ]]; then + for elb in $LIST_OF_ELBS; do + LIST_OF_LISTENERS=$($AWSCLI elb describe-load-balancers --load-balancer-name $elb --query 'LoadBalancerDescriptions[*].ListenerDescriptions' $PROFILE_OPT --region $regx --output text) + if [[ $LIST_OF_LISTENERS ]]; then + textPass "$regx: $elb has listeners underneath" "$regx" "$elb" + else + textFail "$regx: $elb has no listeners underneath" "$regx" "$elb" + fi + done + else + textInfo "$regx: No ELBs found" "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra716 b/checks/check_extra716 index 9a307f67..08f2271d 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -13,7 +13,7 @@ CHECK_ID_extra716="7.16" CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public or if it has open policy access" CHECK_SCORED_extra716="NOT_SCORED" -CHECK_TYPE_extra716="EXTRA" +CHECK_CIS_LEVEL_extra716="EXTRA" CHECK_SEVERITY_extra716="Critical" CHECK_ASFF_RESOURCE_TYPE_extra716="AwsElasticsearchDomain" CHECK_ALTERNATE_check716="extra716" diff --git a/checks/check_extra717 b/checks/check_extra717 index f2f8996c..aa42a36e 100644 --- a/checks/check_extra717 +++ b/checks/check_extra717 @@ -13,7 +13,7 @@ CHECK_ID_extra717="7.17" CHECK_TITLE_extra717="[extra717] Check if Elastic Load Balancers have logging enabled" CHECK_SCORED_extra717="NOT_SCORED" -CHECK_TYPE_extra717="EXTRA" +CHECK_CIS_LEVEL_extra717="EXTRA" CHECK_SEVERITY_extra717="Medium" CHECK_ASFF_RESOURCE_TYPE_extra717="AwsElbLoadBalancer" CHECK_ALTERNATE_check717="extra717" diff --git a/checks/check_extra718 b/checks/check_extra718 index 8e1e8020..c747e1c8 100644 --- a/checks/check_extra718 +++ b/checks/check_extra718 @@ -13,7 +13,7 @@ CHECK_ID_extra718="7.18" CHECK_TITLE_extra718="[extra718] Check if S3 buckets have server access logging enabled" CHECK_SCORED_extra718="NOT_SCORED" -CHECK_TYPE_extra718="EXTRA" +CHECK_CIS_LEVEL_extra718="EXTRA" CHECK_SEVERITY_extra718="Medium" CHECK_ASFF_RESOURCE_TYPE_extra718="AwsS3Bucket" CHECK_ALTERNATE_check718="extra718" diff --git a/checks/check_extra719 b/checks/check_extra719 index b3435656..20ad0a80 100644 --- a/checks/check_extra719 +++ b/checks/check_extra719 @@ -13,7 +13,7 @@ CHECK_ID_extra719="7.19" CHECK_TITLE_extra719="[extra719] Check if Route53 public hosted zones are logging queries to CloudWatch Logs" CHECK_SCORED_extra719="NOT_SCORED" -CHECK_TYPE_extra719="EXTRA" +CHECK_CIS_LEVEL_extra719="EXTRA" CHECK_SEVERITY_extra719="Medium" CHECK_ALTERNATE_check719="extra719" CHECK_ASFF_RESOURCE_TYPE_extra719="AwsRoute53HostedZone" diff --git a/checks/check_extra72 b/checks/check_extra72 index 0d088896..019bfb26 100644 --- a/checks/check_extra72 +++ b/checks/check_extra72 @@ -13,7 +13,7 @@ CHECK_ID_extra72="7.2" CHECK_TITLE_extra72="[extra72] Ensure there are no EBS Snapshots set as Public" CHECK_SCORED_extra72="NOT_SCORED" -CHECK_TYPE_extra72="EXTRA" +CHECK_CIS_LEVEL_extra72="EXTRA" CHECK_SEVERITY_extra72="Critical" CHECK_ASFF_RESOURCE_TYPE_extra72="AwsEc2Snapshot" CHECK_ALTERNATE_extra702="extra72" diff --git a/checks/check_extra720 b/checks/check_extra720 index 06608532..7f035229 100644 --- a/checks/check_extra720 +++ 
b/checks/check_extra720 @@ -13,7 +13,7 @@ CHECK_ID_extra720="7.20" CHECK_TITLE_extra720="[extra720] Check if Lambda functions invoke API operations are being recorded by CloudTrail" CHECK_SCORED_extra720="NOT_SCORED" -CHECK_TYPE_extra720="EXTRA" +CHECK_CIS_LEVEL_extra720="EXTRA" CHECK_SEVERITY_extra720="Low" CHECK_ASFF_RESOURCE_TYPE_extra720="AwsLambdaFunction" CHECK_ALTERNATE_check720="extra720" diff --git a/checks/check_extra721 b/checks/check_extra721 index 8b2e54bf..7d63b0ee 100644 --- a/checks/check_extra721 +++ b/checks/check_extra721 @@ -13,7 +13,7 @@ CHECK_ID_extra721="7.21" CHECK_TITLE_extra721="[extra721] Check if Redshift cluster has audit logging enabled" CHECK_SCORED_extra721="NOT_SCORED" -CHECK_TYPE_extra721="EXTRA" +CHECK_CIS_LEVEL_extra721="EXTRA" CHECK_SEVERITY_extra721="Medium" CHECK_ASFF_RESOURCE_TYPE_extra721="AwsRedshiftCluster" CHECK_ALTERNATE_check721="extra721" diff --git a/checks/check_extra722 b/checks/check_extra722 index 4db8470c..3def51c4 100644 --- a/checks/check_extra722 +++ b/checks/check_extra722 @@ -13,7 +13,7 @@ CHECK_ID_extra722="7.22" CHECK_TITLE_extra722="[extra722] Check if API Gateway has logging enabled" CHECK_SCORED_extra722="NOT_SCORED" -CHECK_TYPE_extra722="EXTRA" +CHECK_CIS_LEVEL_extra722="EXTRA" CHECK_SEVERITY_extra722="Medium" CHECK_ASFF_RESOURCE_TYPE_extra722="AwsApiGatewayRestApi" CHECK_ALTERNATE_check722="extra722" diff --git a/checks/check_extra723 b/checks/check_extra723 index 9653b956..11a746ec 100644 --- a/checks/check_extra723 +++ b/checks/check_extra723 @@ -13,7 +13,7 @@ CHECK_ID_extra723="7.23" CHECK_TITLE_extra723="[extra723] Check if RDS Snapshots and Cluster Snapshots are public" CHECK_SCORED_extra723="NOT_SCORED" -CHECK_TYPE_extra723="EXTRA" +CHECK_CIS_LEVEL_extra723="EXTRA" CHECK_SEVERITY_extra723="Critical" CHECK_ASFF_RESOURCE_TYPE_extra723="AwsRdsDbSnapshot" CHECK_ALTERNATE_check723="extra723" diff --git a/checks/check_extra724 b/checks/check_extra724 index e0b2497f..25ff48e6 100644 --- a/checks/check_extra724 +++ b/checks/check_extra724 @@ -13,7 +13,7 @@ CHECK_ID_extra724="7.24" CHECK_TITLE_extra724="[extra724] Check if ACM certificates have Certificate Transparency logging enabled" CHECK_SCORED_extra724="NOT_SCORED" -CHECK_TYPE_extra724="EXTRA" +CHECK_CIS_LEVEL_extra724="EXTRA" CHECK_SEVERITY_extra724="Medium" CHECK_ASFF_RESOURCE_TYPE_extra724="AwsCertificateManagerCertificate" CHECK_ALTERNATE_check724="extra724" diff --git a/checks/check_extra725 b/checks/check_extra725 index 4100b083..3f1edcf3 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -14,7 +14,7 @@ CHECK_ID_extra725="7.25" CHECK_TITLE_extra725="[extra725] Check if S3 buckets have Object-level logging enabled in CloudTrail" CHECK_SCORED_extra725="NOT_SCORED" -CHECK_TYPE_extra725="EXTRA" +CHECK_CIS_LEVEL_extra725="EXTRA" CHECK_SEVERITY_extra725="Medium" CHECK_ASFF_RESOURCE_TYPE_extra725="AwsS3Bucket" CHECK_ALTERNATE_check725="extra725" diff --git a/checks/check_extra726 b/checks/check_extra726 index 76de3c84..ac61aaea 100644 --- a/checks/check_extra726 +++ b/checks/check_extra726 @@ -14,7 +14,7 @@ CHECK_ID_extra726="7.26" CHECK_TITLE_extra726="[extra726] Check Trusted Advisor for errors and warnings" CHECK_SCORED_extra726="NOT_SCORED" -CHECK_TYPE_extra726="EXTRA" +CHECK_CIS_LEVEL_extra726="EXTRA" CHECK_SEVERITY_extra726="Medium" CHECK_ALTERNATE_check726="extra726" CHECK_SERVICENAME_extra726="trustedadvisor" diff --git a/checks/check_extra727 b/checks/check_extra727 index 797401be..e66962be 100644 --- a/checks/check_extra727 +++ 
b/checks/check_extra727 @@ -14,7 +14,7 @@ CHECK_ID_extra727="7.27" CHECK_TITLE_extra727="[extra727] Check if SQS queues have policy set as Public" CHECK_SCORED_extra727="NOT_SCORED" -CHECK_TYPE_extra727="EXTRA" +CHECK_CIS_LEVEL_extra727="EXTRA" CHECK_SEVERITY_extra727="Critical" CHECK_ASFF_RESOURCE_TYPE_extra727="AwsSqsQueue" CHECK_ALTERNATE_check727="extra727" diff --git a/checks/check_extra728 b/checks/check_extra728 index f7589af1..629d6029 100644 --- a/checks/check_extra728 +++ b/checks/check_extra728 @@ -14,7 +14,7 @@ CHECK_ID_extra728="7.28" CHECK_TITLE_extra728="[extra728] Check if SQS queues have Server Side Encryption enabled" CHECK_SCORED_extra728="NOT_SCORED" -CHECK_TYPE_extra728="EXTRA" +CHECK_CIS_LEVEL_extra728="EXTRA" CHECK_SEVERITY_extra728="Medium" CHECK_ASFF_RESOURCE_TYPE_extra728="AwsSqsQueue" CHECK_ALTERNATE_check728="extra728" diff --git a/checks/check_extra729 b/checks/check_extra729 index 743e568d..5a839e5e 100644 --- a/checks/check_extra729 +++ b/checks/check_extra729 @@ -14,7 +14,7 @@ CHECK_ID_extra729="7.29" CHECK_TITLE_extra729="[extra729] Ensure there are no EBS Volumes unencrypted" CHECK_SCORED_extra729="NOT_SCORED" -CHECK_TYPE_extra729="EXTRA" +CHECK_CIS_LEVEL_extra729="EXTRA" CHECK_SEVERITY_extra729="Medium" CHECK_ASFF_RESOURCE_TYPE_extra729="AwsEc2Volume" CHECK_ALTERNATE_check729="extra729" diff --git a/checks/check_extra73 b/checks/check_extra73 index c2329607..6bb99f69 100644 --- a/checks/check_extra73 +++ b/checks/check_extra73 @@ -14,7 +14,7 @@ CHECK_ID_extra73="7.3" CHECK_TITLE_extra73="[extra73] Ensure there are no S3 buckets open to Everyone or Any AWS user" CHECK_SCORED_extra73="NOT_SCORED" -CHECK_TYPE_extra73="EXTRA" +CHECK_CIS_LEVEL_extra73="EXTRA" CHECK_SEVERITY_extra73="Critical" CHECK_ASFF_RESOURCE_TYPE_extra73="AwsS3Bucket" CHECK_ALTERNATE_extra703="extra73" diff --git a/checks/check_extra730 b/checks/check_extra730 index 706922fa..ed013af9 100644 --- a/checks/check_extra730 +++ b/checks/check_extra730 @@ -16,7 +16,7 @@ DAYS_TO_EXPIRE_THRESHOLD="7" CHECK_ID_extra730="7.30" CHECK_TITLE_extra730="[extra730] Check if ACM Certificates are about to expire in $DAYS_TO_EXPIRE_THRESHOLD days or less" CHECK_SCORED_extra730="NOT_SCORED" -CHECK_TYPE_extra730="EXTRA" +CHECK_CIS_LEVEL_extra730="EXTRA" CHECK_SEVERITY_extra730="High" CHECK_ASFF_RESOURCE_TYPE_extra730="AwsCertificateManagerCertificate" CHECK_ALTERNATE_check730="extra730" diff --git a/checks/check_extra731 b/checks/check_extra731 index 3a5eec01..fcda11fa 100644 --- a/checks/check_extra731 +++ b/checks/check_extra731 @@ -14,7 +14,7 @@ CHECK_ID_extra731="7.31" CHECK_TITLE_extra731="[extra731] Check if SNS topics have policy set as Public" CHECK_SCORED_extra731="NOT_SCORED" -CHECK_TYPE_extra731="EXTRA" +CHECK_CIS_LEVEL_extra731="EXTRA" CHECK_SEVERITY_extra731="Critical" CHECK_ASFF_RESOURCE_TYPE_extra731="AwsSnsTopic" CHECK_ALTERNATE_check731="extra731" diff --git a/checks/check_extra732 b/checks/check_extra732 index 3b584d34..6cbb715d 100644 --- a/checks/check_extra732 +++ b/checks/check_extra732 @@ -14,7 +14,7 @@ CHECK_ID_extra732="7.32" CHECK_TITLE_extra732="[extra732] Check if Geo restrictions are enabled in CloudFront distributions" CHECK_SCORED_extra732="NOT_SCORED" -CHECK_TYPE_extra732="EXTRA" +CHECK_CIS_LEVEL_extra732="EXTRA" CHECK_SEVERITY_extra732="Low" CHECK_ASFF_RESOURCE_TYPE_extra732="AwsCloudFrontDistribution" CHECK_ALTERNATE_check732="extra732" diff --git a/checks/check_extra733 b/checks/check_extra733 index 24ea3275..4359bddb 100644 --- a/checks/check_extra733 +++ 
b/checks/check_extra733 @@ -14,7 +14,7 @@ CHECK_ID_extra733="7.33" CHECK_TITLE_extra733="[extra733] Check if there are SAML Providers then STS can be used" CHECK_SCORED_extra733="NOT_SCORED" -CHECK_TYPE_extra733="EXTRA" +CHECK_CIS_LEVEL_extra733="EXTRA" CHECK_SEVERITY_extra733="Low" CHECK_ALTERNATE_check733="extra733" CHECK_ASFF_COMPLIANCE_TYPE_extra733="ens-op.acc.1.aws.iam.1" diff --git a/checks/check_extra734 b/checks/check_extra734 index bb6e2ae5..08646604 100644 --- a/checks/check_extra734 +++ b/checks/check_extra734 @@ -13,7 +13,7 @@ CHECK_ID_extra734="7.34" CHECK_TITLE_extra734="[extra734] Check if S3 buckets have default encryption (SSE) enabled or use a bucket policy to enforce it" CHECK_SCORED_extra734="NOT_SCORED" -CHECK_TYPE_extra734="EXTRA" +CHECK_CIS_LEVEL_extra734="EXTRA" CHECK_SEVERITY_extra734="Medium" CHECK_ASFF_RESOURCE_TYPE_extra734="AwsS3Bucket" CHECK_ALTERNATE_check734="extra734" @@ -72,7 +72,7 @@ extra734(){ fi # check if the S3 policy forces SSE s3:x-amz-server-side-encryption:true - CHECK_BUCKET_SSE_POLICY_PRESENT=$(cat $TEMP_SSE_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}/*" '.Statement[]|select(.Effect=="Deny" and ((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*") and .Action=="s3:PutObject" and .Resource==$arn and .Condition.StringEquals."s3:x-amz-server-side-encryption" != null)') + CHECK_BUCKET_SSE_POLICY_PRESENT=$(cat $TEMP_SSE_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}/*" '.Statement[]|select(.Effect=="Deny" and ((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*") and .Action=="s3:PutObject" and .Resource==$arn and .Condition.StringNotEquals."s3:x-amz-server-side-encryption" != null)') if [[ $CHECK_BUCKET_SSE_POLICY_PRESENT == "" ]]; then textFail "$BUCKET_LOCATION: Bucket $bucket does not enforce encryption!" 
"$BUCKET_LOCATION" "$bucket" rm -f $TEMP_SSE_POLICY_FILE diff --git a/checks/check_extra735 b/checks/check_extra735 index 72cb30f9..0d7a88aa 100644 --- a/checks/check_extra735 +++ b/checks/check_extra735 @@ -13,7 +13,7 @@ CHECK_ID_extra735="7.35" CHECK_TITLE_extra735="[extra735] Check if RDS instances storage is encrypted" CHECK_SCORED_extra735="NOT_SCORED" -CHECK_TYPE_extra735="EXTRA" +CHECK_CIS_LEVEL_extra735="EXTRA" CHECK_SEVERITY_extra735="Medium" CHECK_ASFF_RESOURCE_TYPE_extra735="AwsRdsDbInstance" CHECK_ALTERNATE_check735="extra735" diff --git a/checks/check_extra736 b/checks/check_extra736 index 1c87be8e..2fb4c4bc 100644 --- a/checks/check_extra736 +++ b/checks/check_extra736 @@ -13,7 +13,7 @@ CHECK_ID_extra736="7.36" CHECK_TITLE_extra736="[extra736] Check exposed KMS keys" CHECK_SCORED_extra736="NOT_SCORED" -CHECK_TYPE_extra736="EXTRA" +CHECK_CIS_LEVEL_extra736="EXTRA" CHECK_SEVERITY_extra736="Critical" CHECK_ASFF_RESOURCE_TYPE_extra736="AwsKmsKey" CHECK_ALTERNATE_check736="extra736" @@ -29,7 +29,7 @@ extra736(){ LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --query "Aliases[].[AliasName,TargetKeyId]" --output text |grep -v ^alias/aws/ |awk '{ print $2 }') if [[ $LIST_OF_CUSTOMER_KMS_KEYS ]];then for key in $LIST_OF_CUSTOMER_KMS_KEYS; do - CHECK_POLICY=$($AWSCLI kms get-key-policy --key-id $key --policy-name default $PROFILE_OPT --region $regx --output text|awk '/Principal/{n=NR+1} n>=NR' |grep AWS\"\ :\ \"\\*\"$) + CHECK_POLICY=$($AWSCLI kms get-key-policy --key-id $key --policy-name default $PROFILE_OPT --region $regx --output text| jq '.Statement[]|select(.Effect=="Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') if [[ $CHECK_POLICY ]]; then textFail "$regx: KMS key $key may be publicly accessible!" 
"$regx" "$key" else diff --git a/checks/check_extra738 b/checks/check_extra738 index 2a637a9d..10b97118 100644 --- a/checks/check_extra738 +++ b/checks/check_extra738 @@ -13,7 +13,7 @@ CHECK_ID_extra738="7.38" CHECK_TITLE_extra738="[extra738] Check if CloudFront distributions are set to HTTPS" CHECK_SCORED_extra738="NOT_SCORED" -CHECK_TYPE_extra738="EXTRA" +CHECK_CIS_LEVEL_extra738="EXTRA" CHECK_SEVERITY_extra738="Medium" CHECK_ASFF_RESOURCE_TYPE_extra738="AwsCloudFrontDistribution" CHECK_ALTERNATE_check738="extra738" diff --git a/checks/check_extra739 b/checks/check_extra739 index 0dea5d78..2f998df0 100644 --- a/checks/check_extra739 +++ b/checks/check_extra739 @@ -13,7 +13,7 @@ CHECK_ID_extra739="7.39" CHECK_TITLE_extra739="[extra739] Check if RDS instances have backup enabled" CHECK_SCORED_extra739="NOT_SCORED" -CHECK_TYPE_extra739="EXTRA" +CHECK_CIS_LEVEL_extra739="EXTRA" CHECK_SEVERITY_extra739="Medium" CHECK_ASFF_RESOURCE_TYPE_extra739="AwsRdsDbInstance" CHECK_ALTERNATE_check739="extra739" diff --git a/checks/check_extra74 b/checks/check_extra74 index 7d94a6a9..2c57b776 100644 --- a/checks/check_extra74 +++ b/checks/check_extra74 @@ -13,7 +13,7 @@ CHECK_ID_extra74="7.4" CHECK_TITLE_extra74="[extra74] Ensure there are no Security Groups without ingress filtering being used" CHECK_SCORED_extra74="NOT_SCORED" -CHECK_TYPE_extra74="EXTRA" +CHECK_CIS_LEVEL_extra74="EXTRA" CHECK_SEVERITY_extra74="High" CHECK_ASFF_RESOURCE_TYPE_extra74="AwsEc2SecurityGroup" CHECK_ALTERNATE_extra704="extra74" diff --git a/checks/check_extra740 b/checks/check_extra740 index 7f771663..d939a305 100644 --- a/checks/check_extra740 +++ b/checks/check_extra740 @@ -13,7 +13,7 @@ CHECK_ID_extra740="7.40" CHECK_TITLE_extra740="[extra740] Check if EBS snapshots are encrypted" CHECK_SCORED_extra740="NOT_SCORED" -CHECK_TYPE_extra740="EXTRA" +CHECK_CIS_LEVEL_extra740="EXTRA" CHECK_SEVERITY_extra740="Medium" CHECK_ASFF_RESOURCE_TYPE_extra740="AwsEc2Snapshot" CHECK_ALTERNATE_check740="extra740" diff --git a/checks/check_extra741 b/checks/check_extra741 index ebf12543..8a7d87e4 100644 --- a/checks/check_extra741 +++ b/checks/check_extra741 @@ -13,7 +13,7 @@ CHECK_ID_extra741="7.41" CHECK_TITLE_extra741="[extra741] Find secrets in EC2 User Data" CHECK_SCORED_extra741="NOT_SCORED" -CHECK_TYPE_extra741="EXTRA" +CHECK_CIS_LEVEL_extra741="EXTRA" CHECK_SEVERITY_extra741="Critical" CHECK_ASFF_RESOURCE_TYPE_extra741="AwsEc2Instance" CHECK_ALTERNATE_check741="extra741" @@ -50,7 +50,7 @@ extra741(){ # delete file if nothing interesting is there rm -f "$EC2_USERDATA_FILE" else - textFail "$regx: Potential secret found in $instance User Data" "$regx" "$regx" "$instance" + textFail "$regx: Potential secret found in $instance User Data" "$regx" "$instance" # delete file to not leave trace, user must look at the instance User Data rm -f "$EC2_USERDATA_FILE" fi diff --git a/checks/check_extra742 b/checks/check_extra742 index 6c78c7a9..6933c1af 100644 --- a/checks/check_extra742 +++ b/checks/check_extra742 @@ -13,7 +13,7 @@ CHECK_ID_extra742="7.42" CHECK_TITLE_extra742="[extra742] Find secrets in CloudFormation outputs" CHECK_SCORED_extra742="NOT_SCORED" -CHECK_TYPE_extra742="EXTRA" +CHECK_CIS_LEVEL_extra742="EXTRA" CHECK_SEVERITY_extra742="Critical" CHECK_ASFF_RESOURCE_TYPE_extra742="AwsCloudFormationStack" CHECK_ALTERNATE_check742="extra742" diff --git a/checks/check_extra743 b/checks/check_extra743 index b5c365a4..b7112cf9 100644 --- a/checks/check_extra743 +++ b/checks/check_extra743 @@ -13,7 +13,7 @@ CHECK_ID_extra743="7.43" 
CHECK_TITLE_extra743="[extra743] Check if API Gateway has client certificate enabled to access your backend endpoint" CHECK_SCORED_extra743="NOT_SCORED" -CHECK_TYPE_extra743="EXTRA" +CHECK_CIS_LEVEL_extra743="EXTRA" CHECK_SEVERITY_extra743="Medium" CHECK_ASFF_RESOURCE_TYPE_extra743="AwsApiGatewayRestApi" CHECK_ALTERNATE_check743="extra743" diff --git a/checks/check_extra744 b/checks/check_extra744 index 48cf6f11..a8672c94 100644 --- a/checks/check_extra744 +++ b/checks/check_extra744 @@ -13,7 +13,7 @@ CHECK_ID_extra744="7.44" CHECK_TITLE_extra744="[extra744] Check if API Gateway has a WAF ACL attached" CHECK_SCORED_extra744="NOT_SCORED" -CHECK_TYPE_extra744="EXTRA" +CHECK_CIS_LEVEL_extra744="EXTRA" CHECK_SEVERITY_extra744="Medium" CHECK_ASFF_RESOURCE_TYPE_extra744="AwsApiGatewayRestApi" CHECK_ALTERNATE_check744="extra744" diff --git a/checks/check_extra745 b/checks/check_extra745 index 1ee49e72..37cb6b17 100644 --- a/checks/check_extra745 +++ b/checks/check_extra745 @@ -13,7 +13,7 @@ CHECK_ID_extra745="7.45" CHECK_TITLE_extra745="[extra745] Check if API Gateway endpoint is public or private" CHECK_SCORED_extra745="NOT_SCORED" -CHECK_TYPE_extra745="EXTRA" +CHECK_CIS_LEVEL_extra745="EXTRA" CHECK_SEVERITY_extra745="Medium" CHECK_ASFF_RESOURCE_TYPE_extra745="AwsApiGatewayRestApi" CHECK_ALTERNATE_check745="extra745" diff --git a/checks/check_extra746 b/checks/check_extra746 index 638d15ef..e2ff570a 100644 --- a/checks/check_extra746 +++ b/checks/check_extra746 @@ -13,7 +13,7 @@ CHECK_ID_extra746="7.46" CHECK_TITLE_extra746="[extra746] Check if API Gateway has configured authorizers" CHECK_SCORED_extra746="NOT_SCORED" -CHECK_TYPE_extra746="EXTRA" +CHECK_CIS_LEVEL_extra746="EXTRA" CHECK_SEVERITY_extra746="Medium" CHECK_ASFF_RESOURCE_TYPE_extra746="AwsApiGatewayRestApi" CHECK_ALTERNATE_check746="extra746" diff --git a/checks/check_extra747 b/checks/check_extra747 index 2b2ede3b..80cedad3 100644 --- a/checks/check_extra747 +++ b/checks/check_extra747 @@ -13,7 +13,7 @@ CHECK_ID_extra747="7.47" CHECK_TITLE_extra747="[extra747] Check if RDS instances is integrated with CloudWatch Logs" CHECK_SCORED_extra747="NOT_SCORED" -CHECK_TYPE_extra747="EXTRA" +CHECK_CIS_LEVEL_extra747="EXTRA" CHECK_SEVERITY_extra747="Medium" CHECK_ASFF_RESOURCE_TYPE_extra747="AwsRdsDbInstance" CHECK_ALTERNATE_check747="extra747" diff --git a/checks/check_extra748 b/checks/check_extra748 index f46ef6c5..49c10e76 100644 --- a/checks/check_extra748 +++ b/checks/check_extra748 @@ -13,7 +13,7 @@ CHECK_ID_extra748="7.48" CHECK_TITLE_extra748="[extra748] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to any port" CHECK_SCORED_extra748="NOT_SCORED" -CHECK_TYPE_extra748="EXTRA" +CHECK_CIS_LEVEL_extra748="EXTRA" CHECK_SEVERITY_extra748="High" CHECK_ASFF_RESOURCE_TYPE_extra748="AwsEc2SecurityGroup" CHECK_ALTERNATE_check748="extra748" diff --git a/checks/check_extra749 b/checks/check_extra749 index 820d2f68..28dbaf46 100644 --- a/checks/check_extra749 +++ b/checks/check_extra749 @@ -13,7 +13,7 @@ CHECK_ID_extra749="7.49" CHECK_TITLE_extra749="[extra749] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Oracle ports 1521 or 2483" CHECK_SCORED_extra749="NOT_SCORED" -CHECK_TYPE_extra749="EXTRA" +CHECK_CIS_LEVEL_extra749="EXTRA" CHECK_SEVERITY_extra749="High" CHECK_ASFF_RESOURCE_TYPE_extra749="AwsEc2SecurityGroup" CHECK_ALTERNATE_check749="extra749" diff --git a/checks/check_extra75 b/checks/check_extra75 index 34a05fb8..8d24a414 100644 --- a/checks/check_extra75 +++ b/checks/check_extra75 @@ -13,7 +13,7 @@ 
CHECK_ID_extra75="7.5" CHECK_TITLE_extra75="[extra75] Ensure there are no Security Groups not being used" CHECK_SCORED_extra75="NOT_SCORED" -CHECK_TYPE_extra75="EXTRA" +CHECK_CIS_LEVEL_extra75="EXTRA" CHECK_SEVERITY_extra75="Informational" CHECK_ASFF_RESOURCE_TYPE_extra75="AwsEc2SecurityGroup" CHECK_ALTERNATE_extra705="extra75" diff --git a/checks/check_extra750 b/checks/check_extra750 index 62dcf115..1f0f30c5 100644 --- a/checks/check_extra750 +++ b/checks/check_extra750 @@ -13,7 +13,7 @@ CHECK_ID_extra750="7.50" CHECK_TITLE_extra750="[extra750] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to MySQL port 3306" CHECK_SCORED_extra750="NOT_SCORED" -CHECK_TYPE_extra750="EXTRA" +CHECK_CIS_LEVEL_extra750="EXTRA" CHECK_SEVERITY_extra750="High" CHECK_ASFF_RESOURCE_TYPE_extra750="AwsEc2SecurityGroup" CHECK_ALTERNATE_check750="extra750" diff --git a/checks/check_extra751 b/checks/check_extra751 index c98cd4fe..2b31dd91 100644 --- a/checks/check_extra751 +++ b/checks/check_extra751 @@ -13,7 +13,7 @@ CHECK_ID_extra751="7.51" CHECK_TITLE_extra751="[extra751] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Postgres port 5432" CHECK_SCORED_extra751="NOT_SCORED" -CHECK_TYPE_extra751="EXTRA" +CHECK_CIS_LEVEL_extra751="EXTRA" CHECK_SEVERITY_extra751="High" CHECK_ASFF_RESOURCE_TYPE_extra751="AwsEc2SecurityGroup" CHECK_ALTERNATE_check751="extra751" diff --git a/checks/check_extra752 b/checks/check_extra752 index 07aa549d..d60d32f2 100644 --- a/checks/check_extra752 +++ b/checks/check_extra752 @@ -13,7 +13,7 @@ CHECK_ID_extra752="7.52" CHECK_TITLE_extra752="[extra752] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Redis port 6379" CHECK_SCORED_extra752="NOT_SCORED" -CHECK_TYPE_extra752="EXTRA" +CHECK_CIS_LEVEL_extra752="EXTRA" CHECK_SEVERITY_extra752="High" CHECK_ASFF_RESOURCE_TYPE_extra752="AwsEc2SecurityGroup" CHECK_ALTERNATE_check752="extra752" diff --git a/checks/check_extra753 b/checks/check_extra753 index 34042b6e..bd11d24b 100644 --- a/checks/check_extra753 +++ b/checks/check_extra753 @@ -13,7 +13,7 @@ CHECK_ID_extra753="7.53" CHECK_TITLE_extra753="[extra753] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to MongoDB ports 27017 and 27018" CHECK_SCORED_extra753="NOT_SCORED" -CHECK_TYPE_extra753="EXTRA" +CHECK_CIS_LEVEL_extra753="EXTRA" CHECK_SEVERITY_extra753="High" CHECK_ASFF_RESOURCE_TYPE_extra753="AwsEc2SecurityGroup" CHECK_ALTERNATE_check753="extra753" diff --git a/checks/check_extra754 b/checks/check_extra754 index 4277fe4f..a2252297 100644 --- a/checks/check_extra754 +++ b/checks/check_extra754 @@ -13,7 +13,7 @@ CHECK_ID_extra754="7.54" CHECK_TITLE_extra754="[extra754] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Cassandra ports 7199 or 9160 or 8888" CHECK_SCORED_extra754="NOT_SCORED" -CHECK_TYPE_extra754="EXTRA" +CHECK_CIS_LEVEL_extra754="EXTRA" CHECK_SEVERITY_extra754="High" CHECK_ASFF_RESOURCE_TYPE_extra754="AwsEc2SecurityGroup" CHECK_ALTERNATE_check754="extra754" diff --git a/checks/check_extra755 b/checks/check_extra755 index 50430f1a..53ab014b 100644 --- a/checks/check_extra755 +++ b/checks/check_extra755 @@ -13,7 +13,7 @@ CHECK_ID_extra755="7.55" CHECK_TITLE_extra755="[extra755] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Memcached port 11211" CHECK_SCORED_extra755="NOT_SCORED" -CHECK_TYPE_extra755="EXTRA" +CHECK_CIS_LEVEL_extra755="EXTRA" CHECK_SEVERITY_extra755="High" CHECK_ASFF_RESOURCE_TYPE_extra755="AwsEc2SecurityGroup" CHECK_ALTERNATE_check755="extra755" diff --git 
a/checks/check_extra757 b/checks/check_extra757 index 364caab7..dc603afe 100644 --- a/checks/check_extra757 +++ b/checks/check_extra757 @@ -13,7 +13,7 @@ CHECK_ID_extra757="7.57" CHECK_TITLE_extra757="[extra757] Check EC2 Instances older than 6 months" CHECK_SCORED_extra757="NOT_SCORED" -CHECK_TYPE_extra757="EXTRA" +CHECK_CIS_LEVEL_extra757="EXTRA" CHECK_SEVERITY_extra757="Medium" CHECK_ASFF_RESOURCE_TYPE_extra757="AwsEc2Instance" CHECK_ALTERNATE_check757="extra757" diff --git a/checks/check_extra758 b/checks/check_extra758 index 0beabcf4..63129eb3 100644 --- a/checks/check_extra758 +++ b/checks/check_extra758 @@ -13,7 +13,7 @@ CHECK_ID_extra758="7.58" CHECK_TITLE_extra758="[extra758] Check EC2 Instances older than 12 months " CHECK_SCORED_extra758="NOT_SCORED" -CHECK_TYPE_extra758="EXTRA" +CHECK_CIS_LEVEL_extra758="EXTRA" CHECK_SEVERITY_extra758="Medium" CHECK_ASFF_RESOURCE_TYPE_extra758="AwsEc2Instance" CHECK_ALTERNATE_check758="extra758" diff --git a/checks/check_extra759 b/checks/check_extra759 index 15f73fcd..533f0445 100644 --- a/checks/check_extra759 +++ b/checks/check_extra759 @@ -13,7 +13,7 @@ CHECK_ID_extra759="7.59" CHECK_TITLE_extra759="[extra759] Find secrets in Lambda functions variables " CHECK_SCORED_extra759="NOT_SCORED" -CHECK_TYPE_extra759="EXTRA" +CHECK_CIS_LEVEL_extra759="EXTRA" CHECK_SEVERITY_extra759="Critical" CHECK_ASFF_RESOURCE_TYPE_extra759="AwsLambdaFunction" CHECK_ALTERNATE_check759="extra759" @@ -35,7 +35,7 @@ extra759(){ if [[ $LIST_OF_FUNCTIONS ]]; then for lambdafunction in $LIST_OF_FUNCTIONS;do LAMBDA_FUNCTION_VARIABLES_FILE="$SECRETS_TEMP_FOLDER/extra759-$lambdafunction-$regx-variables.txt" - LAMBDA_FUNCTION_VARIABLES=$($AWSCLI lambda $PROFILE_OPT --region $regx get-function-configuration --function-name $lambdafunction --query 'Environment.Variables' --output text > $LAMBDA_FUNCTION_VARIABLES_FILE) + LAMBDA_FUNCTION_VARIABLES=$($AWSCLI lambda $PROFILE_OPT --region $regx get-function-configuration --function-name $lambdafunction --query 'Environment.Variables' --output json > $LAMBDA_FUNCTION_VARIABLES_FILE) if [ -s $LAMBDA_FUNCTION_VARIABLES_FILE ];then # Implementation using https://github.com/Yelp/detect-secrets FINDINGS=$(secretsDetector file $LAMBDA_FUNCTION_VARIABLES_FILE) diff --git a/checks/check_extra76 b/checks/check_extra76 index 9124b8cb..3a5d7e54 100644 --- a/checks/check_extra76 +++ b/checks/check_extra76 @@ -13,7 +13,7 @@ CHECK_ID_extra76="7.6" CHECK_TITLE_extra76="[extra76] Ensure there are no EC2 AMIs set as Public" CHECK_SCORED_extra76="NOT_SCORED" -CHECK_TYPE_extra76="EXTRA" +CHECK_CIS_LEVEL_extra76="EXTRA" CHECK_SEVERITY_extra76="Critical" CHECK_ALTERNATE_extra706="extra76" CHECK_ALTERNATE_check76="extra76" diff --git a/checks/check_extra760 b/checks/check_extra760 index ca70b83f..143ab96e 100644 --- a/checks/check_extra760 +++ b/checks/check_extra760 @@ -13,7 +13,7 @@ CHECK_ID_extra760="7.60" CHECK_TITLE_extra760="[extra760] Find secrets in Lambda functions code " CHECK_SCORED_extra760="NOT_SCORED" -CHECK_TYPE_extra760="EXTRA" +CHECK_CIS_LEVEL_extra760="EXTRA" CHECK_SEVERITY_extra760="Critical" CHECK_ASFF_RESOURCE_TYPE_extra760="AwsLambdaFunction" CHECK_ALTERNATE_check760="extra760" diff --git a/checks/check_extra761 b/checks/check_extra761 index 34ecb953..6d10e7e1 100644 --- a/checks/check_extra761 +++ b/checks/check_extra761 @@ -13,7 +13,7 @@ CHECK_ID_extra761="7.61" CHECK_TITLE_extra761="[extra761] Check if EBS Default Encryption is activated " CHECK_SCORED_extra761="NOT_SCORED" -CHECK_TYPE_extra761="EXTRA" 
+CHECK_CIS_LEVEL_extra761="EXTRA" CHECK_SEVERITY_extra761="Medium" CHECK_ALTERNATE_check761="extra761" CHECK_ASFF_COMPLIANCE_TYPE_extra761="ens-mp.info.3.aws.ebs.2" diff --git a/checks/check_extra762 b/checks/check_extra762 index 2345f058..28f6c2ab 100644 --- a/checks/check_extra762 +++ b/checks/check_extra762 @@ -13,7 +13,7 @@ CHECK_ID_extra762="7.62" CHECK_TITLE_extra762="[extra762] Find obsolete Lambda runtimes " CHECK_SCORED_extra762="NOT_SCORED" -CHECK_TYPE_extra762="EXTRA" +CHECK_CIS_LEVEL_extra762="EXTRA" CHECK_SEVERITY_extra762="Medium" CHECK_ASFF_RESOURCE_TYPE_extra762="AwsLambdaFunction" CHECK_ALTERNATE_check762="extra762" diff --git a/checks/check_extra763 b/checks/check_extra763 index d2d53e0d..765755ce 100644 --- a/checks/check_extra763 +++ b/checks/check_extra763 @@ -13,7 +13,7 @@ CHECK_ID_extra763="7.63" CHECK_TITLE_extra763="[extra763] Check if S3 buckets have object versioning enabled " CHECK_SCORED_extra763="NOT_SCORED" -CHECK_TYPE_extra763="EXTRA" +CHECK_CIS_LEVEL_extra763="EXTRA" CHECK_SEVERITY_extra763="Medium" CHECK_ASFF_RESOURCE_TYPE_extra763="AwsS3Bucket" CHECK_ALTERNATE_check763="extra763" diff --git a/checks/check_extra764 b/checks/check_extra764 index 67a6158c..ab84e553 100644 --- a/checks/check_extra764 +++ b/checks/check_extra764 @@ -13,7 +13,7 @@ CHECK_ID_extra764="7.64" CHECK_TITLE_extra764="[extra764] Check if S3 buckets have secure transport policy " CHECK_SCORED_extra764="NOT_SCORED" -CHECK_TYPE_extra764="EXTRA" +CHECK_CIS_LEVEL_extra764="EXTRA" CHECK_SEVERITY_extra764="Medium" CHECK_ASFF_RESOURCE_TYPE_extra764="AwsS3Bucket" CHECK_ALTERNATE_check764="extra764" diff --git a/checks/check_extra765 b/checks/check_extra765 index 38cdc508..34d417cd 100644 --- a/checks/check_extra765 +++ b/checks/check_extra765 @@ -23,7 +23,7 @@ CHECK_ID_extra765="7.65" CHECK_TITLE_extra765="[extra765] Check if ECR image scan on push is enabled " CHECK_SCORED_extra765="NOT_SCORED" -CHECK_TYPE_extra765="EXTRA" +CHECK_CIS_LEVEL_extra765="EXTRA" CHECK_SEVERITY_extra765="Medium" CHECK_ALTERNATE_check765="extra765" CHECK_SERVICENAME_extra765="ecr" diff --git a/checks/check_extra767 b/checks/check_extra767 index 7bff69fd..1b7d76d5 100644 --- a/checks/check_extra767 +++ b/checks/check_extra767 @@ -13,7 +13,7 @@ CHECK_ID_extra767="7.67" CHECK_TITLE_extra767="[extra767] Check if CloudFront distributions have Field Level Encryption enabled " CHECK_SCORED_extra767="NOT_SCORED" -CHECK_TYPE_extra767="EXTRA" +CHECK_CIS_LEVEL_extra767="EXTRA" CHECK_SEVERITY_extra767="Low" CHECK_ASFF_RESOURCE_TYPE_extra767="AwsCloudFrontDistribution" CHECK_ALTERNATE_check767="extra767" diff --git a/checks/check_extra768 b/checks/check_extra768 index dc222b5d..5e59c7b8 100644 --- a/checks/check_extra768 +++ b/checks/check_extra768 @@ -11,26 +11,25 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra768="7.68" -CHECK_TITLE_extra768="[extra768] Find secrets in ECS task definitions variables " +CHECK_TITLE_extra768="[extra768] Find secrets in ECS task definitions environment variables " CHECK_SCORED_extra768="NOT_SCORED" -CHECK_TYPE_extra768="EXTRA" +CHECK_CIS_LEVEL_extra768="EXTRA" CHECK_SEVERITY_extra768="Critical" CHECK_ASFF_RESOURCE_TYPE_extra768="AwsEcsTaskDefinition" CHECK_ALTERNATE_check768="extra768" CHECK_SERVICENAME_extra768="ecs" -CHECK_RISK_extra768='The use of a hard-coded password increases the possibility of password guessing. 
If hard-coded passwords are used; it is possible that malicious users gain access through the account in question.' -CHECK_REMEDIATION_extra768='Use Secrets Manager to securely provide database credentials to Lambda functions and secure the databases as well as use the credentials to connect and query them without hardcoding the secrets in code or passing them through environmental variables. ' -CHECK_DOC_extra768='https://docs.aws.amazon.com/secretsmanager/latest/userguide/lambda-functions.html' +CHECK_RISK_extra768='The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used; it is possible that malicious users gain access through the account in question.' +CHECK_REMEDIATION_extra768='Use Secrets Manager or Parameter Store to securely provide credentials to containers without hardcoding the secrets in code or passing them through environment variables.' +CHECK_DOC_extra768='https://docs.aws.amazon.com/AmazonECS/latest/developerguide/specifying-sensitive-data.html' CHECK_CAF_EPIC_extra768='Logging and Monitoring' extra768(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then - # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi for regx in $REGIONS; do - # Get a list of all families first: + # Get a list of all task definition families first: FAMILIES=$($AWSCLI ecs list-task-definition-families $PROFILE_OPT --region $regx --status ACTIVE | jq -r .families[]) if [[ $FAMILIES ]]; then for FAMILY in $FAMILIES;do @@ -60,5 +59,4 @@ extra768(){ textInfo "$regx: No ECS task definitions found" "$regx" fi done -# rm -rf $SECRETS_TEMP_FOLDER } diff --git a/checks/check_extra769 b/checks/check_extra769 index 00e34e00..6127c646 100644 --- a/checks/check_extra769 +++ b/checks/check_extra769 @@ -14,7 +14,7 @@ CHECK_ID_extra769="7.69" CHECK_TITLE_extra769="[extra769] Check if IAM Access Analyzer is enabled and its findings " CHECK_SCORED_extra769="NOT_SCORED" -CHECK_TYPE_extra769="EXTRA" +CHECK_CIS_LEVEL_extra769="EXTRA" CHECK_SEVERITY_extra769="High" CHECK_ALTERNATE_check769="extra769" CHECK_SERVICENAME_extra769="accessanalyzer" diff --git a/checks/check_extra77 b/checks/check_extra77 index d3cc4a50..fcbbd977 100644 --- a/checks/check_extra77 +++ b/checks/check_extra77 @@ -14,7 +14,7 @@ CHECK_ID_extra77="7.7" CHECK_TITLE_extra77="[extra77] Ensure there are no ECR repositories set as Public" CHECK_SCORED_extra77="NOT_SCORED" -CHECK_TYPE_extra77="EXTRA" +CHECK_CIS_LEVEL_extra77="EXTRA" CHECK_SEVERITY_extra77="Critical" CHECK_ALTERNATE_extra707="extra77" CHECK_ALTERNATE_check77="extra77" diff --git a/checks/check_extra770 b/checks/check_extra770 index c1e9694b..e2c3abf7 100644 --- a/checks/check_extra770 +++ b/checks/check_extra770 @@ -13,7 +13,7 @@ CHECK_ID_extra770="7.70" CHECK_TITLE_extra770="[extra770] Check for internet facing EC2 instances with Instance Profiles attached " CHECK_SCORED_extra770="NOT_SCORED" -CHECK_TYPE_extra770="EXTRA" +CHECK_CIS_LEVEL_extra770="EXTRA" CHECK_SEVERITY_extra770="Medium" CHECK_ASFF_RESOURCE_TYPE_extra770="AwsEc2Instance" CHECK_ALTERNATE_check770="extra770" diff --git a/checks/check_extra771 b/checks/check_extra771 index a2236c00..243d6441 100644 --- a/checks/check_extra771 +++ b/checks/check_extra771 @@ -13,7 +13,7 @@ CHECK_ID_extra771="7.71" CHECK_TITLE_extra771="[extra771] Check if S3 buckets have policies which allow WRITE access " CHECK_SCORED_extra771="NOT_SCORED" -CHECK_TYPE_extra771="EXTRA" +CHECK_CIS_LEVEL_extra771="EXTRA" 
CHECK_SEVERITY_extra771="Critical" CHECK_ASFF_RESOURCE_TYPE_extra771="AwsS3Bucket" CHECK_ALTERNATE_check771="extra771" diff --git a/checks/check_extra772 b/checks/check_extra772 index 93b36041..c56a7c32 100644 --- a/checks/check_extra772 +++ b/checks/check_extra772 @@ -13,7 +13,7 @@ CHECK_ID_extra772="7.72" CHECK_TITLE_extra772="[extra772] Check if elastic IPs are unused " CHECK_SCORED_extra772="NOT_SCORED" -CHECK_TYPE_extra772="EXTRA" +CHECK_CIS_LEVEL_extra772="EXTRA" CHECK_SEVERITY_extra772="Low" CHECK_ASFF_RESOURCE_TYPE_extra772="AwsEc2Eip" CHECK_ALTERNATE_check772="extra772" diff --git a/checks/check_extra773 b/checks/check_extra773 index 7c168fcd..12f0ccdf 100644 --- a/checks/check_extra773 +++ b/checks/check_extra773 @@ -13,7 +13,7 @@ CHECK_ID_extra773="7.73" CHECK_TITLE_extra773="[extra773] Check if CloudFront distributions are using WAF " CHECK_SCORED_extra773="NOT_SCORED" -CHECK_TYPE_extra773="EXTRA" +CHECK_CIS_LEVEL_extra773="EXTRA" CHECK_SEVERITY_extra773="Medium" CHECK_ASFF_RESOURCE_TYPE_extra773="AwsCloudFrontDistribution" CHECK_ALTERNATE_check773="extra773" diff --git a/checks/check_extra774 b/checks/check_extra774 index 4fce1afe..9bdc83da 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -13,7 +13,7 @@ CHECK_ID_extra774="7.74" CHECK_TITLE_extra774="[extra774] Ensure credentials unused for 30 days or greater are disabled" CHECK_SCORED_extra774="NOT_SCORED" -CHECK_TYPE_extra774="EXTRA" +CHECK_CIS_LEVEL_extra774="EXTRA" CHECK_SEVERITY_extra774="Medium" CHECK_ASFF_RESOURCE_TYPE_extra774="AwsIamUser" CHECK_ALTERNATE_check774="extra774" diff --git a/checks/check_extra775 b/checks/check_extra775 index 96d0d3b4..bc47823f 100644 --- a/checks/check_extra775 +++ b/checks/check_extra775 @@ -13,7 +13,7 @@ CHECK_ID_extra775="7.75" CHECK_TITLE_extra775="[extra775] Find secrets in EC2 Auto Scaling Launch Configuration " CHECK_SCORED_extra775="NOT_SCORED" -CHECK_TYPE_extra775="EXTRA" +CHECK_CIS_LEVEL_extra775="EXTRA" CHECK_SEVERITY_extra775="Critical" CHECK_ALTERNATE_check775="extra775" CHECK_SERVICENAME_extra775="autoscaling" diff --git a/checks/check_extra776 b/checks/check_extra776 index f234bb0b..97e9b7b1 100644 --- a/checks/check_extra776 +++ b/checks/check_extra776 @@ -28,7 +28,7 @@ CHECK_ID_extra776="7.76" CHECK_TITLE_extra776="[extra776] Check if ECR image scan found vulnerabilities in the newest image version " CHECK_SCORED_extra776="NOT_SCORED" -CHECK_TYPE_extra776="EXTRA" +CHECK_CIS_LEVEL_extra776="EXTRA" CHECK_SEVERITY_extra776="Medium" CHECK_ALTERNATE_check776="extra776" CHECK_SERVICENAME_extra776="ecr" diff --git a/checks/check_extra777 b/checks/check_extra777 index f79d907e..4cfc026a 100644 --- a/checks/check_extra777 +++ b/checks/check_extra777 @@ -17,7 +17,7 @@ CHECK_ID_extra777="7.77" CHECK_TITLE_extra777="[extra777] Find VPC security groups with many ingress or egress rules " CHECK_SCORED_extra777="NOT_SCORED" -CHECK_TYPE_extra777="EXTRA" +CHECK_CIS_LEVEL_extra777="EXTRA" CHECK_SEVERITY_extra777="Medium" CHECK_ASFF_RESOURCE_TYPE_extra777="AwsEc2SecurityGroup" CHECK_ALTERNATE_check777="extra777" diff --git a/checks/check_extra778 b/checks/check_extra778 index 63cb12cd..0966ee74 100644 --- a/checks/check_extra778 +++ b/checks/check_extra778 @@ -14,7 +14,7 @@ CHECK_ID_extra778="7.78" CHECK_TITLE_extra778="[extra778] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) " CHECK_SCORED_extra778="NOT_SCORED" -CHECK_TYPE_extra778="EXTRA" +CHECK_CIS_LEVEL_extra778="EXTRA" CHECK_SEVERITY_extra778="Medium" 
CHECK_ASFF_RESOURCE_TYPE_extra778="AwsEc2SecurityGroup" CHECK_ALTERNATE_check778="extra778" diff --git a/checks/check_extra779 b/checks/check_extra779 index 79e0bf2c..ccb95abe 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -13,7 +13,7 @@ CHECK_ID_extra779="7.79" CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports" CHECK_SCORED_extra779="NOT_SCORED" -CHECK_TYPE_extra779="EXTRA" +CHECK_CIS_LEVEL_extra779="EXTRA" CHECK_SEVERITY_extra779="High" CHECK_ASFF_RESOURCE_TYPE_extra779="AwsEc2SecurityGroup" CHECK_ALTERNATE_check779="extra779" diff --git a/checks/check_extra78 b/checks/check_extra78 index bdabda6d..ce5e34ef 100644 --- a/checks/check_extra78 +++ b/checks/check_extra78 @@ -13,7 +13,7 @@ CHECK_ID_extra78="7.8" CHECK_TITLE_extra78="[extra78] Ensure there are no Public Accessible RDS instances" CHECK_SCORED_extra78="NOT_SCORED" -CHECK_TYPE_extra78="EXTRA" +CHECK_CIS_LEVEL_extra78="EXTRA" CHECK_SEVERITY_extra78="Critical" CHECK_ASFF_RESOURCE_TYPE_extra78="AwsRdsDbInstance" CHECK_ALTERNATE_extra708="extra78" diff --git a/checks/check_extra780 b/checks/check_extra780 index a2402595..d835d626 100644 --- a/checks/check_extra780 +++ b/checks/check_extra780 @@ -13,7 +13,7 @@ CHECK_ID_extra780="7.80" CHECK_TITLE_extra780="[extra780] Check if Amazon Elasticsearch Service (ES) domains has Amazon Cognito authentication for Kibana enabled" CHECK_SCORED_extra780="NOT_SCORED" -CHECK_TYPE_extra780="EXTRA" +CHECK_CIS_LEVEL_extra780="EXTRA" CHECK_SEVERITY_extra780="High" CHECK_ASFF_RESOURCE_TYPE_extra780="AwsElasticsearchDomain" CHECK_ALTERNATE_check780="extra780" diff --git a/checks/check_extra781 b/checks/check_extra781 index 69fe516f..38a4899a 100644 --- a/checks/check_extra781 +++ b/checks/check_extra781 @@ -13,7 +13,7 @@ CHECK_ID_extra781="7.81" CHECK_TITLE_extra781="[extra781] Check if Amazon Elasticsearch Service (ES) domains has encryption at-rest enabled" CHECK_SCORED_extra781="NOT_SCORED" -CHECK_TYPE_extra781="EXTRA" +CHECK_CIS_LEVEL_extra781="EXTRA" CHECK_SEVERITY_extra781="Medium" CHECK_ASFF_RESOURCE_TYPE_extra781="AwsElasticsearchDomain" CHECK_ALTERNATE_check781="extra781" diff --git a/checks/check_extra782 b/checks/check_extra782 index 5ec70206..f6babb04 100644 --- a/checks/check_extra782 +++ b/checks/check_extra782 @@ -13,7 +13,7 @@ CHECK_ID_extra782="7.82" CHECK_TITLE_extra782="[extra782] Check if Amazon Elasticsearch Service (ES) domains has node-to-node encryption enabled" CHECK_SCORED_extra782="NOT_SCORED" -CHECK_TYPE_extra782="EXTRA" +CHECK_CIS_LEVEL_extra782="EXTRA" CHECK_SEVERITY_extra782="Medium" CHECK_ASFF_RESOURCE_TYPE_extra782="AwsElasticsearchDomain" CHECK_ALTERNATE_check782="extra782" diff --git a/checks/check_extra783 b/checks/check_extra783 index 612e59ff..ec8005e2 100644 --- a/checks/check_extra783 +++ b/checks/check_extra783 @@ -13,7 +13,7 @@ CHECK_ID_extra783="7.83" CHECK_TITLE_extra783="[extra783] Check if Amazon Elasticsearch Service (ES) domains has enforce HTTPS enabled" CHECK_SCORED_extra783="NOT_SCORED" -CHECK_TYPE_extra783="EXTRA" +CHECK_CIS_LEVEL_extra783="EXTRA" CHECK_SEVERITY_extra783="Medium" CHECK_ASFF_RESOURCE_TYPE_extra783="AwsElasticsearchDomain" CHECK_ALTERNATE_check783="extra783" diff --git a/checks/check_extra784 b/checks/check_extra784 index d5426628..3a200968 100644 --- a/checks/check_extra784 +++ b/checks/check_extra784 @@ -13,7 +13,7 @@ CHECK_ID_extra784="7.84" CHECK_TITLE_extra784="[extra784] Check if Amazon Elasticsearch Service (ES) domains internal user 
database enabled" CHECK_SCORED_extra784="NOT_SCORED" -CHECK_TYPE_extra784="EXTRA" +CHECK_CIS_LEVEL_extra784="EXTRA" CHECK_SEVERITY_extra784="Medium" CHECK_ASFF_RESOURCE_TYPE_extra784="AwsElasticsearchDomain" CHECK_ALTERNATE_check784="extra784" @@ -30,9 +30,9 @@ extra784(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_INTERNALDB_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.AdvancedSecurityOptions.InternalUserDatabaseEnabled' --output text|grep -i true) if [[ $CHECK_IF_INTERNALDB_ENABLED ]];then - textPass "$regx: Amazon ES domain $domain has internal user database enabled" "$regx" "$domain" + textFail "$regx: Amazon ES domain $domain has internal user database enabled" "$regx" "$domain" else - textFail "$regx: Amazon ES domain $domain does not have internal user database enabled" "$regx" "$domain" + textPass "$regx: Amazon ES domain $domain does not have internal user database enabled" "$regx" "$domain" fi done else diff --git a/checks/check_extra785 b/checks/check_extra785 index 62139103..e47e0a6f 100644 --- a/checks/check_extra785 +++ b/checks/check_extra785 @@ -13,7 +13,7 @@ CHECK_ID_extra785="7.85" CHECK_TITLE_extra785="[extra785] Check if Amazon Elasticsearch Service (ES) domains have updates available" CHECK_SCORED_extra785="NOT_SCORED" -CHECK_TYPE_extra785="EXTRA" +CHECK_CIS_LEVEL_extra785="EXTRA" CHECK_SEVERITY_extra785="Low" CHECK_ASFF_RESOURCE_TYPE_extra785="AwsElasticsearchDomain" CHECK_ALTERNATE_check785="extra785" diff --git a/checks/check_extra786 b/checks/check_extra786 index 87a33ecf..11643e4a 100644 --- a/checks/check_extra786 +++ b/checks/check_extra786 @@ -13,7 +13,7 @@ CHECK_ID_extra786="7.86" CHECK_TITLE_extra786="[extra786] Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required " CHECK_SCORED_extra786="NOT_SCORED" -CHECK_TYPE_extra786="EXTRA" +CHECK_CIS_LEVEL_extra786="EXTRA" CHECK_SEVERITY_extra786="Medium" CHECK_ASFF_RESOURCE_TYPE_extra786="AwsEc2Instance" CHECK_ALTERNATE_check786="extra786" diff --git a/checks/check_extra787 b/checks/check_extra787 index a8aeaf51..ef61f44f 100644 --- a/checks/check_extra787 +++ b/checks/check_extra787 @@ -13,7 +13,7 @@ CHECK_ID_extra787="7.87" CHECK_TITLE_extra787="[extra787] Check connection and authentication for Internet exposed Elasticsearch/Kibana ports" CHECK_SCORED_extra787="NOT_SCORED" -CHECK_TYPE_extra787="EXTRA" +CHECK_CIS_LEVEL_extra787="EXTRA" CHECK_SEVERITY_extra787="Critical" CHECK_ASFF_RESOURCE_TYPE_extra787="AwsEc2Instance" CHECK_ALTERNATE_check787="extra787" diff --git a/checks/check_extra788 b/checks/check_extra788 index 8d0655c1..b407ccac 100644 --- a/checks/check_extra788 +++ b/checks/check_extra788 @@ -13,7 +13,7 @@ CHECK_ID_extra788="7.88" CHECK_TITLE_extra788="[extra788] Check connection and authentication for Internet exposed Amazon Elasticsearch Service (ES) domains" CHECK_SCORED_extra788="NOT_SCORED" -CHECK_TYPE_extra788="EXTRA" +CHECK_CIS_LEVEL_extra788="EXTRA" CHECK_SEVERITY_extra788="Critical" CHECK_ASFF_RESOURCE_TYPE_extra788="AwsElasticsearchDomain" CHECK_ALTERNATE_check788="extra788" diff --git a/checks/check_extra789 b/checks/check_extra789 index ed014307..66fe6250 100644 --- a/checks/check_extra789 +++ b/checks/check_extra789 @@ -14,7 +14,7 @@ CHECK_ID_extra789="7.89" CHECK_TITLE_extra789="[extra789] Find trust boundaries in VPC endpoint services connections" CHECK_SCORED_extra789="NOT_SCORED" -CHECK_TYPE_extra789="EXTRA" +CHECK_CIS_LEVEL_extra789="EXTRA" CHECK_SEVERITY_extra789="Medium" 
CHECK_ASFF_RESOURCE_TYPE_extra789="AwsEc2Vpc" CHECK_ALTERNATE_extra789="extra789" diff --git a/checks/check_extra79 b/checks/check_extra79 index ffad816f..894dd0f9 100644 --- a/checks/check_extra79 +++ b/checks/check_extra79 @@ -13,7 +13,7 @@ CHECK_ID_extra79="7.9" CHECK_TITLE_extra79="[extra79] Check for internet facing Elastic Load Balancers" CHECK_SCORED_extra79="NOT_SCORED" -CHECK_TYPE_extra79="EXTRA" +CHECK_CIS_LEVEL_extra79="EXTRA" CHECK_SEVERITY_extra79="Medium" CHECK_ASFF_RESOURCE_TYPE_extra79="AwsElbLoadBalancer" CHECK_ALTERNATE_extra709="extra79" diff --git a/checks/check_extra790 b/checks/check_extra790 index e31f73cc..f2680c78 100644 --- a/checks/check_extra790 +++ b/checks/check_extra790 @@ -14,7 +14,7 @@ CHECK_ID_extra790="7.90" CHECK_TITLE_extra790="[extra790] Find trust boundaries in VPC endpoint services whitelisted principles" CHECK_SCORED_extra790="NOT_SCORED" -CHECK_TYPE_extra790="EXTRA" +CHECK_CIS_LEVEL_extra790="EXTRA" CHECK_SEVERITY_extra790="Medium" CHECK_ASFF_RESOURCE_TYPE_extra790="AwsEc2Vpc" CHECK_ALTERNATE_extra790="extra790" diff --git a/checks/check_extra791 b/checks/check_extra791 index 99ccac1a..22f1ee83 100644 --- a/checks/check_extra791 +++ b/checks/check_extra791 @@ -13,7 +13,7 @@ CHECK_ID_extra791="7.91" CHECK_TITLE_extra791="[extra791] Check if CloudFront distributions are using deprecated SSL protocols" CHECK_SCORED_extra791="NOT_SCORED" -CHECK_TYPE_extra791="EXTRA" +CHECK_CIS_LEVEL_extra791="EXTRA" CHECK_SEVERITY_extra791="Medium" CHECK_ASFF_RESOURCE_TYPE_extra791="AwsCloudFrontDistribution" CHECK_ALTERNATE_check791="extra791" diff --git a/checks/check_extra792 b/checks/check_extra792 index c9906104..04b36bf3 100644 --- a/checks/check_extra792 +++ b/checks/check_extra792 @@ -13,7 +13,7 @@ CHECK_ID_extra792="7.92" CHECK_TITLE_extra792="[extra792] Check if Elastic Load Balancers have insecure SSL ciphers " CHECK_SCORED_extra792="NOT_SCORED" -CHECK_TYPE_extra792="EXTRA" +CHECK_CIS_LEVEL_extra792="EXTRA" CHECK_SEVERITY_extra792="Medium" CHECK_ASFF_RESOURCE_TYPE_extra792="AwsElbLoadBalancer" CHECK_ALTERNATE_check792="extra792" diff --git a/checks/check_extra793 b/checks/check_extra793 index 42b3bf36..413a8a47 100644 --- a/checks/check_extra793 +++ b/checks/check_extra793 @@ -13,7 +13,7 @@ CHECK_ID_extra793="7.93" CHECK_TITLE_extra793="[extra793] Check if Elastic Load Balancers have SSL listeners " CHECK_SCORED_extra793="NOT_SCORED" -CHECK_TYPE_extra793="EXTRA" +CHECK_CIS_LEVEL_extra793="EXTRA" CHECK_SEVERITY_extra793="Medium" CHECK_ASFF_RESOURCE_TYPE_extra793="AwsElbLoadBalancer" CHECK_ALTERNATE_check793="extra793" diff --git a/checks/check_extra794 b/checks/check_extra794 index 347e4809..ef6f2aee 100644 --- a/checks/check_extra794 +++ b/checks/check_extra794 @@ -13,7 +13,7 @@ CHECK_ID_extra794="7.94" CHECK_TITLE_extra794="[extra794] Ensure EKS Control Plane Audit Logging is enabled for all log types" CHECK_SCORED_extra794="NOT_SCORED" -CHECK_TYPE_extra794="EXTRA" +CHECK_CIS_LEVEL_extra794="EXTRA" CHECK_SEVERITY_extra794="Medium" CHECK_ASFF_RESOURCE_TYPE_extra794="AwsEksCluster" CHECK_ALTERNATE_check794="extra794" diff --git a/checks/check_extra795 b/checks/check_extra795 index d28e0f4e..d71370e0 100644 --- a/checks/check_extra795 +++ b/checks/check_extra795 @@ -13,7 +13,7 @@ CHECK_ID_extra795="7.95" CHECK_TITLE_extra795="[extra795] Ensure EKS Clusters are created with Private Endpoint Enabled and Public Access Disabled" CHECK_SCORED_extra795="NOT_SCORED" -CHECK_TYPE_extra795="EXTRA" +CHECK_CIS_LEVEL_extra795="EXTRA" CHECK_SEVERITY_extra795="High" 
CHECK_ASFF_RESOURCE_TYPE_extra795="AwsEksCluster" CHECK_ALTERNATE_check795="extra795" diff --git a/checks/check_extra796 b/checks/check_extra796 index 5351d4e8..34789c74 100644 --- a/checks/check_extra796 +++ b/checks/check_extra796 @@ -13,7 +13,7 @@ CHECK_ID_extra796="7.96" CHECK_TITLE_extra796="[extra796] Restrict Access to the EKS Control Plane Endpoint" CHECK_SCORED_extra796="NOT_SCORED" -CHECK_TYPE_extra796="EXTRA" +CHECK_CIS_LEVEL_extra796="EXTRA" CHECK_SEVERITY_extra796="High" CHECK_ASFF_RESOURCE_TYPE_extra796="AwsEksCluster" CHECK_ALTERNATE_check796="extra796" diff --git a/checks/check_extra797 b/checks/check_extra797 index 7576292b..473c5376 100644 --- a/checks/check_extra797 +++ b/checks/check_extra797 @@ -13,7 +13,7 @@ CHECK_ID_extra797="7.97" CHECK_TITLE_extra797="[extra797] Ensure Kubernetes Secrets are encrypted using Customer Master Keys (CMKs)" CHECK_SCORED_extra797="NOT_SCORED" -CHECK_TYPE_extra797="EXTRA" +CHECK_CIS_LEVEL_extra797="EXTRA" CHECK_SEVERITY_extra797="Medium" CHECK_ASFF_RESOURCE_TYPE_extra797="AwsEksCluster" CHECK_ALTERNATE_check797="extra797" diff --git a/checks/check_extra798 b/checks/check_extra798 index d14799f1..1d8e00ef 100644 --- a/checks/check_extra798 +++ b/checks/check_extra798 @@ -14,7 +14,7 @@ CHECK_ID_extra798="7.98" CHECK_TITLE_extra798="[extra798] Check if Lambda functions have resource-based policy set as Public" CHECK_SCORED_extra798="NOT_SCORED" -CHECK_TYPE_extra798="EXTRA" +CHECK_CIS_LEVEL_extra798="EXTRA" CHECK_SEVERITY_extra798="Critical" CHECK_ASFF_RESOURCE_TYPE_extra798="AwsLambdaFunction" CHECK_ALTERNATE_check798="extra798" diff --git a/checks/check_extra799 b/checks/check_extra799 index c028df95..b96f062a 100644 --- a/checks/check_extra799 +++ b/checks/check_extra799 @@ -14,7 +14,7 @@ CHECK_ID_extra799="7.99" CHECK_TITLE_extra799="[extra799] Check if Security Hub is enabled and its standard subscriptions" CHECK_SCORED_extra799="NOT_SCORED" -CHECK_TYPE_extra799="EXTRA" +CHECK_CIS_LEVEL_extra799="EXTRA" CHECK_SEVERITY_extra799="High" CHECK_ASFF_RESOURCE_TYPE_extra799="AwsSecurityHubHub" CHECK_ALTERNATE_check799="extra799" diff --git a/checks/check_sample b/checks/check_sample index 2b16c752..b7b284bf 100644 --- a/checks/check_sample +++ b/checks/check_sample @@ -27,7 +27,7 @@ # CHECK_ID_checkN="N.N" # CHECK_TITLE_checkN="[checkN] Description " # CHECK_SCORED_checkN="NOT_SCORED" -# CHECK_TYPE_checkN="EXTRA" +# CHECK_CIS_LEVEL_checkN="EXTRA" # CHECK_SEVERITY_check="Medium" # CHECK_ASFF_RESOURCE_TYPE_checkN="AwsAccount" # Choose appropriate value from https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources # CHECK_ALTERNATE_checkN="extraN" diff --git a/groups/group1_iam b/groups/group1_iam index 910897ea..86da563c 100644 --- a/groups/group1_iam +++ b/groups/group1_iam @@ -12,4 +12,4 @@ GROUP_ID[1]='group1' GROUP_NUMBER[1]='1.0' GROUP_TITLE[1]='Identity and Access Management - CIS only - [group1] ***********' GROUP_RUN_BY_DEFAULT[1]='Y' # run it when execute_all is called -GROUP_CHECKS[1]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check114,check115,check116,check117,check118,check119,check120,check121,check122,extra774' +GROUP_CHECKS[1]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check114,check115,check116,check117,check118,check119,check120,check121,check122' diff --git a/groups/group25_FTR b/groups/group25_FTR new file mode 100644 index 00000000..7ca694f7 --- 
/dev/null +++ b/groups/group25_FTR @@ -0,0 +1,46 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +GROUP_ID[25]='ftr' +GROUP_NUMBER[25]='25.0' +GROUP_TITLE[25]='Amazon FTR related security checks - [ftr] ******************' +GROUP_RUN_BY_DEFAULT[25]='N' # run it when execute_all is called +GROUP_CHECKS[25]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check117,check118,check122,check21,check22,extra759,extra760,extra768,extra775,extra797,extra7141,extra73' + +# Checks from AWS FTR https://apn-checklists.s3.amazonaws.com/foundational/partner-hosted/partner-hosted/CVLHEC5X7.html +# 1.1 [check11] Avoid the use of the root account - iam [High] +# 1.2 [check12] Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password - iam [High] +# 1.3 [check13] Ensure credentials unused for 90 days or greater are disabled - iam [Medium] +# 1.4 [check14] Ensure access keys are rotated every 90 days or less - iam [Medium] +# 1.5 [check15] Ensure IAM password policy requires at least one uppercase letter - iam [Medium] +# 1.6 [check16] Ensure IAM password policy require at least one lowercase letter - iam [Medium] +# 1.7 [check17] Ensure IAM password policy require at least one symbol - iam [Medium] +# 1.8 [check18] Ensure IAM password policy require at least one number - iam [Medium] +# 1.9 [check19] Ensure IAM password policy requires minimum length of 14 or greater - iam [Medium] +# 1.10 [check110] Ensure IAM password policy prevents password reuse: 24 or greater - iam [Medium] +# 1.11 [check111] Ensure IAM password policy expires passwords within 90 days or less - iam [Medium] +# 1.12 [check112] Ensure no root account access key exists - iam [Critical] +# 1.13 [check113] Ensure MFA is enabled for the root account - iam [Critical] +# 1.17 [check117] Maintain current contact details - support [Medium] +# 1.18 [check118] Ensure security contact information is registered - support [Medium] +# 1.22 [check122] Ensure IAM policies that allow full "*:*" administrative privileges are not created - iam [Medium] +# 2.1 [check21] Ensure CloudTrail is enabled in all regions - cloudtrail [High] +# 2.2 [check22] Ensure CloudTrail log file validation is enabled - cloudtrail [Medium] +# 7.59 [extra759] Find secrets in Lambda functions variables - lambda [Critical] +# 7.60 [extra760] Find secrets in Lambda functions code - lambda [Critical] +# 7.68 [extra768] Find secrets in ECS task definitions variables - ecs [Critical] +# 7.75 [extra775] Find secrets in EC2 Auto Scaling Launch Configuration - autoscaling [Critical] +# 7.97 [extra797] Ensure Kubernetes Secrets are encrypted using Customer Master Keys (CMKs) - eks [Medium] +# 7.141 [extra7141] Find secrets in SSM Documents - ssm [Critical] +# 7.3 [extra73] Ensure there are no S3 buckets open to Everyone or Any AWS user - s3 [Critical] + diff --git a/groups/group7_extras
b/groups/group7_extras index 5c2b7010..60cffc06 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -15,7 +15,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' GROUP_TITLE[7]='Extras - all non CIS specific checks - [extras] ****************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called -GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786,extra787,extra788,extra791,extra792,extra793,extra794,extra795,extra796,extra797,extra798,extra799,extra7100,extra7101,extra7102,extra7103,extra7104,extra7105,extra7106,extra7107,extra7108,extra7109,extra7110,extra7111,extra7112,extra7113,extra7114,extra7115,extra7116,extra7117,extra7118,extra7119,extra7120,extra7121,extra7122,extra7123,extra7124,extra7125,extra7126,extra7127,extra7128,extra7129,extra7130,extra7131,extra7132,extra7133,extra7134,extra7135,extra7136,extra7137,extra7138,extra7139,extra7140,extra7141,extra7142,extra7143,extra7144,extra7145,extra7146,extra7147' +GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786,extra787,extra788,extra791,extra792,extra793,extra794,extra795,extra796,extra797,extra798,extra799,extra7100,extra7101,extra7102,extra7103,extra7104,extra7105,extra7106,extra7107,extra7108,extra7109,extra7110,extra7111,extra7112,extra7113,extra7114,extra7115,extra7116,extra7117,extra7118,extra7119,extra7120,extra7121,extra7122,extra7123,extra7124,extra7125,extra7126,extra7127,extra7128,extra7129,extra7130,extra7131,extra7132,extra7133,extra7134,extra7135,extra7136,extra7137,extra7138,extra7139,extra7140,extra7141,extra7142,extra7143,extra7144,extra7145,extra7146,extra7147,extra7148,extra7149,extra7150,extra7151,extra7152,extra7153,extra7154,extra7155,extra7156,extra7157,extra7158,extra7159' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` @@ -23,5 +23,3 @@ GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78 # Extras 789 and 790 VPC trust boundaries are not included by default in Extras # to run trust-boundaries use `./prowler -g trustboundaries` # read more in https://github.com/toniblyx/prowler/#trust-boundaries-checks - - diff --git 
a/include/assume_role b/include/assume_role index 173e44f5..75521329 100644 --- a/include/assume_role +++ b/include/assume_role @@ -26,22 +26,29 @@ assume_role(){ # temporary file where to store credentials TEMP_STS_ASSUMED_FILE=$(mktemp -t prowler.sts_assumed-XXXXXX) + # check if role arn or role name + if [[ $ROLE_TO_ASSUME == arn:* ]]; then + PROWLER_ROLE=$ROLE_TO_ASSUME + else + PROWLER_ROLE=arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME + fi + #Check if external ID has bee provided if so execute with external ID if not ignore if [[ -z $ROLE_EXTERNAL_ID ]]; then # assume role command - $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + $AWSCLI $PROFILE_OPT sts assume-role --role-arn $PROWLER_ROLE \ --role-session-name ProwlerAssessmentSession \ --region $REGION_FOR_STS \ --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE 2>&1 else - $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + $AWSCLI $PROFILE_OPT sts assume-role --role-arn $PROWLER_ROLE \ --role-session-name ProwlerAssessmentSession \ --duration-seconds $SESSION_DURATION_TO_ASSUME \ --region $REGION_FOR_STS \ --external-id $ROLE_EXTERNAL_ID > $TEMP_STS_ASSUMED_FILE 2>&1 fi if [[ $(grep AccessDenied $TEMP_STS_ASSUMED_FILE) ]]; then - textFail "Access Denied assuming role arn:${AWS_PARTITION}:iam::${ACCOUNT_TO_ASSUME}:role/${ROLE_TO_ASSUME}" + textFail "Access Denied assuming role $PROWLER_ROLE" rm -f $TEMP_STS_ASSUMED_FILE EXITCODE=1 exit $EXITCODE diff --git a/include/html_report b/include/html_report index c1097470..cf7218a6 100644 --- a/include/html_report +++ b/include/html_report @@ -137,6 +137,7 @@ addHtmlHeader() { CheckID Check Title Check Output + CIS Level CAF Epic Risk Remediation diff --git a/include/outputs b/include/outputs index 830404d2..e649a4d2 100644 --- a/include/outputs +++ b/include/outputs @@ -36,6 +36,8 @@ if [[ $OUTPUT_DIR_CUSTOM ]]; then fi if [ -z ${OUTPUT_FILE_NAME+x} ]; then OUTPUT_FILE_NAME="${OUTPUT_DIR}/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}" +else + OUTPUT_FILE_NAME="${OUTPUT_DIR}/$OUTPUT_FILE_NAME" fi HTML_LOGO_URL="https://github.com/toniblyx/prowler/" HTML_LOGO_IMG="https://github.com/toniblyx/prowler/raw/2.4/util/html/prowler-logo-new.png" @@ -50,7 +52,7 @@ PROWLER_PARAMETERS=$@ # $TITLE_ID Numeric identifier of each check (1.2, 2.3, etc), originally based on CIS checks. # $CHECK_RESULT values can be PASS, FAIL, INFO or WARNING if whitelisted # $ITEM_SCORED corresponds to CHECK_SCORED, values can be Scored/Not Scored. This is CIS only, will be deprecated in Prowler. -# $ITEM_LEVEL corresponds to CHECK_TYPE_ currently only for CIS Level 1, CIS Level 2 and Extras (all checks not part of CIS) +# $ITEM_CIS_LEVEL corresponds to CHECK_TYPE_ currently only for CIS Level 1, CIS Level 2 and Extras (all checks not part of CIS) # $TITLE_TEXT corresponds to CHECK_TITLE_ shows title of each check # $CHECK_RESULT_EXTENDED shows response of each check per resource like sg-123438 is open! 
# $CHECK_ASFF_COMPLIANCE_TYPE specify type from taxonomy https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-type-taxonomy.html @@ -70,6 +72,9 @@ if [[ $MODE ]];then addHtmlHeader > ${OUTPUT_FILE_NAME}.$EXTENSION_HTML HTML_REPORT_INIT="1" fi + if [[ "${MODES[@]}" =~ "csv" ]]; then + printCsvHeader + fi fi # textInfo "HTML report will be saved: ${OUTPUT_FILE_NAME}.$EXTENSION_HTML" @@ -97,7 +102,7 @@ textPass(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID${SEP}$PROWLER_START_TIME" >> ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_CIS_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID${SEP}$PROWLER_START_TIME" >> ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then generateJsonOutput "$1" "Pass" "$CHECK_RESOURCE_ID" >> ${OUTPUT_FILE_NAME}.$EXTENSION_JSON @@ -139,7 +144,7 @@ textInfo(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID${SEP}$PROWLER_START_TIME" >> ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_CIS_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID${SEP}$PROWLER_START_TIME" >> ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then generateJsonOutput "$1" "Info" "$CHECK_RESOURCE_ID" >> ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} @@ -181,7 +186,12 @@ textFail(){ # only set non-0 exit code on FAIL mode, WARN is ok if [[ "$level" == "FAIL" ]]; then FAIL_COUNTER=$((FAIL_COUNTER+1)) - EXITCODE=3 + if [ "$FAILED_CHECK_FAILED_SCAN" == 1 ] && [ -z "$FAILED_CHECK_FAILED_SCAN_LIST" ] ; then + EXITCODE=3 + fi + if [[ "${FAILED_CHECK_FAILED_SCAN_LIST[@]}" =~ "$CHECK_NAME" ]]; then + EXITCODE=3 + fi fi CHECK_RESULT=$level @@ -195,7 +205,7 @@ textFail(){ fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo 
"$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID${SEP}$PROWLER_START_TIME" >> ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_CIS_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID${SEP}$PROWLER_START_TIME" >> ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then generateJsonOutput "$1" "${level}" "$CHECK_RESOURCE_ID">> ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} @@ -234,28 +244,16 @@ textTitle(){ fi TITLE_TEXT=$2 - CHECK_SERVICENAME="$MAGENTA$3$NORMAL" + local CHECK_SERVICENAME="$MAGENTA$3$NORMAL" local CHECK_SEVERITY="$BROWN[$4]$NORMAL" - # case "$3" in - # 0|No|NOT_SCORED) - # ITEM_SCORED="Not Scored" - # ;; - # 1|Yes|SCORED) - # ITEM_SCORED="Scored" - # ;; - # *) - # ITEM_SCORED="Unspecified" - # ;; - # esac - - # case "$4" in - # LEVEL1) ITEM_LEVEL="Level 1";; - # LEVEL2) ITEM_LEVEL="Level 2";; - # EXTRA) ITEM_LEVEL="Extra";; - # SUPPORT) ITEM_LEVEL="Support";; - # *) ITEM_LEVEL="Unspecified or Invalid";; - # esac + case "$6" in + LEVEL1) ITEM_CIS_LEVEL="CIS Level 1";; + LEVEL2) ITEM_CIS_LEVEL="CIS Level 2";; + EXTRA) ITEM_CIS_LEVEL="Extra";; + SUPPORT) ITEM_CIS_LEVEL="Support";; + *) ITEM_CIS_LEVEL="Unspecified or Invalid";; + esac local group_ids # if [[ -n "$4" ]]; then @@ -288,7 +286,7 @@ generateJsonOutput(){ --arg STATUS "$status" \ --arg SEVERITY "$(echo $CHECK_SEVERITY | sed 's/[][]//g')" \ --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ + --arg ITEM_CIS_LEVEL "$ITEM_CIS_LEVEL" \ --arg TITLE_ID "$TITLE_ID" \ --arg REPREGION "$REPREGION" \ --arg TYPE "$CHECK_ASFF_COMPLIANCE_TYPE" \ @@ -307,7 +305,7 @@ generateJsonOutput(){ "Severity": $SEVERITY, "Status": $STATUS, "Scored": $SCORED, - "Level": $ITEM_LEVEL, + "Level": $ITEM_CIS_LEVEL, "Control ID": $TITLE_ID, "Region": $REPREGION, "Timestamp": $TIMESTAMP, @@ -422,6 +420,7 @@ generateHtmlOutput(){ echo ' '$TITLE_ID'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML echo ' '$TITLE_TEXT'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML echo ' '$message'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML + echo ' '$ITEM_CIS_LEVEL'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML echo ' '$CHECK_CAF_EPIC'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML echo '

'$CHECK_RISK'

' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML echo '

'$CHECK_REMEDIATION'

' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML diff --git a/include/scoring b/include/scoring index 3023d395..bf258d2e 100644 --- a/include/scoring +++ b/include/scoring @@ -23,7 +23,12 @@ scoring(){ # TOTAL_RESOURCES=$(awk "BEGIN {print $FAIL_COUNTER+$PASS_COUNTER; exit}") TOTAL_RESOURCES=$(($FAIL_COUNTER + $PASS_COUNTER)) # Score is % of passed compared to failures. The higher the better - PROWLER_SCORE=$(( $PASS_COUNTER * 100 / $TOTAL_RESOURCES )) + if [[ $PASS_COUNTER == "0" ]]; then + PROWLER_SCORE="0" + else + PROWLER_SCORE=$(( $PASS_COUNTER * 100 / $TOTAL_RESOURCES )) + + fi if [[ $SCORING == "1" ]]; then echo -e "$BLUE------------------------------------------------------------------ $NORMAL" diff --git a/include/securityhub_integration b/include/securityhub_integration index cf03cb0a..74a1c35d 100644 --- a/include/securityhub_integration +++ b/include/securityhub_integration @@ -47,7 +47,7 @@ resolveSecurityHubPreviousFails(){ local check="$1" NEW_TIMESTAMP=$(get_iso8601_timestamp) - FILTER="{\"GeneratorId\":[{\"Value\": \"prowler-$check\",\"Comparison\":\"EQUALS\"}],\"RecordState\":[{\"Value\": \"ACTIVE\",\"Comparison\":\"EQUALS\"}]}" + FILTER="{\"GeneratorId\":[{\"Value\": \"prowler-$check\",\"Comparison\":\"EQUALS\"}],\"RecordState\":[{\"Value\": \"ACTIVE\",\"Comparison\":\"EQUALS\"}],\"AwsAccountId\":[{\"Value\": \"$ACCOUNT_NUM\",\"Comparison\":\"EQUALS\"}]}" NEW_FINDING_IDS=$(echo -n "${SECURITYHUB_NEW_FINDINGS_IDS[@]}" | jq -cRs 'split(" ")') SECURITY_HUB_PREVIOUS_FINDINGS=$($AWSCLI securityhub --region "$regx" $PROFILE_OPT get-findings --filters "${FILTER}" | jq -c --argjson ids "$NEW_FINDING_IDS" --arg updated_at $NEW_TIMESTAMP '[ .Findings[] | select( .Id| first(select($ids[] == .)) // false | not) | .RecordState = "ARCHIVED" | .UpdatedAt = $updated_at ]') diff --git a/include/whoami b/include/whoami index a7c6256e..eb61f6b6 100644 --- a/include/whoami +++ b/include/whoami @@ -53,7 +53,6 @@ USER_ID=$(echo $GETCALLER | jq -r '.UserId' 2>&1) AWS_PARTITION=$(echo $CALLER_ARN| cut -d: -f2) getWhoami(){ - if [[ "$MODE" == "csv" ]]; then if [[ 255 -eq $? ]]; then # Failed to get own identity ... exit @@ -62,7 +61,6 @@ getWhoami(){ EXITCODE=2 exit $EXITCODE fi - printCsvHeader # textTitle "0.0" "Show report generation info" "NOT_SCORED" "SUPPORT" # textInfo "ARN: $CALLER_ARN TIMESTAMP: $SCRIPT_START_TIME" elif [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then diff --git a/prowler b/prowler index 879f73b9..338ff7c2 100755 --- a/prowler +++ b/prowler @@ -32,7 +32,7 @@ OPTRED="" OPTNORMAL="" # Set the defaults variables -PROWLER_VERSION=2.5.0-12August2021 +PROWLER_VERSION=2.6.0-12November2021 PROWLER_DIR=$(dirname "$0") REGION="" @@ -45,6 +45,7 @@ SEP=',' KEEPCREDREPORT=0 EXITCODE=0 SEND_TO_SECURITY_HUB=0 +FAILED_CHECK_FAILED_SCAN=1 PROWLER_START_TIME=$( date -u +"%Y-%m-%dT%H:%M:%S%z" ) TITLE_ID="" TITLE_TEXT="CALLER ERROR - UNSET TITLE" @@ -71,6 +72,8 @@ USAGE: (i.e.: us-east-1), all regions are checked anyway if the check requires it -c specify one or multiple check ids separated by commas, to see all available checks use "-l" option (i.e.: "check11" for check 1.1 or "extra71,extra72" for extra check 71 and extra check 72) + -C Checklist file. See checklist.txt for reference and format. 
+ (i.e.: checklist.txt) -g specify a group of checks by id, to see all available group of checks use "-L" (i.e.: "group3" for entire section 3, "cislevel1" for CIS Level 1 Profile Definitions or "forensics-ready") -f specify an AWS region to run checks against @@ -89,29 +92,32 @@ USAGE: -s show scoring report -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") -x specify external directory with custom checks (i.e. /my/own/checks, files must start by "check") - -q suppress info messages and passing test output + -q get only FAIL findings, will show WARNINGS when a resource is excluded -A account id for the account where to assume a role, requires -R and -T (i.e.: 123456789012) - -R role name to assume in the account, requires -A and -T + -R role name or role arn to assume in the account, requires -A and -T (i.e.: ProwlerRole) -T session duration given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T (i.e.: 43200) -I External ID to be used when assuming roles (not mandatory), requires -A and -R -w whitelist file. See whitelist_sample.txt for reference and format (i.e.: whitelist_sample.txt) - -N Shoadan API key used by check extra7102. + -N Shodan API key used by check extra7102. -o Custom output directory, if not specified will use default prowler/output, requires -M (i.e.: -M csv -o /tmp/reports/) -B Custom output bucket, requires -M and it can work also with -o flag. (i.e.: -M csv -B my-bucket or -M csv -B my-bucket/folder/) -F Custom output report name, if not specified will use default output/prowler-output-ACCOUNT_NUM-OUTPUT_DATE + -z Failed checks do not trigger exit code 3 + -Z Specify one or multiple check ids separated by commas that will trigger exit code 3 if they fail. Unspecified checks will not trigger exit code 3. This will override "-z". + (i.e.: "-Z check11,check12" will cause check11 and/or check12 to trigger exit code 3) -V show version number & exit -h this help " exit } -while getopts ":hlLkqp:r:c:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:o:B:F:" OPTION; do +while getopts ":hlLkqp:r:c:C:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:o:B:F:zZ:" OPTION; do case $OPTION in h ) usage @@ -136,6 +142,9 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:o:B:F:" OPTION; do c ) CHECK_ID=$OPTARG ;; + C ) + CHECK_FILE=$OPTARG + ;; g ) GROUP_ID_READ=$OPTARG ;; @@ -204,6 +213,12 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:o:B:F:" OPTION; do F ) OUTPUT_FILE_NAME=$OPTARG ;; + z ) + FAILED_CHECK_FAILED_SCAN=0 + ;; + Z ) + FAILED_CHECK_FAILED_SCAN_LIST=$OPTARG + ;; : ) echo "" echo "$OPTRED ERROR!$OPTNORMAL -$OPTARG requires an argument" @@ -269,6 +284,19 @@ unset AWS_DEFAULT_OUTPUT . $PROWLER_DIR/include/securityhub_integration . $PROWLER_DIR/include/junit_integration +# Parses the check file into CHECK_IDs. +if [[ -n "$CHECK_FILE" ]]; then + if [[ -f $CHECK_FILE ]]; then + # Parses the file, converting it to a comma-separated list.
Ignores all # comments and removes extra blank spaces + CHECK_ID="$(awk '!/^[[:space:]]*#/{print }' <(cat $CHECK_FILE | sed 's/[[:space:]]*#.*$//g;/^$/d' | sed 'H;1h;$!d;x;y/\n/,/' | tr -d ' '))" + else + # If the file doesn't exist, exits Prowler + echo "$CHECK_FILE does not exist" + EXITCODE=1 + exit $EXITCODE + fi +fi + # Pre-process whitelist file if supplied if [[ -n "$WHITELIST_FILE" ]]; then # ignore lines starting with # (comments) @@ -324,7 +352,7 @@ show_check_title() { local check_id=CHECK_ID_$1 local check_title=CHECK_TITLE_$1 local check_scored=CHECK_SCORED_$1 - local check_type=CHECK_TYPE_$1 + local check_cis_level=CHECK_CIS_LEVEL_$1 local check_asff_compliance_type=CHECK_ASFF_COMPLIANCE_TYPE_$1 local check_severity=CHECK_SEVERITY_$1 local check_servicename=CHECK_SERVICENAME_$1 @@ -347,9 +375,9 @@ show_check_title() { fi # This shows ASFF_COMPLIANCE_TYPE if group used is ens, this is used to show ENS compliance ID control, can be used for other compliance groups as well. if [[ ${GROUP_ID_READ} == "ens" ]];then - textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" "$group_ids" "(${!check_asff_compliance_type})" + textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_cis_level}" "$group_ids" "(${!check_asff_compliance_type})" else - textTitle "${!check_id}" "${!check_title}" "${!check_servicename}" "${!check_severity}" "$group_ids" + textTitle "${!check_id}" "${!check_title}" "${!check_servicename}" "${!check_severity}" "$group_ids" "${!check_cis_level}" fi } @@ -657,6 +685,7 @@ if [[ $CHECK_ID ]];then addHtmlFooter >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML fi copyToS3 + scoring cleanTemp exit $EXITCODE fi diff --git a/util/Dockerfile b/util/Dockerfile index 6fbed17c..9467cd0c 100644 --- a/util/Dockerfile +++ b/util/Dockerfile @@ -5,9 +5,9 @@ ARG USERID=34000 RUN addgroup -g ${USERID} ${USERNAME} && \ adduser -s /bin/sh -G ${USERNAME} -D -u ${USERID} ${USERNAME} && \ - apk --update --no-cache add python3 bash curl jq file coreutils py3-pip && \ + apk --update --no-cache add python3 bash curl jq file coreutils py3-pip git && \ pip3 install --upgrade pip && \ - pip3 install awscli boto3 detect-secrets + pip3 install awscli boto3 detect-secrets==1.0.3 WORKDIR /prowler diff --git a/util/codebuild/codebuild-prowler-audit-account-cfn.yaml b/util/codebuild/codebuild-prowler-audit-account-cfn.yaml index 1af61a7a..7cec6636 100644 --- a/util/codebuild/codebuild-prowler-audit-account-cfn.yaml +++ b/util/codebuild/codebuild-prowler-audit-account-cfn.yaml @@ -141,7 +141,7 @@ Resources: - id: W28 reason: "Explicit name is required for this resource to avoid circular dependencies." Properties: - RoleName: !Sub 'prowler-codebuild-role-${ServiceName}-${AWS::StackName}' + RoleName: !Sub 'prowler-codebuild-role' Path: '/service-role/' ManagedPolicyArns: - 'arn:aws:iam::aws:policy/job-function/SupportUser' @@ -341,4 +341,4 @@ Resources: Outputs: ArtifactBucketName: Description: Artifact Bucket Name - Value: !Ref 'ArtifactBucket' \ No newline at end of file + Value: !Ref 'ArtifactBucket' diff --git a/util/org-multi-account/serverless_codebuild/README.md b/util/org-multi-account/serverless_codebuild/README.md new file mode 100644 index 00000000..f2c12235 --- /dev/null +++ b/util/org-multi-account/serverless_codebuild/README.md @@ -0,0 +1,48 @@ +# Organizational Prowler with Serverless + +Language: [Korean](README_kr.md) + +This project applies Prowler in a multi-account environment within AWS Organizations.
+CloudWatch triggers CodeBuild on a fixed schedule. +CodeBuild executes a script that clones the latest Prowler from [here](https://github.com/toniblyx/prowler) and performs a security assessment on all the accounts in the AWS Organization. The assessment reports are sent to an S3 bucket in the Log Archive Account. + +For more information on how to use Prowler, see [here](https://github.com/toniblyx/prowler#usage). + +![Untitled](docs/images/prowler_org_architecture.png) + +1. **Log Archive Account** + 1. Deploy [ProwlerS3.yaml](templates/ProwlerS3.yaml) in the CloudFormation console. + The template creates an S3 bucket for reports and a bucket policy that limits API actions to principals from the AWS Organization. + - AwsOrgId : AWS Organizations' Organization ID + - S3Prefix : The prefix included in the bucket name +2. **Master Account** + 1. Deploy [ProwlerRole.yaml](templates/ProwlerRole.yaml) as a CloudFormation stack to create the required resources in the master account itself. + (The same template will also be deployed to the other member accounts as a StackSet.) + - ProwlerCodeBuildAccount : Audit Account ID where CodeBuild resides (preferably the Audit/Security account) + - ProwlerCodeBulidRole : Role name to use in the CodeBuild service + - ProwlerCrossAccountRole : Role name to assume for cross-account access + - ProwlerS3 : The S3 bucket name where reports will be stored + 1. Create a **StackSet** with [ProwlerRole.yaml](templates/ProwlerRole.yaml) to deploy the role into the member accounts of the AWS Organization. + - ProwlerCodeBuildAccount : Audit Account ID where CodeBuild resides (preferably the Audit/Security account) + - ProwlerCodeBulidRole : Role name to use in the CodeBuild service + - ProwlerCrossAccountRole : Role name to assume for cross-account access + - ProwlerS3 : The S3 bucket name where reports will be stored + - Permission : Service-managed permissions + - Deploy target : Select Deploy to organization, Enabled, and Delete stacks + - Specify regions : Region(s) to deploy to +3. **Audit Account** + 1. Go to the S3 console, create a bucket, and upload [run-prowler-reports.sh.zip](src/run-prowler-reports.sh.zip) + - bucket name : prowler-util-*[Account ID]*-*[region]* + ![Untitled](docs/images/s3_screenshot.png) + + 1. Deploy [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml), which creates a CloudWatch Rule that triggers CodeBuild on a fixed schedule, allowing Prowler to audit multiple accounts. + - AwsOrgId : AWS Organizations' Organization ID + - CodeBuildRole : Role name to use in the CodeBuild service + - CodeBuildSourceS3 : Location of the object uploaded in the previous step + - prowler-util-*[Account ID]*-*[region]*/**run-prowler-reports.sh.zip** + - CrossAccountRole : Name of the cross-account role created in step **2** above + - ProwlerReportS3 : The S3 bucket name where reports will be stored + - ProwlerReportS3Account : The account where the report S3 bucket resides + 1. If you'd like to change the scheduled time: + 1. You can change the cron expression of ScheduleExpression within [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml). + 2. Alternatively, you can make changes directly from Events > Rules > ProwlerExecuteRule > Actions > Edit in the CloudWatch console.
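For reference, the console steps described in this README can also be scripted. The sketch below is a minimal, untested AWS CLI equivalent; all account IDs, the region suffixes, and the Organization ID are placeholders, while the parameter names come from the templates added in this change.

```bash
# Hypothetical CLI equivalent of the console steps above (all IDs/regions are placeholders).

# 1. Log Archive Account: report bucket and bucket policy
aws cloudformation deploy \
  --template-file templates/ProwlerS3.yaml \
  --stack-name prowler-report-s3 \
  --parameter-overrides AwsOrgId=o-exampleorgid S3Prefix=prowler

# 2. Master Account: cross-account role (repeat via a StackSet for the member accounts)
aws cloudformation deploy \
  --template-file templates/ProwlerRole.yaml \
  --stack-name prowler-cross-account-role \
  --capabilities CAPABILITY_NAMED_IAM \
  --parameter-overrides \
      ProwlerCodeBuildAccount=111111111111 \
      ProwlerCodeBulidRole=ProwlerCodeBuild-Role \
      ProwlerCrossAccountRole=ProwlerXA-CBRole \
      ProwlerS3=prowler-222222222222-us-east-1

# 3. Audit Account: upload the build script, then create the CodeBuild project and schedule
aws s3 cp src/run-prowler-reports.sh.zip s3://prowler-util-111111111111-us-east-1/
aws cloudformation deploy \
  --template-file templates/ProwlerCodeBuildStack.yaml \
  --stack-name prowler-codebuild \
  --capabilities CAPABILITY_NAMED_IAM \
  --parameter-overrides \
      AwsOrgId=o-exampleorgid \
      CodeBuildRole=ProwlerCodeBuild-Role \
      CodeBuildSourceS3=prowler-util-111111111111-us-east-1/run-prowler-reports.sh.zip \
      CrossAccountRole=ProwlerXA-CBRole \
      ProwlerReportS3=prowler-222222222222-us-east-1 \
      ProwlerReportS3Account=222222222222 \
      ProwlerReportFormat=html
```

The organization-wide StackSet rollout in step 2 is still easiest to perform from the console as described above; the commands here only cover the single-account stacks.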
\ No newline at end of file diff --git a/util/org-multi-account/serverless_codebuild/README_kr.md b/util/org-multi-account/serverless_codebuild/README_kr.md new file mode 100644 index 00000000..1570ab9f --- /dev/null +++ b/util/org-multi-account/serverless_codebuild/README_kr.md @@ -0,0 +1,62 @@ +# Organizational Prowler with Serverless + +Langage: [English](README.md) + +이 문서는 AWS Organization 내의 multi account 환경에서 prowler 를 적용하기 위해 작성된 문서입니다. +일정 시간마다 CloudWatch는 CodeBuild 를 트리거합니다. +CodeBuild 는 최신의 [prowler](https://github.com/toniblyx/prowler) 소스를 클론받고, +Organization 내의 모든 Account 에 대해 security assessment 를 수행합니다. +prowler 의 자세한 사용방법은 [이 곳](https://github.com/toniblyx/prowler#usagee) 을 참고합니다. + +![Untitled](docs/images/prowler_org_architecture.png) + +1. **Log Archive Account**에 접속합니다. + 1. 아래 템플릿을 CloudFormation console 에서 배포합니다. 이를 통해 prowler 의 security assessment report 가 저장되는 bucket 과 bucket policy 를 생성합니다. + + [ProwlerS3.yaml](templates/ProwlerS3.yaml) + + - AwsOrgId : AWS Organizations의 Organization ID + - S3Prefix : 생성될 버킷의 이름에 포함되는 prefix +2. **Master Account** 에 접속합니다. + 1. 아래 템플릿을 이용하여 CloudFormation **Stack**을 생성합니다. StackSet은 Master account 에 적용되지 않으므로 Stack 으로도 배포가 필요합니다. + + [ProwlerRole.yaml](templates/ProwlerRole.yaml) + + - ProwlerCodeBuildAccount : CodeBuild 가 있는 Audit Acccount ID + - ProwlerCodeBulidRole : CodeBuild의 생성될 Role 이름 + - ProwlerCrossAccountRole : Cross account 용 Assume할 Role 이름 + - ProwlerS3 : report 가 저장될 S3 bucket 명 + 2. 아래 템플릿을 이용하여 CloudFormation **StackSet**을 생성하여, Organazation에 포함된 account 대상으로도 아래 템플릿을 배포합니다. + + [ProwlerRole.yaml](templates/ProwlerRole.yaml) + + - ProwlerCodeBuildAccount : CodeBuild 가 있는 Audit Acccount + - ProwlerCodeBulidRole : CodeBuild에서 사용할 Role 이름 + - ProwlerCrossAccountRole : Cross account 용 Assume할 Role 이름 + - ProwlerS3 : report 가 저장될 S3 bucket 명 + - Permission : Service-managed permissions + - Deploy target : Deploy to organization 선택, Enable, Delete stacks 선택 + - Specify regions : 배포할 대상 리전을 선택 +3. **Audit Account**에 접속합니다. + 1. **S3 console** 로 이동하여 버킷을 생성하고 아래 항목을 **업로드**한 후, 버킷명을 복사해둡니다. + + [run-prowler-reports.sh.zip](src/run-prowler-reports.sh.zip) + + - bucket name : prowler-util-**-** + + ![Untitled](docs/images/s3_screenshot.png) + + 2. 아래 템플릿으로 **CloudFormation stack** 을 생성합니다. 이 템플릿은 CloudWatch Rule 을 생성하여 일정 시간마다 CodeBuild 를 실행하여 prowler 가 multi accounts 를 audit 할 수 있도록 합니다. + + [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml) + + - AwsOrgId : AWS Organizations의 Organization ID + - CodeBuildRole : CodeBuild의 서비스 Role 이름 + - CodeBuildSourceS3 : a 에서 업로드한 object 위치 + - prowler-util-**-*/***run-prowler-reports.sh.zip** + - CrossAccountRole : 2번에서 생성한 Cross Account 용 Role 이름 + - ProwlerReportS3 : report 가 저장될 S3 bucket 명 + - ProwlerReportS3Account : report 가 저장될 S3 bucket이 위치한 Account + 3. 스케줄 된 시간을 변경하고 싶은 경우 + 1. [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml) 내에서 ScheduleExpression의 크론 표현식을 변경할 수 있습니다. + 2. 또는 CloudWatch console 에서 Events > Rules > ProwlerExecuteRule > Actions > Edit 에서 직접 변경할 수 있습니다. 
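If only the run time needs to change (the last step in both READMEs above), the existing rule can also be updated in place from the CLI instead of editing the template or the console rule. A minimal sketch, assuming the rule was created by ProwlerCodeBuildStack.yaml; the account ID in the role ARN is a placeholder.

```bash
# Hypothetical: move the daily run to 06:00 UTC on the rule created by ProwlerCodeBuildStack.yaml.
# ProwlerExecuteRule and ProwlerCWRule-Role come from that template; the account ID is a placeholder.
aws events put-rule \
  --name ProwlerExecuteRule \
  --schedule-expression "cron(0 6 * * ? *)" \
  --role-arn arn:aws:iam::111111111111:role/ProwlerCWRule-Role
```

PutRule does not touch the rule's targets, so the CodeBuild target stays attached and only the schedule changes.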
\ No newline at end of file diff --git a/util/org-multi-account/serverless_codebuild/docs/images/prowler_org_architecture.png b/util/org-multi-account/serverless_codebuild/docs/images/prowler_org_architecture.png new file mode 100644 index 00000000..0996ae00 Binary files /dev/null and b/util/org-multi-account/serverless_codebuild/docs/images/prowler_org_architecture.png differ diff --git a/util/org-multi-account/serverless_codebuild/docs/images/s3_screenshot.png b/util/org-multi-account/serverless_codebuild/docs/images/s3_screenshot.png new file mode 100644 index 00000000..4dd14057 Binary files /dev/null and b/util/org-multi-account/serverless_codebuild/docs/images/s3_screenshot.png differ diff --git a/util/org-multi-account/serverless_codebuild/src/run-prowler-reports.sh b/util/org-multi-account/serverless_codebuild/src/run-prowler-reports.sh new file mode 100644 index 00000000..2b7350cc --- /dev/null +++ b/util/org-multi-account/serverless_codebuild/src/run-prowler-reports.sh @@ -0,0 +1,119 @@ +#!/bin/bash -e +# +# Run Prowler against All AWS Accounts in an AWS Organization + +# Change Directory (rest of the script, assumes your in the ec2-user home directory) +# cd /home/ec2-user || exit + +# Show Prowler Version, and Download Prowler, if it doesn't already exist +if ! ./prowler/prowler -V 2>/dev/null; then + git clone https://github.com/toniblyx/prowler.git + ./prowler/prowler -V +fi + +# Source .awsvariables (to read in Environment Variables from CloudFormation Data) +# shellcheck disable=SC1091 +# source .awsvariables + +# Get Values from Environment Variables Created on EC2 Instance from CloudFormation Data +echo "S3: $S3" +echo "S3ACCOUNT: $S3ACCOUNT" +echo "ROLE: $ROLE" +echo "FORMAT: $FORMAT" + +# CleanUp Last Ran Prowler Reports, as they are already stored in S3. +rm -rf prowler/output/*.html + +# Function to unset AWS Profile Variables +unset_aws() { + unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN +} +unset_aws + +# Find THIS Account AWS Number +CALLER_ARN=$(aws sts get-caller-identity --output text --query "Arn") +PARTITION=$(echo "$CALLER_ARN" | cut -d: -f2) +THISACCOUNT=$(echo "$CALLER_ARN" | cut -d: -f5) +echo "THISACCOUNT: $THISACCOUNT" +echo "PARTITION: $PARTITION" + +# Function to Assume Role to THIS Account & Create Session +this_account_session() { + unset_aws + role_credentials=$(aws sts assume-role --role-arn arn:"$PARTITION":iam::"$THISACCOUNT":role/"$ROLE" --role-session-name ProwlerRun --output json) + AWS_ACCESS_KEY_ID=$(echo "$role_credentials" | jq -r .Credentials.AccessKeyId) + AWS_SECRET_ACCESS_KEY=$(echo "$role_credentials" | jq -r .Credentials.SecretAccessKey) + AWS_SESSION_TOKEN=$(echo "$role_credentials" | jq -r .Credentials.SessionToken) + echo "this_account_session done..." 
+ export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN +} + +# Find AWS Master Account +this_account_session +AWSMASTER=$(aws organizations describe-organization --query Organization.MasterAccountId --output text) +echo "AWSMASTER: $AWSMASTER" + +# Function to Assume Role to Master Account & Create Session +master_account_session() { + unset_aws + role_credentials=$(aws sts assume-role --role-arn arn:"$PARTITION":iam::"$AWSMASTER":role/"$ROLE" --role-session-name ProwlerRun --output json) + AWS_ACCESS_KEY_ID=$(echo "$role_credentials" | jq -r .Credentials.AccessKeyId) + AWS_SECRET_ACCESS_KEY=$(echo "$role_credentials" | jq -r .Credentials.SecretAccessKey) + AWS_SESSION_TOKEN=$(echo "$role_credentials" | jq -r .Credentials.SessionToken) + echo "master_account_session done..." + export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN +} + +# Lookup All Accounts in AWS Organization +master_account_session +ACCOUNTS_IN_ORGS=$(aws organizations list-accounts --query Accounts[*].Id --output text) + +# Function to Assume Role to S3 Account & Create Session +s3_account_session() { + unset_aws + role_credentials=$(aws sts assume-role --role-arn arn:"$PARTITION":iam::"$S3ACCOUNT":role/"$ROLE" --role-session-name ProwlerRun --output json) + AWS_ACCESS_KEY_ID=$(echo "$role_credentials" | jq -r .Credentials.AccessKeyId) + AWS_SECRET_ACCESS_KEY=$(echo "$role_credentials" | jq -r .Credentials.SecretAccessKey) + AWS_SESSION_TOKEN=$(echo "$role_credentials" | jq -r .Credentials.SessionToken) + echo "s3_account_session done..." + export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN +} + +# Run Prowler against Accounts in AWS Organization +echo "AWS Accounts in Organization" +echo "$ACCOUNTS_IN_ORGS" +PARALLEL_ACCOUNTS="1" +for accountId in $ACCOUNTS_IN_ORGS; do + # shellcheck disable=SC2015 + test "$(jobs | wc -l)" -ge $PARALLEL_ACCOUNTS && wait || true + { + START_TIME=$SECONDS + # Unset AWS Profile Variables + unset_aws + # Run Prowler + echo -e "Assessing AWS Account: $accountId, using Role: $ROLE on $(date)" + # remove -g cislevel for a full report and add other formats if needed + ./prowler/prowler -R "$ROLE" -A "$accountId" -g cislevel1 -M $FORMAT + echo "Report stored locally at: prowler/output/ directory" + TOTAL_SEC=$((SECONDS - START_TIME)) + echo -e "Completed AWS Account: $accountId, using Role: $ROLE on $(date)" + printf "Completed AWS Account: $accountId in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60)) + echo "" + } & +done + +# Wait for All Prowler Processes to finish +wait +echo "Prowler Assessments Completed against All Accounts in the AWS Organization. Starting S3 copy operations..." 
+ +# Upload Prowler Report to S3 +s3_account_session +aws s3 cp prowler/output/ "$S3/reports/" --recursive --include "*.html" --acl bucket-owner-full-control +echo "Assessment reports successfully copied to S3 bucket" + +# Final Wait for All Prowler Processes to finish +wait +echo "Prowler Assessments Completed" + +# Unset AWS Profile Variables +unset_aws diff --git a/util/org-multi-account/serverless_codebuild/src/run-prowler-reports.sh.zip b/util/org-multi-account/serverless_codebuild/src/run-prowler-reports.sh.zip new file mode 100644 index 00000000..e73fdaba Binary files /dev/null and b/util/org-multi-account/serverless_codebuild/src/run-prowler-reports.sh.zip differ diff --git a/util/org-multi-account/serverless_codebuild/templates/ProwlerCodeBuildStack.yaml b/util/org-multi-account/serverless_codebuild/templates/ProwlerCodeBuildStack.yaml new file mode 100644 index 00000000..d89c5452 --- /dev/null +++ b/util/org-multi-account/serverless_codebuild/templates/ProwlerCodeBuildStack.yaml @@ -0,0 +1,210 @@ +--- +AWSTemplateFormatVersion: 2010-09-09 +Description: Creates a CodeBuild project to audit an AWS account with Prowler and stores the html report in a S3 bucket. +Parameters: + AwsOrgId: + Type: String + Description: Enter AWS Organizations ID + AllowedPattern: ^o-[a-z0-9]{10,32}$ + ConstraintDescription: The Org Id must be a 12 character string starting with o- and followed by 10 lower case alphanumeric characters. + Default: o-itdezkbz6h + CodeBuildRole: + Description: Enter Name for CodeBuild Role to create + Type: String + AllowedPattern: ^[\w+=,.@-]{1,64}$ + ConstraintDescription: Max 64 alphanumeric characters. Also special characters supported [+, =, ., @, -] + Default: ProwlerCodeBuild-Role + CodeBuildSourceS3: + Type: String + Description: Enter like //.zip + ConstraintDescription: Max 63 characters. Can't start or end with dash. Can use numbers and lowercase letters. + Default: prowler-util-411267690458-ap-northeast-2/run-prowler-reports.sh.zip + ProwlerReportS3: + Type: String + Description: Enter S3 Bucket for Prowler Reports. prefix-awsaccount-awsregion + AllowedPattern: ^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$ + ConstraintDescription: Max 63 characters. Can't start or end with dash. Can use numbers and lowercase letters. + Default: prowler-954896828174-ap-northeast-2 + ProwlerReportS3Account: + Type: String + Description: Enter AWS Account Number where Prowler S3 Bucket resides. + AllowedPattern: ^\d{12}$ + ConstraintDescription: An AWS Account Number must be a 12 digit numeric string. + Default: 954896828174 + CrossAccountRole: + Type: String + Description: Enter CrossAccount Role Prowler will be using to assess AWS Accounts in the AWS Organization. (ProwlerCrossAccountRole) + AllowedPattern: ^[\w+=,.@-]{1,64}$ + ConstraintDescription: Max 64 alphanumeric characters. 
Also special characters [+, =, ., @, -] + Default: ProwlerXA-CBRole + ProwlerReportFormat: + Type: String + Description: Enter Prowler Option like html, csv, json + Default: html + +Resources: + ProwlerCodeBuildRole: + Type: AWS::IAM::Role + Properties: + Description: Prowler CodeBuild Role + RoleName: !Ref CodeBuildRole + Tags: + - Key: App + Value: Prowler + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: + - codebuild.amazonaws.com + Action: + - sts:AssumeRole + Policies: + - PolicyName: Prowler-S3 + PolicyDocument: + Version: 2012-10-17 + Statement: + - Sid: AllowGetPutListObject + Effect: Allow + Resource: + - !Sub arn:${AWS::Partition}:s3:::${ProwlerReportS3} + - !Sub arn:${AWS::Partition}:s3:::${ProwlerReportS3}/* + Action: + - s3:GetObject + - s3:PutObject + - s3:ListBucket + - s3:PutObjectAcl + - Sid: AllowReadOnlyS3Access + Effect: Allow + Resource: "*" + Action: + - "s3:Get*" + - "s3:List*" + - PolicyName: Prowler-CrossAccount-AssumeRole + PolicyDocument: + Version: 2012-10-17 + Statement: + - Sid: AllowStsAssumeRole + Effect: Allow + Resource: !Sub arn:${AWS::Partition}:iam::*:role/${CrossAccountRole} + Action: sts:AssumeRole + Condition: + StringEquals: + aws:PrincipalOrgId: !Ref AwsOrgId + - PolicyName: Prowler-CloudWatch + PolicyDocument: + Version: 2012-10-17 + Statement: + - Sid: AllowCreateLogs + Effect: Allow + Resource: !Sub arn:${AWS::Partition}:logs:*:*:log-group:* + Action: + - logs:CreateLogGroup + - logs:CreateLogStream + - Sid: AllowPutevent + Effect: Allow + Resource: !Sub arn:${AWS::Partition}:logs:*:*:log-group:*:log-stream:* + Action: + - logs:PutLogEvents + + ProwlerCodeBuild: + Type: AWS::CodeBuild::Project + Properties: + Artifacts: + Type: NO_ARTIFACTS + Source: + Type: S3 + Location: !Ref CodeBuildSourceS3 + BuildSpec: | + version: 0.2 + phases: + install: + runtime-versions: + python: 3.8 + commands: + - echo "Updating yum..." + - yum -y update + build: + commands: + - echo "Running Prowler with script" + - chmod +x run-prowler-reports.sh + - ./run-prowler-reports.sh + post_build: + commands: + - echo "Done!" + Environment: + # AWS CodeBuild free tier includes 100 build minutes of BUILD_GENERAL1_SMALL per month. + # BUILD_GENERAL1_SMALL: Use up to 3 GB memory and 2 vCPUs for builds. $0.005/minute. + # BUILD_GENERAL1_MEDIUM: Use up to 7 GB memory and 4 vCPUs for builds. $0.01/minute. + # BUILD_GENERAL1_LARGE: Use up to 15 GB memory and 8 vCPUs for builds. $0.02/minute. + # BUILD_GENERAL1_2XLARGE: Use up to 144 GB memory and 72 vCPUs for builds. $0.20/minute. 
+ ComputeType: "BUILD_GENERAL1_SMALL" + Image: "aws/codebuild/amazonlinux2-x86_64-standard:3.0" + Type: "LINUX_CONTAINER" + EnvironmentVariables: + - Name: "S3" + Value: !Sub s3://${ProwlerReportS3} + Type: PLAINTEXT + - Name: "S3ACCOUNT" + Value: !Ref ProwlerReportS3Account + Type: PLAINTEXT + - Name: "ROLE" + Value: !Ref CrossAccountRole + Type: PLAINTEXT + - Name: "FORMAT" + Value: !Ref ProwlerReportFormat + Type: PLAINTEXT + Description: Run Prowler assessment + ServiceRole: !GetAtt ProwlerCodeBuildRole.Arn + TimeoutInMinutes: 300 + + ProwlerCWRuleRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: + - sts:AssumeRole + Description: ProwlerCWRuleRole + RoleName: ProwlerCWRule-Role + Policies: + - PolicyName: Rule-Events + PolicyDocument: + Version: 2012-10-17 + Statement: + - Sid: AWSEventInvokeCodeBuild + Effect: Allow + Resource: "*" + Action: + - codebuild:StartBuild + + ProwlerRule: + Type: AWS::Events::Rule + Properties: + Description: This rule will trigger CodeBuild to audit AWS Accounts in my Organization with Prowler + ScheduleExpression: cron(0 21 * * ? *) + RoleArn: !GetAtt ProwlerCWRuleRole.Arn + Name: ProwlerExecuteRule + State: ENABLED + Targets: + - Arn: !Sub ${ProwlerCodeBuild.Arn} + Id: Prowler-CodeBuild-Target + RoleArn: !GetAtt ProwlerCWRuleRole.Arn + + +Outputs: + ProwlerEc2Account: + Description: AWS Account Number where Prowler EC2 Instance resides. + Value: !Ref AWS::AccountId + ProwlerCodeBuildRole: + Description: Instance Role given to the Prowler EC2 Instance (needed to grant sts:AssumeRole rights). + Value: !Ref ProwlerCodeBuildRole + ProwlerReportS3: + Description: S3 Bucket for Prowler Reports + Value: !Ref ProwlerReportS3 diff --git a/util/org-multi-account/serverless_codebuild/templates/ProwlerRole.yaml b/util/org-multi-account/serverless_codebuild/templates/ProwlerRole.yaml new file mode 100644 index 00000000..138d8809 --- /dev/null +++ b/util/org-multi-account/serverless_codebuild/templates/ProwlerRole.yaml @@ -0,0 +1,123 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Create the Cross-Account IAM Prowler Role + +Metadata: + AWS::CloudFormation::Interface: + ParameterGroups: + - Label: + default: CodeBuild Settings + Parameters: + - ProwlerCodeBuildAccount + - ProwlerCodeBulidRole + - Label: + default: S3 Settings + Parameters: + - ProwlerS3 + - Label: + default: CrossAccount Role + Parameters: + - ProwlerCrossAccountRole + +Parameters: + ProwlerS3: + Type: String + Description: Enter S3 Bucket for Prowler Reports. prefix-awsaccount-awsregion + AllowedPattern: ^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$ + Default: prowler-954896828174-ap-northeast-2 + ProwlerCodeBuildAccount: + Type: String + Description: Enter AWS Account Number where Prowler CodeBuild Instance will reside. + AllowedPattern: ^\d{12}$ + ConstraintDescription: An AWS Account Number must be a 12 digit numeric string. + Default: 411267690458 + ProwlerCodeBulidRole: + Type: String + Description: Enter Instance Role that will be given to the Prowler CodeBuild (needed to grant sts:AssumeRole rights). + AllowedPattern: ^[\w+=,.@-]{1,64}$ + ConstraintDescription: Max 64 alphanumeric characters. Also special characters supported [+, =, ., @, -] + Default: ProwlerCodeBuild-Role + ProwlerCrossAccountRole: + Type: String + Description: Enter Name for CrossAccount Role to be created for Prowler to assess all Accounts in the AWS Organization. 
+ AllowedPattern: ^[\w+=,.@-]{1,64}$ + ConstraintDescription: Max 64 alphanumeric characters. Also special characters supported [+, =, ., @, -] + Default: ProwlerXA-CBRole + +Resources: + ProwlerRole: + Type: AWS::IAM::Role + Properties: + Description: Provides Prowler CodeBuild permissions to assess security of Accounts in AWS Organization + RoleName: !Ref ProwlerCrossAccountRole + Tags: + - Key: App + Value: Prowler + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + AWS: + - !Sub arn:${AWS::Partition}:iam::${ProwlerCodeBuildAccount}:root + Action: + - sts:AssumeRole + Condition: + StringLike: + aws:PrincipalArn: !Sub arn:${AWS::Partition}:iam::${ProwlerCodeBuildAccount}:role/${ProwlerCodeBulidRole} + ManagedPolicyArns: + - !Sub arn:${AWS::Partition}:iam::aws:policy/SecurityAudit + - !Sub arn:${AWS::Partition}:iam::aws:policy/job-function/ViewOnlyAccess + Policies: + - PolicyName: Prowler-Additions-Policy + PolicyDocument: + Version: 2012-10-17 + Statement: + - Sid: AllowMoreReadForProwler + Effect: Allow + Resource: "*" + Action: + - access-analyzer:List* + - apigateway:Get* + - apigatewayv2:Get* + - aws-marketplace:ViewSubscriptions + - dax:ListTables + - ds:ListAuthorizedApplications + - ds:DescribeRoles + - ecr:Describe* + - lambda:GetAccountSettings + - lambda:GetFunctionConfiguration + - lambda:GetLayerVersionPolicy + - lambda:GetPolicy + - opsworks-cm:Describe* + - opsworks:Describe* + - secretsmanager:ListSecretVersionIds + - sns:List* + - sqs:ListQueueTags + - states:ListActivities + - support:Describe* + - tag:GetTagKeys + - PolicyName: Prowler-S3-Reports + PolicyDocument: + Version: 2012-10-17 + Statement: + - Sid: AllowGetPutListObject + Effect: Allow + Resource: + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3} + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3}/* + Action: + - s3:GetObject + - s3:PutObject + - s3:ListBucket + Metadata: + cfn_nag: + rules_to_suppress: + - id: W11 + reason: "Prowler requires these rights to perform its Security Assessment." + - id: W28 + reason: "Using a defined Role Name." + +Outputs: + ProwlerCrossAccountRole: + Description: CrossAccount Role to be used by Prowler to assess AWS Accounts in the AWS Organization. + Value: !Ref ProwlerCrossAccountRole diff --git a/util/org-multi-account/serverless_codebuild/templates/ProwlerS3.yaml b/util/org-multi-account/serverless_codebuild/templates/ProwlerS3.yaml new file mode 100644 index 00000000..fc0ef4d9 --- /dev/null +++ b/util/org-multi-account/serverless_codebuild/templates/ProwlerS3.yaml @@ -0,0 +1,106 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Create Prowler S3 Bucket for Prowler Reports + +Parameters: + AwsOrgId: + Type: String + Description: > + Enter AWS Organizations ID. + This is used to restrict permissions to least privilege. + AllowedPattern: ^o-[a-z0-9]{10,32}$ + ConstraintDescription: The Org Id must be a 12 character string starting with o- and followed by 10 lower case alphanumeric characters. + Default: o-abcde12345 + S3Prefix: + Type: String + Description: > + Enter S3 Bucket Name Prefix (in lowercase). + Bucket will be named: prefix-awsaccount-awsregion (i.e., prowler-123456789012-us-east-1) + AllowedPattern: ^[a-z0-9][a-z0-9-]{1,33}[a-z0-9]$ + ConstraintDescription: > + Max 35 characters, as "-awsaccount-awsregion" will be added, and max name is 63 characters. + Can't start or end with dash. Can use numbers and lowercase letters. 
+ Default: prowler + +Resources: + ProwlerS3: + Type: AWS::S3::Bucket + Properties: + BucketName: !Sub ${S3Prefix}-${AWS::AccountId}-${AWS::Region} + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: "AES256" + AccessControl: Private + PublicAccessBlockConfiguration: + BlockPublicAcls: True + BlockPublicPolicy: True + IgnorePublicAcls: True + RestrictPublicBuckets: True + VersioningConfiguration: + Status: Enabled + Tags: + - Key: App + Value: Prowler + Metadata: + cfn_nag: + rules_to_suppress: + - id: W35 + reason: "This S3 Bucket is only being used by the AWS Organization to download/upload prowler reports." + + ProwlerS3BucketPolicy: + Type: AWS::S3::BucketPolicy + Properties: + Bucket: !Ref ProwlerS3 + PolicyDocument: + Statement: + - Sid: AllowGetPutListObject + Effect: Allow + Principal: "*" + Action: + - s3:GetObject + - s3:PutObject + - s3:ListBucket + - s3:PutObjectAcl + Resource: + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3} + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3}/* + Condition: + StringEquals: + aws:PrincipalOrgId: !Ref AwsOrgId + - Sid: DenyNonSSLRequests + Effect: Deny + Action: s3:* + Resource: + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3} + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3}/* + Principal: "*" + Condition: + Bool: + aws:SecureTransport: false + - Sid: DenyIncorrectEncryptionHeader + Effect: Deny + Principal: "*" + Action: s3:PutObject + Resource: + - !Sub arn:${AWS::Partition}:s3:::${ProwlerS3}/* + # Allow uploads with No Encryption, as S3 Default Encryption still applies. + # If Encryption is set, only allow uploads with AES256. + Condition: + "Null": + s3:x-amz-server-side-encryption: false + StringNotEquals: + s3:x-amz-server-side-encryption: AES256 + Metadata: + cfn_nag: + rules_to_suppress: + - id: F16 + reason: "This S3 Bucket Policy has a condition that only allows access to the AWS Organization." + + +Outputs: + ProwlerS3: + Description: S3 Bucket for Prowler Reports + Value: !Ref ProwlerS3 + ProwlerS3Account: + Description: AWS Account Number where Prowler S3 Bucket resides. + Value: !Ref AWS::AccountId diff --git a/util/terraform-kickstarter/data.tf b/util/terraform-kickstarter/data.tf new file mode 100644 index 00000000..48105b29 --- /dev/null +++ b/util/terraform-kickstarter/data.tf @@ -0,0 +1,212 @@ +/* +© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross_claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third_party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON_INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third_party archives. + + Copyright [2020] [© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE_2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ +data "aws_iam_policy" "SecurityAudit" { + arn = "arn:aws:iam::aws:policy/SecurityAudit" +} +data "aws_caller_identity" "current" { +} +data "aws_region" "current" { +} \ No newline at end of file diff --git a/util/terraform-kickstarter/docs/tf.md b/util/terraform-kickstarter/docs/tf.md new file mode 100644 index 00000000..645a7057 --- /dev/null +++ b/util/terraform-kickstarter/docs/tf.md @@ -0,0 +1,53 @@ +## Requirements + +| Name | Version | +|------|---------| +| [aws](#requirement\_aws) | ~> 3.54 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 3.55.0 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [aws_cloudwatch_event_rule.prowler_check_scheduler_event](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_rule) | resource | +| [aws_cloudwatch_event_target.run_prowler_scan](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_target) | resource | +| [aws_codebuild_project.prowler_codebuild](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codebuild_project) | resource | +| [aws_iam_policy.prowler_event_trigger_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.prowler_kickstarter_iam_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy_attachment.prowler_event_trigger_policy_attach](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy_attachment) | resource | +| [aws_iam_policy_attachment.prowler_kickstarter_iam_policy_attach](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy_attachment) | resource | +| [aws_iam_role.prowler_event_trigger_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | +| [aws_iam_role.prowler_kick_start_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | +| [aws_s3_bucket.prowler_report_storage_bucket](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | +| [aws_s3_bucket_policy.prowler_report_storage_bucket_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | +| [aws_s3_bucket_public_access_block.prowler_report_storage_bucket_block_public](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | +| [aws_securityhub_account.securityhub_resource](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/securityhub_account) | resource | +| [aws_securityhub_product_subscription.security_hub_enable_prowler_findings](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/securityhub_product_subscription) | resource | +| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | +| [aws_iam_policy.SecurityAudit](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy) | data source | +| [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| 
[codebuild\_timeout](#input\_codebuild\_timeout) | Codebuild timeout setting | `number` | `300` | no |
+| [enable\_security\_hub](#input\_enable\_security\_hub) | Enable AWS SecurityHub. | `bool` | `true` | no |
+| [enable\_security\_hub\_prowler\_subscription](#input\_enable\_security\_hub\_prowler\_subscription) | Enable a Prowler Subscription. | `bool` | `true` | no |
+| [prowler\_cli\_options](#input\_prowler\_cli\_options) | Run Prowler With The Following Command | `string` | `"-q -M json-asff -S -f us-east-1"` | no |
+| [prowler\_schedule](#input\_prowler\_schedule) | Run Prowler based on cron schedule | `string` | `"cron(0 0 ? * * *)"` | no |
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| [account\_id](#output\_account\_id) | TODO Move these to outputs file |
diff --git a/util/terraform-kickstarter/main.tf b/util/terraform-kickstarter/main.tf
new file mode 100644
index 00000000..c51557b6
--- /dev/null
+++ b/util/terraform-kickstarter/main.tf
@@ -0,0 +1,494 @@
+/*
+© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross_claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third_party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON_INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third_party archives. + + Copyright [2020] [© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE_2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +/* +Security Hub Import Commands + +Run this to get state of SecurityHub + +terraform import aws_securityhub_account.securityhubresource 123456789012 + +*/ + +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.54" + } + } +} +provider "aws" { + region = var.select_region +} + +resource "aws_iam_role" "prowler_kick_start_role" { + name = "security_baseline_kickstarter_iam_role" + managed_policy_arns = ["${data.aws_iam_policy.SecurityAudit.arn}", + "arn:aws:iam::aws:policy/job-function/SupportUser", + "arn:aws:iam::aws:policy/job-function/ViewOnlyAccess"] + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action="sts:AssumeRole" + Effect="Allow" + Sid = "CodeBuildProwler" + Principal = {Service="codebuild.amazonaws.com"} + } + ] + }) + force_detach_policies=true +} +resource "aws_iam_role" "prowler_event_trigger_role" { + name = "security_baseline_kickstarter_event_trigger_iam_role" + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action="sts:AssumeRole" + Effect="Allow" + Sid = "TriggerCodeBuild" + Principal = {Service="events.amazonaws.com"} + } + ] + }) + +} +resource "aws_iam_policy" "prowler_event_trigger_policy" { + depends_on = [aws_codebuild_project.prowler_codebuild] + name = "security_baseline_kickstarter_trigger_iam_policy" + path = "/" + description = "IAM Policy used to trigger the Prowler in AWS Codebuild" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = ["codebuild:StartBuild"], + Effect = "Allow" + Resource = aws_codebuild_project.prowler_codebuild.arn + }] + }) +} +resource "aws_iam_policy_attachment" "prowler_event_trigger_policy_attach" { + depends_on = [aws_iam_policy.prowler_event_trigger_policy] + name = "prowler_event_trigger_policy_attach" + roles = toset([aws_iam_role.prowler_event_trigger_role.id]) + policy_arn = aws_iam_policy.prowler_event_trigger_policy.arn +} +resource "aws_iam_policy" "prowler_kickstarter_iam_policy" { + name = "security_baseline_kickstarter_iam_policy" + path = "/" + description = "IAM Policy used to run prowler from codebuild" + + # Terraform's "jsonencode" function converts a + # Terraform expression result to valid JSON syntax. 
+ policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "logs:PutLogEvents" + ], + Effect = "Allow" + Resource = "arn:aws:logs:*:${data.aws_caller_identity.current.account_id}:log-group:*:log-stream:*" + }, + { + Action = [ + "logs:CreateLogStream", + "logs:CreateLogGroup" + ], + Effect = "Allow" + Resource = "arn:aws:logs:*:${data.aws_caller_identity.current.account_id}:log-group:*" + }, + { + Action = ["sts:AssumeRole"], + Effect = "Allow" + Resource = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:role/${aws_iam_role.prowler_kick_start_role.name}" + }, + { + Action = [ + "s3:GetAccountPublicAccessBlock", + "glue:GetConnections", + "glue:SearchTables", + "ds:ListAuthorizedApplications", + "ec2:GetEbsEncryptionByDefault", + "ecr:Describe*", + "support:Describe*", + "tag:GetTagKeys", + "lambda:GetFunction" + ] + Effect = "Allow" + Resource = "arn:aws:glue:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:catalog" + + }, + { + Action = [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases", + "codebuild:BatchPutCodeCoverages" + ] + Effect = "Allow" + Resource = "arn:aws:codebuild:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:report-group/*" + + }, + { + Action = [ "securityhub:BatchImportFindings"] + Effect = "Allow" + Resource = "*" + }, + { + Action = [ "securityhub:GetFindings"] + Effect = "Allow" + Resource = "*" + }, + { + "Action": "codebuild:StartBuild", + "Resource": "arn:aws:codebuild:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:project/*", + "Effect": "Allow" + }, + { + "Action": ["s3:PutObject", "s3:GetObject", "s3:GetObjectVersion", "s3:GetBucketAcl", "s3:GetBucketLocation"], + "Resource": "arn:aws:s3:::prowler-kickstart-${data.aws_region.current.name}-${data.aws_caller_identity.current.account_id}-reports/*", + "Effect": "Allow" + }, + ] + }) +} +resource "aws_iam_policy_attachment" "prowler_kickstarter_iam_policy_attach" { + depends_on = [aws_iam_policy.prowler_kickstarter_iam_policy] + name = "security_baseline_kickstarter_policy_attach" + roles = toset([aws_iam_role.prowler_kick_start_role.id]) + policy_arn = aws_iam_policy.prowler_kickstarter_iam_policy.arn +} +resource "aws_s3_bucket" "prowler_report_storage_bucket" { + bucket = "prowler-kickstart-${data.aws_region.current.name}-${data.aws_caller_identity.current.account_id}-reports" + acl = "log-delivery-write" + versioning { + enabled = true + } + server_side_encryption_configuration { + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "AES256" + } + } + } + } + +resource "aws_s3_bucket_policy" "prowler_report_storage_bucket_policy" { + depends_on = [aws_s3_bucket.prowler_report_storage_bucket] + bucket = aws_s3_bucket.prowler_report_storage_bucket.id + policy = jsonencode({Version = "2012-10-17" + Id = "ProwlerBucketReportPolicy" + Statement = [ + { + Sid = "S3ForceSSL" + Effect = "Deny" + Principal = "*" + Action = "s3:*" + Resource = ["${aws_s3_bucket.prowler_report_storage_bucket.arn}/*"] + Condition = { + Bool = { + "aws:SecureTransport" = "false" + } + } + }, + { + Sid = "DenyUnEncryptedObjectUploads" + Effect = "Deny" + Principal = "*" + Action = "s3:*" + Resource = ["${aws_s3_bucket.prowler_report_storage_bucket.arn}/*"] + Condition = { + Null = { + "s3:x-amz-server-side-encryption" = "true" + } + } + } + + ] + }) + } + + + +resource "aws_s3_bucket_public_access_block" 
"prowler_report_storage_bucket_block_public" { + depends_on = [aws_s3_bucket.prowler_report_storage_bucket, aws_s3_bucket_policy.prowler_report_storage_bucket_policy] + bucket = aws_s3_bucket.prowler_report_storage_bucket.id + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + restrict_public_buckets = true + } + +resource "aws_codebuild_project" "prowler_codebuild" { + name = "security_baseline_kickstarter_codebuild" + description = "Run a Prowler Assessment with Prowler" + build_timeout = var.codebuild_timeout + service_role = aws_iam_role.prowler_kick_start_role.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0" + type = "LINUX_CONTAINER" + + environment_variable { + name = "BUCKET_REPORT" + value = "${aws_s3_bucket.prowler_report_storage_bucket.id}" + } + + environment_variable { + name = "PROWLER_OPTIONS" + type = "PLAINTEXT" + value = var.prowler_cli_options + } + } + + + source { + type = "NO_SOURCE" + buildspec = "${file("prowler_build_spec.yml")}" + } + + tags = { + Environment = "Prowler KickStarter" + } + } + + + + + +resource "aws_securityhub_account" "securityhub_resource" { + } + +resource "aws_securityhub_product_subscription" "security_hub_enable_prowler_findings" { + depends_on = [aws_securityhub_account.securityhub_resource] + //arn:aws:securityhub:::product/prowler/prowler + product_arn = "arn:aws:securityhub:${data.aws_region.current.name}::product/prowler/prowler" + } + +resource "aws_cloudwatch_event_rule" "prowler_check_scheduler_event" { + + name = "security_baseline_kickstarter_event_cron" + description = "Run Prowler every night" + schedule_expression = var.prowler_schedule + } + +resource "aws_cloudwatch_event_target" "run_prowler_scan" { + + arn = aws_codebuild_project.prowler_codebuild.arn + rule = aws_cloudwatch_event_rule.prowler_check_scheduler_event.name + role_arn = aws_iam_role.prowler_event_trigger_role.arn + + } \ No newline at end of file diff --git a/util/terraform-kickstarter/outputs.tf b/util/terraform-kickstarter/outputs.tf new file mode 100644 index 00000000..4fa74162 --- /dev/null +++ b/util/terraform-kickstarter/outputs.tf @@ -0,0 +1,209 @@ +/* +© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross_claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third_party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON_INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third_party archives. + + Copyright [2020] [© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE_2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +output "account_id" { + value = data.aws_caller_identity.current.account_id +} \ No newline at end of file diff --git a/util/terraform-kickstarter/prowler_build_spec.yml b/util/terraform-kickstarter/prowler_build_spec.yml new file mode 100644 index 00000000..8072b500 --- /dev/null +++ b/util/terraform-kickstarter/prowler_build_spec.yml @@ -0,0 +1,24 @@ +version: 0.2 +phases: + install: + runtime-versions: + python: 3.8 + commands: + - echo "Installing Prowler and dependencies..." 
+      - pip3 install detect-secrets
+      - yum -y install jq
+      - curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
+      - unzip awscliv2.zip
+      - ./aws/install
+      - git clone https://github.com/toniblyx/prowler
+      - cd prowler
+
+  build:
+    commands:
+      - echo "Running Prowler as ./prowler $PROWLER_OPTIONS"
+      - ./prowler $PROWLER_OPTIONS || true
+  post_build:
+    commands:
+      - echo "Scan Complete"
+      - aws s3 cp --sse AES256 output/ s3://$BUCKET_REPORT/ --recursive
+      - echo "Done!"
diff --git a/util/terraform-kickstarter/readme.md b/util/terraform-kickstarter/readme.md
new file mode 100644
index 00000000..3637cfb4
--- /dev/null
+++ b/util/terraform-kickstarter/readme.md
@@ -0,0 +1,96 @@
+# Install Security Baseline Kickstarter with Prowler
+
+## Introduction
+
+The following demonstrates how to quickly install the resources necessary to perform a security baseline assessment using Prowler. The speed comes from a prebuilt Terraform module that configures all the resources necessary to run Prowler, with the findings sent to AWS Security Hub.
+
+## Install
+
+Installing Prowler with Terraform is simple and can be completed in under 1 minute.
+
+- Start AWS CloudShell
+- Run the following commands to install Terraform and clone the Prowler git repo
+  ```
+  git clone https://github.com/singergs/prowler.git
+  git fetch
+  cd prowler
+  git checkout -t origin/terraform-kickstarter
+  sudo yum install -y yum-utils
+  sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo
+  sudo yum -y install terraform
+  cd terraform-kickstarter
+  ```
+- Issue a `terraform init`
+
+- Issue a `terraform apply`
+
+  ![Prowler Install](https://prowler-docs.s3.amazonaws.com/Prowler-Terraform-Install.gif)
+
+  - It is likely that an error related to the Security Hub subscription will be returned. This appears to be Terraform related, and you can validate the configuration by navigating to the Security Hub console: click Integrations and search for Prowler. Take note of the green check where it says *Accepting findings*. See the import note below (after the Modules section) for one way to reconcile Terraform state.
+
+  ![Prowler Subscription](https://prowler-docs.s3.amazonaws.com/Validate-Prowler-Subscription.gif)
+
+
+That's it! The install is now complete. The resources include a CloudWatch Events rule that triggers the AWS CodeBuild project daily at 00:00 GMT. If you'd like to run an assessment right after deployment, simply navigate to the CodeBuild console and start the job manually.
+
+## Terraform Resources
+
+## Requirements
+
+| Name | Version |
+|------|---------|
+| [aws](#requirement\_aws) | ~> 3.54 |
+
+## Providers
+
+| Name | Version |
+|------|---------|
+| [aws](#provider\_aws) | 3.56.0 |
+
+## Modules
+
+No modules.
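+
+Note on the Security Hub error mentioned in the Install steps above: a minimal sketch of one way to resolve it, assuming Security Hub was already enabled in the account before the apply, is to import the existing Security Hub account into Terraform state and re-run the apply. The resource address below is the `aws_securityhub_account.securityhub_resource` resource declared in `main.tf`; replace the example account ID with your own.
+
+```
+terraform import aws_securityhub_account.securityhub_resource 123456789012
+terraform apply
+```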
+ +## Resources + +| Name | Type | +|------|------| +| [aws_cloudwatch_event_rule.prowler_check_scheduler_event](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_rule) | resource | +| [aws_cloudwatch_event_target.run_prowler_scan](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_target) | resource | +| [aws_codebuild_project.prowler_codebuild](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codebuild_project) | resource | +| [aws_iam_policy.prowler_event_trigger_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.prowler_kickstarter_iam_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy_attachment.prowler_event_trigger_policy_attach](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy_attachment) | resource | +| [aws_iam_policy_attachment.prowler_kickstarter_iam_policy_attach](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy_attachment) | resource | +| [aws_iam_role.prowler_event_trigger_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | +| [aws_iam_role.prowler_kick_start_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | +| [aws_s3_bucket.prowler_report_storage_bucket](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | +| [aws_s3_bucket_policy.prowler_report_storage_bucket_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | +| [aws_s3_bucket_public_access_block.prowler_report_storage_bucket_block_public](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | +| [aws_securityhub_account.securityhub_resource](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/securityhub_account) | resource | +| [aws_securityhub_product_subscription.security_hub_enable_prowler_findings](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/securityhub_product_subscription) | resource | +| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | +| [aws_iam_policy.SecurityAudit](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy) | data source | +| [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [codebuild\_timeout](#input\_codebuild\_timeout) | Codebuild timeout setting | `number` | `300` | no | +| [enable\_security\_hub](#input\_enable\_security\_hub) | Enable AWS SecurityHub. | `bool` | `true` | no | +| [enable\_security\_hub\_prowler\_subscription](#input\_enable\_security\_hub\_prowler\_subscription) | Enable a Prowler Subscription. | `bool` | `true` | no | +| [prowler\_cli\_options](#input\_prowler\_cli\_options) | Run Prowler With The Following Command | `string` | `"-q -M json-asff -S -f us-east-1"` | no | +| [prowler\_schedule](#input\_prowler\_schedule) | Run Prowler based on cron schedule | `string` | `"cron(0 0 ? 
* * *)"` | no | +| [select\_region](#input\_select\_region) | Uses the following AWS Region. | `string` | `"us-east-1"` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [account\_id](#output\_account\_id) | n/a | + +## Kickoff Prowler Assessment From Install to Assessment Demo (Link to YouTube) + + + [![Prowler Install](https://img.youtube.com/vi/ShhzIArO8X0/0.jpg)](https://www.youtube.com/watch?v=ShhzIArO8X0 "Prowler Install") diff --git a/util/terraform-kickstarter/tf_install.sh b/util/terraform-kickstarter/tf_install.sh new file mode 100644 index 00000000..d3d71ce8 --- /dev/null +++ b/util/terraform-kickstarter/tf_install.sh @@ -0,0 +1,9 @@ +#!/bin/bash +#AMZN-Linux Terraform Install Script +git clone https://github.com/singergs/prowler.git +git fetch +cd prowler +git checkout -t origin/terraform-kickstart +sudo yum install -y yum-utils +sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo +sudo yum -y install terraform \ No newline at end of file diff --git a/util/terraform-kickstarter/variables.tf b/util/terraform-kickstarter/variables.tf new file mode 100644 index 00000000..fa77cbef --- /dev/null +++ b/util/terraform-kickstarter/variables.tf @@ -0,0 +1,251 @@ +/* +© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non_exclusive, no_charge, royalty_free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross_claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third_party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON_INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [2020] [© 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+
+/*
+Security Hub Import Commands
+
+Run this to import an existing Security Hub account into Terraform state
+(the resource address matches the aws_securityhub_account resource declared
+in main.tf; replace the example account ID with your own):
+
+terraform import aws_securityhub_account.securityhub_resource 123456789012
+
+*/
+
+variable "select_region" {
+  description = "Uses the following AWS Region."
+  type        = string
+  default     = "us-east-1"
+}
+
+variable "enable_security_hub" {
+  description = "Enable AWS SecurityHub."
+  type        = bool
+  default     = true
+}
+
+variable "enable_security_hub_prowler_subscription" {
+  description = "Enable a Prowler Subscription."
+  type        = bool
+  default     = true
+}
+
+variable "prowler_cli_options" {
+  description = "Run Prowler With The Following Command"
+  type        = string
+  default     = "-q -M json-asff -S -f us-east-1"
+}
+
+variable "prowler_schedule" {
+  description = "Run Prowler based on cron schedule"
+  type        = string
+  default     = "cron(0 0 ? * * *)"
+}
+
+variable "codebuild_timeout" {
+  description = "Codebuild timeout setting"
+  type        = number
+  default     = 300
+}
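+
+/*
+Example overrides (illustrative values only, not module defaults; the variable
+names come from this file). Place them in a terraform.tfvars file next to
+main.tf, or pass each one with -var on the terraform apply command line:
+
+  select_region       = "us-west-2"
+  prowler_cli_options = "-q -M json-asff -S -f us-west-2"
+  prowler_schedule    = "cron(0 6 ? * * *)"
+  codebuild_timeout   = 480
+*/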