diff --git a/.gitignore b/.gitignore index c80f14d6..3d433f87 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,13 @@ tags [._]*.un~ # MacOs DS_Store -*.DS_Store \ No newline at end of file +*.DS_Store + +# Prowler output +prowler-output-* + +# JUnit Reports +junit-reports/ + +# VSCode files +.vscode/ diff --git a/LIST_OF_CHECKS_AND_GROUPS.md b/LIST_OF_CHECKS_AND_GROUPS.md deleted file mode 100644 index b31b3c8d..00000000 --- a/LIST_OF_CHECKS_AND_GROUPS.md +++ /dev/null @@ -1,4 +0,0 @@ -``` -./prowler -l # to see all available checks and groups. -./prowler -L # to see all available groups only. -``` diff --git a/README.md b/README.md index bc1a3fa1..751587ed 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ - [Requirements and Installation](#requirements-and-installation) - [Usage](#usage) - [Advanced Usage](#advanced-usage) +- [Security Hub integration](#security-hub-integration) - [Fix](#fix) - [Screenshots](#screenshots) - [Troubleshooting](#troubleshooting) @@ -14,6 +15,7 @@ - [Forensics Ready Checks](#forensics-ready-checks) - [GDPR Checks](#gdpr-checks) - [HIPAA Checks](#hipaa-checks) +- [Trust Boundaries Checks](#trust-boundaries-checks) - [Add Custom Checks](#add-custom-checks) - [Third Party Integrations](#third-party-integrations) - [Full list of checks and groups](/LIST_OF_CHECKS_AND_GROUPS.md) @@ -29,10 +31,10 @@ Read more about [CIS Amazon Web Services Foundations Benchmark v1.2.0 - 05-23-20 ## Features -It covers hardening and security best practices for all AWS regions related to the next groups: +~140 checks controls covering security best practices across all AWS regions and most of AWS services and related to the next groups: -- Identity and Access Management (22 checks) [group1] -- Logging (9 checks) [group2] +- Identity and Access Management [group1] +- Logging [group2] - Monitoring (14 checks) [group3] - Networking (4 checks) [group4] - CIS Level 1 [cislevel1] @@ -41,16 +43,16 @@ It covers hardening and security best practices for all 
AWS regions related to t - Forensics related group of checks [forensics-ready] - GDPR [gdpr] Read more [here](#gdpr-checks) - HIPAA [hipaa] Read more [here](#hipaa-checks) - - -For a comprehensive list and resolution look at the guide on the link above. +- Trust Boundaries [trustboundaries] Read more [here](#trustboundaries-checks) With Prowler you can: - get a colorful or monochrome report -- a CSV format report for diff -- run specific checks without having to run the entire report -- check multiple AWS accounts in parallel +- a CSV, JSON or JSON ASFF format report +- send findings directly to Security Hub +- run specific checks +- check multiple AWS accounts in parallel or sequentially +- and more! Read examples below ## Requirements and Installation @@ -62,9 +64,10 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX pip install awscli ansi2html detect-secrets ``` - AWS-CLI can be also installed it using "brew", "apt", "yum" or manually from , but `ansi2html` and `detect-secrets` has to be installed using `pip`. You will need to install `jq` to get more accuracy in some checks. + AWS-CLI can be also installed it using "brew", "apt", "yum" or manually from , but `ansi2html` and `detect-secrets` has to be installed using `pip`. You will need to install `jq` to get more accuracy in some checks. - Make sure jq is installed (example below with "apt" but use a valid package manager for your OS): + ```sh sudo apt install jq ``` @@ -81,20 +84,23 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh aws configure ``` - or + + or + ```sh export AWS_ACCESS_KEY_ID="ASXXXXXXX" export AWS_SECRET_ACCESS_KEY="XXXXXXXXX" export AWS_SESSION_TOKEN="XXXXXXXXX" ``` -- Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure add SecurityAuditor default policy to your user. 
Policy ARN is +- Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure, add the AWS managed policies, SecurityAudit and ViewOnlyAccess, to the user or role being used. Policy ARNs are: ```sh arn:aws:iam::aws:policy/SecurityAudit + arn:aws:iam::aws:policy/job-function/ViewOnlyAccess ``` - > Additional permissions needed: to make sure Prowler can scan all services included in the group *Extras*, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-additions-policy.json) to the role you are using. + > Additional permissions needed: to make sure Prowler can scan all services included in the group *Extras*, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-security-hub.json). 
## Usage @@ -104,10 +110,10 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler ``` - Use `-l` to list all available checks and group of checks (sections) + Use `-l` to list all available checks and the groups (sections) that reference them + + If you want to avoid installing dependencies run it using Docker: - If you want to avoid installing dependences run it using Docker: - ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest ``` @@ -123,16 +129,21 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh ./prowler -c check310 ``` + With Docker: + ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest "-c check310" ``` - + or multiple checks separated by comma: + ```sh ./prowler -c check310,check722 ``` + or all checks but some of them: + ```sh ./prowler -E check42,check43 ``` @@ -148,14 +159,38 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh ./prowler -g group1 # for iam related checks ``` + or exclude some checks in the group: + ```sh ./prowler -g group4 -E check42,check43 ``` Valid check numbers are based on the AWS CIS Benchmark guide, so 1.1 is check11 and 3.10 is check310 -1. If you want to save your report for later analysis: +### Save your reports + +1. 
If you want to save your report for later analysis thare are different ways, natively (supported text, mono, csv, json, json-asff and junit-xml see note below for more info): + + ```sh + ./prowler -M csv + ``` + + or with multiple formats at the same time: + + ```sh + ./prowler -M csv,json,json-asff + ``` + + or just a group of checks in multiple formats: + + ```sh + ./prowler -g gdpr -M csv,json,json-asff + ``` + + Now `-M` creates a file inside the prowler root directory named `prowler-output-AWSACCOUNTID-YYYYMMDDHHMMSS.format`. You don't have to specify anything else, no pipes, no redirects. + + or just saving the output to a file like below: ```sh ./prowler -M mono > prowler-report.txt @@ -168,18 +203,15 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler | ansi2html -la > report.html ``` - or if you want a pipe-delimited report file, do: + To generate JUnit report files, include the junit-xml format. This can be combined with any other format. Files are written inside a prowler root directory named `junit-reports`: ```sh - ./prowler -M csv > output.psv - ``` - or json formatted output using jq, do: - - ```sh - ./prowler -M json > prowler-output.json + ./prowler -M text,junit-xml ``` - or save your report in a S3 bucket: + >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. + + or save your report in a S3 bucket (this only works for text or mono, for csv, json or json-asff it has to be copied afterwards): ```sh ./prowler -M mono | aws s3 cp - s3://bucket-name/prowler-report.txt @@ -200,7 +232,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX 1. 
For help use: - ``` + ```sh ./prowler -h USAGE: @@ -217,17 +249,19 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, csv (separator is ,; data is on stdout; progress on stderr) + -M output mode: text (default), mono, json, json-asff, junit-xml, csv. They can be used combined comma separated. + (separator is ","; data is on stdout; progress on stderr). -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) - -l list all available checks only (does not perform any check) + -l list all available checks only (does not perform any check). Add -g to only list checks within the specified group -L list all groups (does not perform any check) -e exclude group extras -E execute all tests except a list of specified checks separated by comma (i.e. check21,check31) -b do not print Prowler banner -V show version number & exit -s show scoring report + -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") -x specify external directory with custom checks (i.e. /my/own/checks, files must start by check) -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T @@ -236,29 +270,51 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX (i.e.: ProwlerRole) -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T (i.e.: 43200) + -I External ID to be used when assuming roles (no mandatory) -h this help ``` ## Advanced Usage -### Assume Role: +### Assume Role: Prowler uses the AWS CLI underneath so it uses the same authentication methods. 
However, there are few ways to run Prowler against multiple accounts using IAM Assume Role feature depending on eachg use case. You can just set up your custom profile inside `~/.aws/config` with all needed information about the role to assume then call it with `./prowler -p your-custom-profile`. Additionally you can use `-A 123456789012` and `-R RemoteRoleToAssume` and Prowler will get those temporary credentials using `aws sts assume-role`, set them up as environment variables and run against that given account. +```sh +./prowler -A 123456789012 -R ProwlerRole ``` -./prowler -A 123456789012 -R ProwlerRole + +```sh +./prowler -A 123456789012 -R ProwlerRole -I 123456 ``` > *NOTE 1 about Session Duration*: By default it gets credentials valid for 1 hour (3600 seconds). Depending on the mount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T ` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify *"Maximum CLI/API session duration"* for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session). -> *NOTE 2 about Session Duration*: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html). +> *NOTE 2 about Session Duration*: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html). 
For example, if you want to get only the fails in CSV format from all checks regarding RDS without banner from the AWS Account 123456789012 assuming the role RemoteRoleToAssume and set a fixed session duration of 1h: -``` +```sh ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -b -M cvs -q -g rds ``` +```sh +./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -I 123456 -b -M cvs -q -g rds +``` + +### Assume Role and across all accounts in AWS Organizations: + +If you want to run Prowler or just a check or a group across all accounts of AWS Organizations you can do this: + +First get a list of accounts: +``` +ACCOUNTS_IN_ORGS=$(aws organizations list-accounts --query Accounts[*].Id --output text) +``` +Then run Prowler to assume a role (same in all members) per each account, in this example it is just running one particular check: +``` +for accountId in $ACCOUNTS_IN_ORGS; do ./prowler -A $accountId -R RemoteRoleToAssume -c extra79; done +``` + ### Custom folder for custom checks Flag `-x /my/own/checks` will include any check in that particular directory. To see how to write checks see [Add Custom Checks](#add-custom-checks) section. @@ -267,10 +323,25 @@ Flag `-x /my/own/checks` will include any check in that particular directory. To In order to remove noise and get only FAIL findings there is a `-q` flag that makes Prowler to show and log only FAILs. It can be combined with any other option. -``` +```sh ./prowler -q -M csv -b ``` +## Security Hub integration + +Since version v2.3, Prowler supports natively sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. 
With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. It is as simple as running the command below: + +```sh +./prowler -M json-asff -S +``` + +There are two requirements: + +1. Security Hub must be enabled for the active region from where you are calling Prowler (if no region is used with `-r` then `us-east-1` is used). It can be enabled by calling `aws securityhub enable-security-hub` +2. As mentioned in section "Custom IAM Policy", to allow Prowler to import its findings to AWS Security Hub you need to add the policy below to the role or user running Prowler: + - [iam/prowler-security-hub.json](iam/prowler-security-hub.json) + +>Note: to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours. ## How to fix every FAIL @@ -292,7 +363,7 @@ Check your report and fix the issues following all specific guidelines per check If you are using an STS token for AWS-CLI and your session is expired you probably get this error: -``` +```sh A client error (ExpiredToken) occurred when calling the GenerateCredentialReport operation: The security token included in the request is expired ``` @@ -302,41 +373,61 @@ To fix it, please renew your token by authenticating again to the AWS API, see n To run Prowler using a profile that requires MFA you just need to get the session token before hand. 
Just make sure you use this command: -``` +```sh aws --profile sts get-session-token --duration 129600 --serial-number --token-code --output text - ``` -Once you get your token you can export it as environment variable: ``` + +Once you get your token you can export it as environment variable: + +```sh export AWS_PROFILE=YOUR_AWS_PROFILE export AWS_SESSION_TOKEN=YOUR_NEW_TOKEN AWS_SECRET_ACCESS_KEY=YOUR_SECRET export AWS_ACCESS_KEY_ID=YOUR_KEY ``` + or set manually up your `~/.aws/credentials` file properly. There are some helpfull tools to save time in this process like [aws-mfa-script](https://github.com/asagage/aws-mfa-script) or [aws-cli-mfa](https://github.com/sweharris/aws-cli-mfa). +### AWS Managed IAM Policies + +[ViewOnlyAccess](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#jf_view-only-user) +- Use case: This user can view a list of AWS resources and basic metadata in the account across all services. The user cannot read resource content or metadata that goes beyond the quota and list information for resources. +- Policy description: This policy grants List*, Describe*, Get*, View*, and Lookup* access to resources for most AWS services. To see what actions this policy includes for each service, see [ViewOnlyAccess Permissions](https://console.aws.amazon.com/iam/home#policies/arn:aws:iam::aws:policy/job-function/ViewOnlyAccess) + +[SecurityAudit](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#jf_security-auditor) +- Use case: This user monitors accounts for compliance with security requirements. This user can access logs and events to investigate potential security breaches or potential malicious activity. +- Policy description: This policy grants permissions to view configuration data for many AWS services and to review their logs. 
To see what actions this policy includes for each service, see [SecurityAudit Permissions](https://console.aws.amazon.com/iam/home#policies/arn:aws:iam::aws:policy/SecurityAudit) + ### Custom IAM Policy -Some new and specific checks require Prowler to inherit more permissions than SecurityAudit to work properly. In addition to the AWS managed policy "SecurityAudit" for the role you use for checks you may need to create a custom policy with a few more permissions (get and list and additional services mostly). Here you go a good example for a "ProwlerReadOnlyPolicy" (see below bootstrap script for set it up): +[Prowler-Additions-Policy](iam/prowler-additions-policy.json) -[iam/prowler-additions-policy.json](iam/prowler-additions-policy.json) +Some new and specific checks require Prowler to inherit more permissions than SecurityAudit and ViewOnlyAccess to work properly. In addition to the AWS managed policies, "SecurityAudit" and "ViewOnlyAccess", the user/role you use for checks may need to be granted a custom policy with a few more read-only permissions (to support additional services mostly). Here is an example policy with the additional rights, "Prowler-Additions-Policy" (see below bootstrap script for set it up): -> Note: Action `ec2:get*` is included in "ProwlerReadOnlyPolicy" policy above, that includes `get-password-data`, type `aws ec2 get-password-data help` to better understand its implications. +- [iam/prowler-additions-policy.json](iam/prowler-additions-policy.json) + +[Prowler-Security-Hub Policy](iam/prowler-security-hub.json) + +Allows Prowler to import its findings to [AWS Security Hub](https://aws.amazon.com/security-hub). More information in [Security Hub integration](#security-hub-integration): + +- [iam/prowler-security-hub.json](iam/prowler-security-hub.json) ### Bootstrap Script -Quick bash script to set up a "prowler" IAM user with "SecurityAudit" group with the required permissions (including "ProwlerReadOnlyPolicy"). 
To run the script below, you need user with administrative permissions; set the `AWS_DEFAULT_PROFILE` to use that account: +Quick bash script to set up a "prowler" IAM user with "SecurityAudit" and "ViewOnlyAccess" group with the required permissions (including "Prowler-Additions-Policy"). To run the script below, you need user with administrative permissions; set the `AWS_DEFAULT_PROFILE` to use that account: ```sh export AWS_DEFAULT_PROFILE=default export ACCOUNT_ID=$(aws sts get-caller-identity --query 'Account' | tr -d '"') -aws iam create-group --group-name SecurityAudit -aws iam create-policy --policy-name ProwlerReadOnlyPolicy --policy-document file://$(pwd)/iam/prowler-additions-policy.json -aws iam attach-group-policy --group-name SecurityAudit --policy-arn arn:aws:iam::aws:policy/SecurityAudit -aws iam attach-group-policy --group-name SecurityAudit --policy-arn arn:aws:iam::${ACCOUNT_ID}:policy/ProwlerReadOnlyPolicy +aws iam create-group --group-name Prowler +aws iam create-policy --policy-name Prowler-Additions-Policy --policy-document file://$(pwd)/iam/prowler-additions-policy.json +aws iam attach-group-policy --group-name Prowler --policy-arn arn:aws:iam::aws:policy/SecurityAudit +aws iam attach-group-policy --group-name Prowler --policy-arn arn:aws:iam::aws:policy/job-function/ViewOnlyAccess +aws iam attach-group-policy --group-name Prowler --policy-arn arn:aws:iam::${ACCOUNT_ID}:policy/Prowler-Additions-Policy aws iam create-user --user-name prowler -aws iam add-user-to-group --user-name prowler --group-name SecurityAudit +aws iam add-user-to-group --user-name prowler --group-name Prowler aws iam create-access-key --user-name prowler unset ACCOUNT_ID AWS_DEFAULT_PROFILE ``` @@ -351,7 +442,7 @@ Some of these checks look for publicly facing resources may not actually be full To list all existing checks please run the command below: -``` +```sh ./prowler -l ``` @@ -369,6 +460,13 @@ or to run just one of the checks: ./prowler -c extraNUMBER ``` +or to 
run multiple extras in one go: + +```sh +./prowler -c extraNumber,extraNumber +``` + + ## Forensics Ready Checks With this group of checks, Prowler looks if each service with logging or audit capabilities has them enabled to ensure all needed evidences are recorded and collected for an eventual digital forensic investigation in case of incident. List of checks part of this group (you can also see all groups with `./prowler -L`). The list of checks can be seen in the group file at: @@ -400,6 +498,7 @@ With this group of checks, Prowler shows results of controls related to the "Sec More information on the original PR is [here](https://github.com/toniblyx/prowler/issues/227). ### Note on Business Associate Addendum's (BAA) + Under the HIPAA regulations, cloud service providers (CSPs) such as AWS are considered business associates. The Business Associate Addendum (BAA) is an AWS contract that is required under HIPAA rules to ensure that AWS appropriately safeguards protected health information (PHI). The BAA also serves to clarify and limit, as appropriate, the permissible uses and disclosures of PHI by AWS, based on the relationship between AWS and our customers, and the activities or services being performed by AWS. Customers may use any AWS service in an account designated as a HIPAA account, but they should only process, store, and transmit protected health information (PHI) in the HIPAA-eligible services defined in the Business Associate Addendum (BAA). For the latest list of HIPAA-eligible AWS services, see [HIPAA Eligible Services Reference](https://aws.amazon.com/compliance/hipaa-eligible-services-reference/). More information on AWS & HIPAA can be found [here](https://aws.amazon.com/compliance/hipaa-compliance/) @@ -414,6 +513,55 @@ The `hipaa` group of checks uses existing and extra checks. 
To get a HIPAA repor ./prowler -g hipaa ``` +## Trust Boundaries Checks + +### Definition and Terms + +The term "trust boundary" is originating from the threat modelling process and the most popular contributor Adam Shostack and author of "Threat Modeling: Designing for Security" defines it as following ([reference](https://adam.shostack.org/uncover.html)): + +> Trust boundaries are perhaps the most subjective of all: these represent the border between trusted and untrusted elements. Trust is complex. You might trust your mechanic with your car, your dentist with your teeth, and your banker with your money, but you probably don't trust your dentist to change your spark plugs. + +AWS is made to be flexible for service links within and between different AWS accounts, we all know that. + +This group of checks helps to analyse a particular AWS account (subject) on existing links to other AWS accounts across various AWS services, in order to identify untrusted links. + +### Run +To give it a quick shot just call: + +```sh +./prowler -g trustboundaries +``` + +### Scenarios + +Currently this check group supports two different scenarios: + +1. Single account environment: no action required, the configuration is happening automatically for you. +2. Multi account environment: in case you environment has multiple trusted and known AWS accounts you maybe want to append them manually to [groups/group16_trustboundaries](groups/group16_trustboundaries) as a space separated list into `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable, then just run prowler. 
+ +### Coverage + +Current coverage of Amazon Web Service (AWS) taken from [here](https://docs.aws.amazon.com/whitepapers/latest/aws-overview/introduction.html): +| Topic | Service | Trust Boundary | +|---------------------------------|------------|---------------------------------------------------------------------------| +| Networking and Content Delivery | Amazon VPC | VPC endpoints connections ([extra786](checks/check_extra786)) | +| | | VPC endpoints whitelisted principals ([extra787](checks/check_extra787)) | + +All ideas or recommendations to extend this group are very welcome [here](https://github.com/toniblyx/prowler/issues/new/choose). + +### Detailed Explanation of the Concept + +The diagrams depict two common scenarios, single account and multi account environments. +Every circle represents one AWS account. +The dashed line represents the trust boundary, that separates trust and untrusted AWS accounts. +The arrow simply describes the direction of the trust, however the data can potentially flow in both directions. + +Single Account environment assumes that only the AWS account subject to this analysis is trusted. However there is a chance that two VPCs are existing within that one AWS account which are still trusted as a self reference. +![single-account-environment](/docs/images/prowler-single-account-environment.png) + +Multi Account environments assumes a minimum of two trusted or known accounts. For this particular example all trusted and known accounts will be tested. Therefore `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable in [groups/group16_trustboundaries](groups/group16_trustboundaries) should include all trusted accounts Account #A, Account #B, Account #C, and Account #D in order to finally raise Account #E and Account #F for being untrusted or unknown. 
+![multi-account-environment](/docs/images/prowler-multi-account-environment.png) + ## Add Custom Checks In order to add any new check feel free to create a new extra check in the extras group or other group. To do so, you will need to follow these steps: @@ -434,7 +582,7 @@ In order to add any new check feel free to create a new extra check in the extra ## Third Party Integrations -### AWS Security Hub +### AWS Security Hub There is a blog post about that integration in the AWS Security blog here @@ -459,3 +607,4 @@ NOTE: If you are interested in using Prowler for commercial purposes remember th **I'm not related anyhow with CIS organization, I just write and maintain Prowler to help companies over the world to make their cloud infrastructure more secure.** If you want to contact me visit + diff --git a/checks/check11 b/checks/check11 index f34ebed5..59e982ef 100644 --- a/checks/check11 +++ b/checks/check11 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check11="1.1,1.01" +CHECK_ID_check11="1.1" CHECK_TITLE_check11="[check11] Avoid the use of the root account (Scored)" CHECK_SCORED_check11="SCORED" CHECK_TYPE_check11="LEVEL1" +CHECK_ASFF_TYPE_check11="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check101="check11" check11(){ diff --git a/checks/check110 b/checks/check110 index 6f34b0f2..dabb80dc 100644 --- a/checks/check110 +++ b/checks/check110 @@ -12,6 +12,7 @@ CHECK_ID_check110="1.10" CHECK_TITLE_check110="[check110] Ensure IAM password policy prevents password reuse: 24 or greater (Scored)" CHECK_SCORED_check110="SCORED" CHECK_TYPE_check110="LEVEL1" +CHECK_ASFF_TYPE_check110="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check110="check110" check110(){ diff --git a/checks/check111 b/checks/check111 index ea652b63..83575d29 100644 --- a/checks/check111 +++ b/checks/check111 @@ -12,6 +12,7 @@ CHECK_ID_check111="1.11" CHECK_TITLE_check111="[check111] Ensure IAM password policy expires passwords within 90 days or less (Scored)" CHECK_SCORED_check111="SCORED" CHECK_TYPE_check111="LEVEL1" +CHECK_ASFF_TYPE_check111="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check111="check111" check111(){ diff --git a/checks/check112 b/checks/check112 index f6fa9481..a9825ee1 100644 --- a/checks/check112 +++ b/checks/check112 @@ -12,6 +12,7 @@ CHECK_ID_check112="1.12" CHECK_TITLE_check112="[check112] Ensure no root account access key exists (Scored)" CHECK_SCORED_check112="SCORED" CHECK_TYPE_check112="LEVEL1" +CHECK_ASFF_TYPE_check112="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check112="check112" check112(){ @@ -22,11 +23,11 @@ check112(){ if [ "$ROOTKEY1" == "false" ];then textPass "No access key 1 found for root" else - textFail "Found access 
key 1 for root " + textFail "Found access key 1 for root" fi if [ "$ROOTKEY2" == "false" ];then textPass "No access key 2 found for root" else - textFail "Found access key 2 for root " + textFail "Found access key 2 for root" fi } diff --git a/checks/check113 b/checks/check113 index 481daeef..1e034c8d 100644 --- a/checks/check113 +++ b/checks/check113 @@ -12,6 +12,7 @@ CHECK_ID_check113="1.13" CHECK_TITLE_check113="[check113] Ensure MFA is enabled for the root account (Scored)" CHECK_SCORED_check113="SCORED" CHECK_TYPE_check113="LEVEL1" +CHECK_ASFF_TYPE_check113="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check113="check113" check113(){ @@ -20,6 +21,6 @@ check113(){ if [ "$COMMAND113" == "1" ]; then textPass "Virtual MFA is enabled for root" else - textFail "MFA is not ENABLED for root account " + textFail "MFA is not ENABLED for root account" fi } diff --git a/checks/check114 b/checks/check114 index 99391d33..d1777c10 100644 --- a/checks/check114 +++ b/checks/check114 @@ -12,19 +12,20 @@ CHECK_ID_check114="1.14" CHECK_TITLE_check114="[check114] Ensure hardware MFA is enabled for the root account (Scored)" CHECK_SCORED_check114="SCORED" CHECK_TYPE_check114="LEVEL2" +CHECK_ASFF_TYPE_check114="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check114="check114" check114(){ # "Ensure hardware MFA is enabled for the root account (Scored)" COMMAND113=$($AWSCLI iam get-account-summary $PROFILE_OPT --region $REGION --output json --query 'SummaryMap.AccountMFAEnabled') if [ "$COMMAND113" == "1" ]; then - COMMAND114=$($AWSCLI iam list-virtual-mfa-devices $PROFILE_OPT --region $REGION --output text --assignment-status Assigned --query 'VirtualMFADevices[*].[SerialNumber]' | grep '^arn:aws:iam::[0-9]\{12\}:mfa/root-account-mfa-device$') + COMMAND114=$($AWSCLI iam list-virtual-mfa-devices $PROFILE_OPT --region $REGION --output text 
--assignment-status Assigned --query 'VirtualMFADevices[*].[SerialNumber]' | grep '^arn:${AWS_PARTITION}:iam::[0-9]\{12\}:mfa/root-account-mfa-device$') if [[ "$COMMAND114" ]]; then textFail "Only Virtual MFA is enabled for root" else - textPass "Hardware MFA is enabled for root " + textPass "Hardware MFA is enabled for root" fi else - textFail "MFA is not ENABLED for root account " + textFail "MFA is not ENABLED for root account" fi } diff --git a/checks/check115 b/checks/check115 index 848bd440..c52db37e 100644 --- a/checks/check115 +++ b/checks/check115 @@ -12,6 +12,7 @@ CHECK_ID_check115="1.15" CHECK_TITLE_check115="[check115] Ensure security questions are registered in the AWS account (Not Scored)" CHECK_SCORED_check115="NOT_SCORED" CHECK_TYPE_check115="LEVEL1" +CHECK_ASFF_TYPE_check115="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check115="check115" check115(){ diff --git a/checks/check116 b/checks/check116 index ad51e34c..c20abbf7 100644 --- a/checks/check116 +++ b/checks/check116 @@ -12,6 +12,8 @@ CHECK_ID_check116="1.16" CHECK_TITLE_check116="[check116] Ensure IAM policies are attached only to groups or roles (Scored)" CHECK_SCORED_check116="SCORED" CHECK_TYPE_check116="LEVEL1" +CHECK_ASFF_TYPE_check116="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check116="AwsIamUser" CHECK_ALTERNATE_check116="check116" check116(){ @@ -21,16 +23,16 @@ check116(){ for user in $LIST_USERS;do USER_POLICY=$($AWSCLI iam list-attached-user-policies --output text $PROFILE_OPT --region $REGION --user-name $user) if [[ $USER_POLICY ]]; then - textFail "$user has managed policy directly attached " + textFail "$user has managed policy directly attached" C116_NUM_USERS=$(expr $C116_NUM_USERS + 1) fi USER_POLICY=$($AWSCLI iam list-user-policies --output text $PROFILE_OPT --region $REGION --user-name $user) if [[ $USER_POLICY ]]; then - 
textFail "$user has inline policy directly attached " + textFail "$user has inline policy directly attached" C116_NUM_USERS=$(expr $C116_NUM_USERS + 1) fi done if [[ $C116_NUM_USERS -eq 0 ]]; then - textPass "No policies attached to users." + textPass "No policies attached to users" fi } diff --git a/checks/check117 b/checks/check117 index 69f897c9..1264c99e 100644 --- a/checks/check117 +++ b/checks/check117 @@ -12,6 +12,7 @@ CHECK_ID_check117="1.17" CHECK_TITLE_check117="[check117] Maintain current contact details (Not Scored)" CHECK_SCORED_check117="NOT_SCORED" CHECK_TYPE_check117="LEVEL1" +CHECK_ASFF_TYPE_check117="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check117="check117" check117(){ diff --git a/checks/check118 b/checks/check118 index 4217fbf1..abd76906 100644 --- a/checks/check118 +++ b/checks/check118 @@ -12,6 +12,7 @@ CHECK_ID_check118="1.18" CHECK_TITLE_check118="[check118] Ensure security contact information is registered (Not Scored)" CHECK_SCORED_check118="NOT_SCORED" CHECK_TYPE_check118="LEVEL1" +CHECK_ASFF_TYPE_check118="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check118="check118" check118(){ diff --git a/checks/check119 b/checks/check119 index 4dbc7fd0..5a0a90d7 100644 --- a/checks/check119 +++ b/checks/check119 @@ -12,6 +12,8 @@ CHECK_ID_check119="1.19" CHECK_TITLE_check119="[check119] Ensure IAM instance roles are used for AWS resource access from instances (Not Scored)" CHECK_SCORED_check119="NOT_SCORED" CHECK_TYPE_check119="LEVEL2" +CHECK_ASFF_TYPE_check119="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check119="AwsEc2Instance" CHECK_ALTERNATE_check119="check119" check119(){ @@ -25,9 +27,9 @@ check119(){ if [[ $STATE_NAME != "terminated" ]]; then PROFILEARN=$(echo $EC2_DATA | jq -r --arg i "$instance" 
'select(.InstanceId==$i)|.ProfileArn') if [[ $PROFILEARN == "null" ]]; then - textFail "$regx: Instance $instance not associated with an instance role." $regx + textFail "$regx: Instance $instance not associated with an instance role" $regx else - textPass "$regx: Instance $instance associated with role ${PROFILEARN##*/}." $regx + textPass "$regx: Instance $instance associated with role ${PROFILEARN##*/}" $regx fi fi done diff --git a/checks/check12 b/checks/check12 index 6e82f10c..adccb3c1 100644 --- a/checks/check12 +++ b/checks/check12 @@ -8,16 +8,18 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check12="1.2,1.02" +CHECK_ID_check12="1.2" CHECK_TITLE_check12="[check12] Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Scored)" CHECK_SCORED_check12="SCORED" CHECK_TYPE_check12="LEVEL1" +CHECK_ASFF_TYPE_check12="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check12="AwsIamUser" CHECK_ALTERNATE_check102="check12" check12(){ # "Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Scored)" # List users with password enabled - COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep true | awk '{ print $1 }') + COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep -F ' true$' | awk '{ print $1 }') COMMAND12=$( for i in $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED; do cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$8 }' |grep "^$i " |grep false | awk '{ print $1 }' diff --git a/checks/check120 b/checks/check120 index 2983642f..dd1d4fc5 100644 --- a/checks/check120 +++ b/checks/check120 @@ -12,6 +12,8 @@ CHECK_ID_check120="1.20" CHECK_TITLE_check120="[check120] Ensure a support role has been created to manage incidents with AWS Support (Scored)" 
CHECK_SCORED_check120="SCORED" CHECK_TYPE_check120="LEVEL1" +CHECK_ASFF_TYPE_check120="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check120="AwsIamRole" CHECK_ALTERNATE_check120="check120" check120(){ @@ -28,7 +30,7 @@ check120(){ # textInfo "User $user has support access via $policyarn" # done else - textFail "Support Policy not applied to any Role " + textFail "Support Policy not applied to any Role" fi done else diff --git a/checks/check121 b/checks/check121 index 64032c05..3fbd5535 100644 --- a/checks/check121 +++ b/checks/check121 @@ -12,6 +12,8 @@ CHECK_ID_check121="1.21" CHECK_TITLE_check121="[check121] Do not setup access keys during initial user setup for all IAM users that have a console password (Not Scored)" CHECK_SCORED_check121="NOT_SCORED" CHECK_TYPE_check121="LEVEL1" +CHECK_ASFF_TYPE_check121="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check121="AwsIamUser" CHECK_ALTERNATE_check121="check121" check121(){ @@ -19,22 +21,24 @@ check121(){ LIST_USERS=$($AWSCLI iam list-users --query 'Users[*].UserName' --output text $PROFILE_OPT --region $REGION) # List of USERS with KEY1 last_used_date as N/A LIST_USERS_KEY1_NA=$(for user in $LIST_USERS; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$11 }'|grep N/A |awk '{ print $1 }'; done) - LIST_USERS_KEY1_ACTIVE=$(for user in $LIST_USERS_KEY1_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$9 }'|grep "true$"|awk '{ print $1 }'|sed 's/[[:blank:]]+/,/g' ; done) + # List of USERS with KEY1 active, last_used_date as N/A and have a console password + LIST_USERS_KEY1_ACTIVE=$(for user in $LIST_USERS_KEY1_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$9 }'|grep "true true$"|awk '{ print $1 }'|sed 's/[[:blank:]]+/,/g' ; done) if [[ $LIST_USERS_KEY1_ACTIVE ]]; then for user in $LIST_USERS_KEY1_ACTIVE; do - textFail 
"$user has never used Access Key 1" + textFail "User $user has never used access key 1" done else - textPass "No users found with Access Key 1 never used" + textPass "No users found with access key 1 never used" fi # List of USERS with KEY2 last_used_date as N/A LIST_USERS_KEY2_NA=$(for user in $LIST_USERS; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$16 }'|grep N/A |awk '{ print $1 }' ; done) - LIST_USERS_KEY2_ACTIVE=$(for user in $LIST_USERS_KEY2_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$14 }'|grep "true$" |awk '{ print $1 }' ; done) + # List of USERS with KEY2 active, last_used_date as N/A and have a console password + LIST_USERS_KEY2_ACTIVE=$(for user in $LIST_USERS_KEY2_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$14 }'|grep "true true$" |awk '{ print $1 }' ; done) if [[ $LIST_USERS_KEY2_ACTIVE ]]; then for user in $LIST_USERS_KEY2_ACTIVE; do - textFail "$user has never used Access Key 2" + textFail "User $user has never used access key 2" done else - textPass "No users found with Access Key 2 never used" + textPass "No users found with access key 2 never used" fi } diff --git a/checks/check122 b/checks/check122 index 0ae9e6c3..1c4fdec2 100644 --- a/checks/check122 +++ b/checks/check122 @@ -12,6 +12,8 @@ CHECK_ID_check122="1.22" CHECK_TITLE_check122="[check122] Ensure IAM policies that allow full \"*:*\" administrative privileges are not created (Scored)" CHECK_SCORED_check122="SCORED" CHECK_TYPE_check122="LEVEL1" +CHECK_ASFF_TYPE_check122="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check122="AwsIamPolicy" CHECK_ALTERNATE_check122="check122" check122(){ diff --git a/checks/check13 b/checks/check13 index aeab1944..929a6aa2 100644 --- a/checks/check13 +++ b/checks/check13 @@ -8,32 +8,14 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check13="1.3,1.03" +CHECK_ID_check13="1.3" CHECK_TITLE_check13="[check13] Ensure credentials unused for 90 days or greater are disabled (Scored)" CHECK_SCORED_check13="SCORED" CHECK_TYPE_check13="LEVEL1" +CHECK_ASFF_TYPE_check13="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check13="AwsIamUser" CHECK_ALTERNATE_check103="check13" check13(){ - # "Ensure credentials unused for 90 days or greater are disabled (Scored)" - COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep true | awk '{ print $1 }') - # Only check Password last used for users with password enabled - if [[ $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED ]]; then - for i in $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED; do - DATEUSED=$($AWSCLI iam list-users --query "Users[?UserName=='$i'].PasswordLastUsed" --output text $PROFILE_OPT --region $REGION | cut -d'T' -f1) - if [ "$DATEUSED" == "" ] - then - textFail "User \"$i\" has not logged in during the last 90 days " - else - HOWOLDER=$(how_older_from_today $DATEUSED) - if [ $HOWOLDER -gt "90" ];then - textFail "User \"$i\" has not logged in during the last 90 days " - else - textPass "User \"$i\" found with credentials used in the last 90 days" - fi - fi - done - else - textPass "No users found with password enabled" - fi + check_creds_used_in_last_days 90 } diff --git a/checks/check14 b/checks/check14 index 88d14407..438b8364 100644 --- a/checks/check14 +++ b/checks/check14 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check14="1.4,1.04" +CHECK_ID_check14="1.4" CHECK_TITLE_check14="[check14] Ensure access keys are rotated every 90 days or less (Scored)" CHECK_SCORED_check14="SCORED" CHECK_TYPE_check14="LEVEL1" +CHECK_ASFF_TYPE_check14="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check14="AwsIamUser" CHECK_ALTERNATE_check104="check14" check14(){ @@ -28,15 +30,15 @@ check14(){ HOWOLDER=$(how_older_from_today $DATEROTATED1) if [ $HOWOLDER -gt "90" ];then - textFail " $user has not rotated access key1 in over 90 days." + textFail "$user has not rotated access key 1 in over 90 days" C14_NUM_USERS1=$(expr $C14_NUM_USERS1 + 1) fi done if [[ $C14_NUM_USERS1 -eq 0 ]]; then - textPass "No users with access key 1 older than 90 days." + textPass "No users with access key 1 older than 90 days" fi else - textPass "No users with access key 1." + textPass "No users with access key 1" fi if [[ $LIST_OF_USERS_WITH_ACCESS_KEY2 ]]; then @@ -46,14 +48,14 @@ check14(){ DATEROTATED2=$(cat $TEMP_REPORT_FILE | grep -v user_creation_time | grep "^${user},"| awk -F, '{ print $15 }' | grep -v "N/A" | awk -F"T" '{ print $1 }') HOWOLDER=$(how_older_from_today $DATEROTATED2) if [ $HOWOLDER -gt "90" ];then - textFail " $user has not rotated access key2 in over 90 days. " + textFail "$user has not rotated access key 2 in over 90 days" C14_NUM_USERS2=$(expr $C14_NUM_USERS2 + 1) fi done if [[ $C14_NUM_USERS2 -eq 0 ]]; then - textPass "No users with access key 2 older than 90 days." + textPass "No users with access key 2 older than 90 days" fi else - textPass "No users with access key 2." + textPass "No users with access key 2" fi } diff --git a/checks/check15 b/checks/check15 index aedcba17..bfc31270 100644 --- a/checks/check15 +++ b/checks/check15 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check15="1.5,1.05" +CHECK_ID_check15="1.5" CHECK_TITLE_check15="[check15] Ensure IAM password policy requires at least one uppercase letter (Scored)" CHECK_SCORED_check15="SCORED" CHECK_TYPE_check15="LEVEL1" +CHECK_ASFF_TYPE_check15="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check105="check15" check15(){ diff --git a/checks/check16 b/checks/check16 index de224521..881b9a83 100644 --- a/checks/check16 +++ b/checks/check16 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check16="1.6,1.06" +CHECK_ID_check16="1.6" CHECK_TITLE_check16="[check16] Ensure IAM password policy require at least one lowercase letter (Scored)" CHECK_SCORED_check16="SCORED" CHECK_TYPE_check16="LEVEL1" +CHECK_ASFF_TYPE_check16="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check106="check16" check16(){ diff --git a/checks/check17 b/checks/check17 index f344c759..ad8faecd 100644 --- a/checks/check17 +++ b/checks/check17 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check17="1.7,1.07" +CHECK_ID_check17="1.7" CHECK_TITLE_check17="[check17] Ensure IAM password policy require at least one symbol (Scored)" CHECK_SCORED_check17="SCORED" CHECK_TYPE_check17="LEVEL1" +CHECK_ASFF_TYPE_check17="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check107="check17" check17(){ diff --git a/checks/check18 b/checks/check18 index 412de291..bec51868 100644 --- a/checks/check18 +++ b/checks/check18 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check18="1.8,1.08" +CHECK_ID_check18="1.8" CHECK_TITLE_check18="[check18] Ensure IAM password policy require at least one number (Scored)" CHECK_SCORED_check18="SCORED" CHECK_TYPE_check18="LEVEL1" +CHECK_ASFF_TYPE_check18="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check108="check18" check18(){ diff --git a/checks/check19 b/checks/check19 index 6e924ae8..28199d77 100644 --- a/checks/check19 +++ b/checks/check19 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check19="1.9,1.09" +CHECK_ID_check19="1.9" CHECK_TITLE_check19="[check19] Ensure IAM password policy requires minimum length of 14 or greater (Scored)" CHECK_SCORED_check19="SCORED" CHECK_TYPE_check19="LEVEL1" +CHECK_ASFF_TYPE_check19="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check109="check19" check19(){ diff --git a/checks/check21 b/checks/check21 index 383578e3..d354bad5 100644 --- a/checks/check21 +++ b/checks/check21 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check21="2.1,2.01" +CHECK_ID_check21="2.1" CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" +CHECK_ASFF_TYPE_check21="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check21="AwsCloudTrailTrail" CHECK_ALTERNATE_check201="check21" check21(){ @@ -20,20 +22,24 @@ check21(){ for regx in $REGIONS; do LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text --no-include-shadow-trails) if [[ $LIST_OF_TRAILS ]];then - for trail in $LIST_OF_TRAILS;do - trail_count=$((trail_count + 1)) - MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) - if [[ "$MULTIREGION_TRAIL_STATUS" == 'False' ]];then - textFail "$trail trail in $regx is not enabled in multi region mode" - else - textPass "$trail trail in $regx is enabled for all regions" - fi - done + for trail in $LIST_OF_TRAILS;do + trail_count=$((trail_count + 1)) + MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) + if [[ "$MULTIREGION_TRAIL_STATUS" == 'False' ]];then + textFail "$trail trail in $regx is not enabled in multi region mode" + else + textPass "$trail trail in $regx is enabled for all regions" + fi + done fi done if [[ $trail_count == 0 ]]; then - textFail "No CloudTrail trails were found in the account" + ORG_TRAIL=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region us-east-1 | jq '.trailList[] | select(.IsMultiRegionTrail and .IsOrganizationTrail) | .Name' | sed 's/"//g') + if [[ $ORG_TRAIL != "" ]]; then + textPass "$ORG_TRAIL trail in $regx is enabled for all regions" + else + textFail "No CloudTrail trails were found in the 
account" + fi fi -} - +} \ No newline at end of file diff --git a/checks/check22 b/checks/check22 index 18b13742..d302f128 100644 --- a/checks/check22 +++ b/checks/check22 @@ -8,15 +8,17 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check22="2.2,2.02" +CHECK_ID_check22="2.2" CHECK_TITLE_check22="[check22] Ensure CloudTrail log file validation is enabled (Scored)" CHECK_SCORED_check22="SCORED" CHECK_TYPE_check22="LEVEL2" +CHECK_ASFF_TYPE_check22="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check22="AwsCloudTrailTrail" CHECK_ALTERNATE_check202="check22" check22(){ # "Ensure CloudTrail log file validation is enabled (Scored)" - + for regx in $REGIONS; do LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text --no-include-shadow-trails) if [[ $LIST_OF_TRAILS ]];then diff --git a/checks/check23 b/checks/check23 index 53d1b6f6..9614fe68 100644 --- a/checks/check23 +++ b/checks/check23 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check23="2.3,2.03" +CHECK_ID_check23="2.3" CHECK_TITLE_check23="[check23] Ensure the S3 bucket CloudTrail logs to is not publicly accessible (Scored)" CHECK_SCORED_check23="SCORED" CHECK_TYPE_check23="LEVEL1" +CHECK_ASFF_TYPE_check23="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check23="AwsS3Bucket" CHECK_ALTERNATE_check203="check23" check23(){ @@ -21,7 +23,7 @@ check23(){ for bucket in $CLOUDTRAILBUCKET;do CLOUDTRAILBUCKET_HASALLPERMISIONS=$($AWSCLI s3api get-bucket-acl --bucket $bucket --query 'Grants[?Grantee.URI==`http://acs.amazonaws.com/groups/global/AllUsers`]' $PROFILE_OPT --region $REGION --output text 2>&1) if [[ $(echo "$CLOUDTRAILBUCKET_HASALLPERMISIONS" | grep AccessDenied) ]]; then - textFail "Access Denied Trying to Get Bucket Acl for $bucket" + textInfo "Access Denied Trying to Get Bucket Acl for $bucket" continue fi if [[ $CLOUDTRAILBUCKET_HASALLPERMISIONS ]]; then diff --git a/checks/check24 b/checks/check24 index 35185035..1fb3c133 100644 --- a/checks/check24 +++ b/checks/check24 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check24="2.4,2.04" +CHECK_ID_check24="2.4" CHECK_TITLE_check24="[check24] Ensure CloudTrail trails are integrated with CloudWatch Logs (Scored)" CHECK_SCORED_check24="SCORED" CHECK_TYPE_check24="LEVEL1" +CHECK_ASFF_TYPE_check24="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check24="AwsCloudTrailTrail" CHECK_ALTERNATE_check204="check24" check24(){ diff --git a/checks/check25 b/checks/check25 index d8d81732..456223fa 100644 --- a/checks/check25 +++ b/checks/check25 @@ -8,10 +8,11 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check25="2.5,2.05" +CHECK_ID_check25="2.5" CHECK_TITLE_check25="[check25] Ensure AWS Config is enabled in all regions (Scored)" CHECK_SCORED_check25="SCORED" CHECK_TYPE_check25="LEVEL1" +CHECK_ASFF_TYPE_check25="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check205="check25" check25(){ diff --git a/checks/check26 b/checks/check26 index 83395527..da563445 100644 --- a/checks/check26 +++ b/checks/check26 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check26="2.6,2.06" +CHECK_ID_check26="2.6" CHECK_TITLE_check26="[check26] Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket (Scored)" CHECK_SCORED_check26="SCORED" CHECK_TYPE_check26="LEVEL1" +CHECK_ASFF_TYPE_check26="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check26="AwsS3Bucket" CHECK_ALTERNATE_check206="check26" check26(){ @@ -27,9 +29,9 @@ check26(){ if [[ $CLOUDTRAILBUCKET ]]; then bucket=$CLOUDTRAILBUCKET if [ "$CLOUDTRAIL_ACCOUNT_ID" == "$ACCOUNT_NUM" ]; then - CLOUDTRAILBUCKET_LOGENABLED=$($AWSCLI s3api get-bucket-logging --bucket $bucket $PROFILE_OPT --region $REGION --query 'LoggingEnabled.TargetBucket' --output text 2>&1) + CLOUDTRAILBUCKET_LOGENABLED=$($AWSCLI s3api get-bucket-logging --bucket $bucket $PROFILE_OPT --region $REGION --query 'LoggingEnabled.TargetBucket' 2>&1) if [[ $(echo "$CLOUDTRAILBUCKET_LOGENABLED" | grep AccessDenied) ]]; then - textFail "Access Denied Trying to Get Bucket Logging for $bucket" + textInfo "Access Denied Trying to Get Bucket Logging for $bucket" continue fi if [[ $CLOUDTRAILBUCKET_LOGENABLED != "null" ]]; then diff --git a/checks/check27 b/checks/check27 index f32b7bfe..6f5d81a3 100644 --- a/checks/check27 +++ b/checks/check27 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. 
If not, see . -CHECK_ID_check27="2.7,2.07" +CHECK_ID_check27="2.7" CHECK_TITLE_check27="[check27] Ensure CloudTrail logs are encrypted at rest using KMS CMKs (Scored)" CHECK_SCORED_check27="SCORED" CHECK_TYPE_check27="LEVEL2" +CHECK_ASFF_TYPE_check27="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check27="AwsCloudTrailTrail" CHECK_ALTERNATE_check207="check27" check27(){ diff --git a/checks/check28 b/checks/check28 index bec0e234..23c797da 100644 --- a/checks/check28 +++ b/checks/check28 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check28="2.8,2.08" +CHECK_ID_check28="2.8" CHECK_TITLE_check28="[check28] Ensure rotation for customer created CMKs is enabled (Scored)" CHECK_SCORED_check28="SCORED" CHECK_TYPE_check28="LEVEL2" +CHECK_ASFF_TYPE_check28="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check28="AwsKmsKey" CHECK_ALTERNATE_check208="check28" check28(){ @@ -27,7 +29,7 @@ check28(){ for key in $CHECK_KMS_KEYLIST_NO_DEFAULT; do CHECK_KMS_KEY_TYPE=$($AWSCLI kms describe-key --key-id $key $PROFILE_OPT --region $regx --query 'KeyMetadata.Origin' | sed 's/["]//g') if [[ "$CHECK_KMS_KEY_TYPE" == "EXTERNAL" ]];then - textPass "$regx: Key $key in Region $regx Customer Uploaded Key Material." "$regx" + textPass "$regx: Key $key in Region $regx Customer Uploaded Key Material" "$regx" else CHECK_KMS_KEY_ROTATION=$($AWSCLI kms get-key-rotation-status --key-id $key $PROFILE_OPT --region $regx --output text) if [[ "$CHECK_KMS_KEY_ROTATION" == "True" ]];then diff --git a/checks/check29 b/checks/check29 index d1f23dc8..01681bb8 100644 --- a/checks/check29 +++ b/checks/check29 @@ -8,14 +8,16 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check29="2.9,2.09" +CHECK_ID_check29="2.9" CHECK_TITLE_check29="[check29] Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" CHECK_SCORED_check29="SCORED" CHECK_TYPE_check29="LEVEL2" +CHECK_ASFF_TYPE_check29="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check29="AwsEc2Vpc" CHECK_ALTERNATE_check209="check29" check29(){ - # "Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" + # "Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" for regx in $REGIONS; do AVAILABLE_VPC=$($AWSCLI ec2 describe-vpcs $PROFILE_OPT --region $regx --query 'Vpcs[?State==`available`].VpcId' --output text) for vpcx in $AVAILABLE_VPC; do @@ -26,7 +28,7 @@ check29(){ done else textFail "VPC $vpcx: No VPCFlowLog has been found in Region $regx" "$regx" - fi + fi done done } diff --git a/checks/check31 b/checks/check31 index 8a8d1329..2ea65085 100644 --- a/checks/check31 +++ b/checks/check31 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check31="3.1,3.01" +CHECK_ID_check31="3.1" CHECK_TITLE_check31="[check31] Ensure a log metric filter and alarm exist for unauthorized API calls (Scored)" CHECK_SCORED_check31="SCORED" CHECK_TYPE_check31="LEVEL1" +CHECK_ASFF_TYPE_check31="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check31="AwsCloudTrailTrail" CHECK_ALTERNATE_check301="check31" check31(){ diff --git a/checks/check310 b/checks/check310 index 77f4b5a3..65d50773 100644 --- a/checks/check310 +++ b/checks/check310 @@ -37,6 +37,8 @@ CHECK_ID_check310="3.10" CHECK_TITLE_check310="[check310] Ensure a log metric filter and alarm exist for security group changes (Scored)" CHECK_SCORED_check310="SCORED" CHECK_TYPE_check310="LEVEL2" +CHECK_ASFF_TYPE_check310="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations 
Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check310="AwsCloudTrailTrail" CHECK_ALTERNATE_check310="check310" check310(){ diff --git a/checks/check311 b/checks/check311 index b69fc75a..e38af0dc 100644 --- a/checks/check311 +++ b/checks/check311 @@ -37,6 +37,8 @@ CHECK_ID_check311="3.11" CHECK_TITLE_check311="[check311] Ensure a log metric filter and alarm exist for changes to Network Access Control Lists (NACL) (Scored)" CHECK_SCORED_check311="SCORED" CHECK_TYPE_check311="LEVEL2" +CHECK_ASFF_TYPE_check311="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check311="AwsCloudTrailTrail" CHECK_ALTERNATE_check311="check311" check311(){ diff --git a/checks/check312 b/checks/check312 index 49e1462e..b5abde10 100644 --- a/checks/check312 +++ b/checks/check312 @@ -37,6 +37,8 @@ CHECK_ID_check312="3.12" CHECK_TITLE_check312="[check312] Ensure a log metric filter and alarm exist for changes to network gateways (Scored)" CHECK_SCORED_check312="SCORED" CHECK_TYPE_check312="LEVEL1" +CHECK_ASFF_TYPE_check312="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check312="AwsCloudTrailTrail" CHECK_ALTERNATE_check312="check312" check312(){ diff --git a/checks/check313 b/checks/check313 index a6a81edc..0514045c 100644 --- a/checks/check313 +++ b/checks/check313 @@ -37,6 +37,8 @@ CHECK_ID_check313="3.13" CHECK_TITLE_check313="[check313] Ensure a log metric filter and alarm exist for route table changes (Scored)" CHECK_SCORED_check313="SCORED" CHECK_TYPE_check313="LEVEL1" +CHECK_ASFF_TYPE_check313="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check313="AwsCloudTrailTrail" CHECK_ALTERNATE_check313="check313" check313(){ diff --git a/checks/check314 b/checks/check314 index 3a6c9d7d..de9c3875 100644 --- a/checks/check314 +++ b/checks/check314 @@ -37,6 +37,8 @@ 
CHECK_ID_check314="3.14" CHECK_TITLE_check314="[check314] Ensure a log metric filter and alarm exist for VPC changes (Scored)" CHECK_SCORED_check314="SCORED" CHECK_TYPE_check314="LEVEL1" +CHECK_ASFF_TYPE_check314="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check314="AwsCloudTrailTrail" CHECK_ALTERNATE_check314="check314" check314(){ diff --git a/checks/check32 b/checks/check32 index 83779842..d6000238 100644 --- a/checks/check32 +++ b/checks/check32 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check32="3.2,3.02" +CHECK_ID_check32="3.2" CHECK_TITLE_check32="[check32] Ensure a log metric filter and alarm exist for Management Console sign-in without MFA (Scored)" CHECK_SCORED_check32="SCORED" CHECK_TYPE_check32="LEVEL1" +CHECK_ASFF_TYPE_check32="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check32="AwsCloudTrailTrail" CHECK_ALTERNATE_check302="check32" check32(){ diff --git a/checks/check33 b/checks/check33 index 00c6d7dd..837d5fb5 100644 --- a/checks/check33 +++ b/checks/check33 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check33="3.3,3.03" +CHECK_ID_check33="3.3" CHECK_TITLE_check33="[check33] Ensure a log metric filter and alarm exist for usage of root account (Scored)" CHECK_SCORED_check33="SCORED" CHECK_TYPE_check33="LEVEL1" +CHECK_ASFF_TYPE_check33="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check33="AwsCloudTrailTrail" CHECK_ALTERNATE_check303="check33" check33(){ diff --git a/checks/check34 b/checks/check34 index e4751c79..7d2a6e26 100644 --- a/checks/check34 +++ b/checks/check34 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions 
arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check34="3.4,3.04" +CHECK_ID_check34="3.4" CHECK_TITLE_check34="[check34] Ensure a log metric filter and alarm exist for IAM policy changes (Scored)" CHECK_SCORED_check34="SCORED" CHECK_TYPE_check34="LEVEL1" +CHECK_ASFF_TYPE_check34="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check34="AwsCloudTrailTrail" CHECK_ALTERNATE_check304="check34" check34(){ diff --git a/checks/check35 b/checks/check35 index b8b4c6cf..9fd5e0f5 100644 --- a/checks/check35 +++ b/checks/check35 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check35="3.5,3.05" +CHECK_ID_check35="3.5" CHECK_TITLE_check35="[check35] Ensure a log metric filter and alarm exist for CloudTrail configuration changes (Scored)" CHECK_SCORED_check35="SCORED" CHECK_TYPE_check35="LEVEL1" +CHECK_ASFF_TYPE_check35="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check35="AwsCloudTrailTrail" CHECK_ALTERNATE_check305="check35" check35(){ diff --git a/checks/check36 b/checks/check36 index 699739d8..334ae475 100644 --- a/checks/check36 +++ b/checks/check36 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check36="3.6,3.06" +CHECK_ID_check36="3.6" CHECK_TITLE_check36="[check36] Ensure a log metric filter and alarm exist for AWS Management Console authentication failures (Scored)" CHECK_SCORED_check36="SCORED" CHECK_TYPE_check36="LEVEL2" +CHECK_ASFF_TYPE_check36="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check36="AwsCloudTrailTrail" CHECK_ALTERNATE_check306="check36" check36(){ diff --git a/checks/check37 b/checks/check37 index c0390bc9..548535d0 100644 --- a/checks/check37 
+++ b/checks/check37 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check37="3.7,3.07" +CHECK_ID_check37="3.7" CHECK_TITLE_check37="[check37] Ensure a log metric filter and alarm exist for disabling or scheduled deletion of customer created CMKs (Scored)" CHECK_SCORED_check37="SCORED" CHECK_TYPE_check37="LEVEL2" +CHECK_ASFF_TYPE_check37="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check37="AwsCloudTrailTrail" CHECK_ALTERNATE_check307="check37" check37(){ diff --git a/checks/check38 b/checks/check38 index ddc69d36..829cd122 100644 --- a/checks/check38 +++ b/checks/check38 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check38="3.8,3.08" +CHECK_ID_check38="3.8" CHECK_TITLE_check38="[check38] Ensure a log metric filter and alarm exist for S3 bucket policy changes (Scored)" CHECK_SCORED_check38="SCORED" CHECK_TYPE_check38="LEVEL1" +CHECK_ASFF_TYPE_check38="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check38="AwsCloudTrailTrail" CHECK_ALTERNATE_check308="check38" check38(){ diff --git a/checks/check39 b/checks/check39 index 3a812a4a..6ca13baa 100644 --- a/checks/check39 +++ b/checks/check39 @@ -33,10 +33,12 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check39="3.9,3.09" +CHECK_ID_check39="3.9" CHECK_TITLE_check39="[check39] Ensure a log metric filter and alarm exist for AWS Config configuration changes (Scored)" CHECK_SCORED_check39="SCORED" CHECK_TYPE_check39="LEVEL2" +CHECK_ASFF_TYPE_check39="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check39="AwsCloudTrailTrail" CHECK_ALTERNATE_check309="check39" check39(){ 
diff --git a/checks/check41 b/checks/check41 index 30fd9131..da704739 100644 --- a/checks/check41 +++ b/checks/check41 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check41="4.1,4.01" +CHECK_ID_check41="4.1" CHECK_TITLE_check41="[check41] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 22 (Scored)" CHECK_SCORED_check41="SCORED" CHECK_TYPE_check41="LEVEL2" +CHECK_ASFF_TYPE_check41="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check41="AwsEc2SecurityGroup" CHECK_ALTERNATE_check401="check41" check41(){ diff --git a/checks/check42 b/checks/check42 index 92187620..69e19891 100644 --- a/checks/check42 +++ b/checks/check42 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check42="4.2,4.02" +CHECK_ID_check42="4.2" CHECK_TITLE_check42="[check42] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 3389 (Scored)" CHECK_SCORED_check42="SCORED" CHECK_TYPE_check42="LEVEL2" +CHECK_ASFF_TYPE_check42="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check42="AwsEc2SecurityGroup" CHECK_ALTERNATE_check402="check42" check42(){ diff --git a/checks/check43 b/checks/check43 index 4599934d..35cf44c5 100644 --- a/checks/check43 +++ b/checks/check43 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check43="4.3,4.03" +CHECK_ID_check43="4.3" CHECK_TITLE_check43="[check43] Ensure the default security group of every VPC restricts all traffic (Scored)" CHECK_SCORED_check43="SCORED" CHECK_TYPE_check43="LEVEL2" +CHECK_ASFF_TYPE_check43="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check43="AwsEc2SecurityGroup" CHECK_ALTERNATE_check403="check43" check43(){ diff --git a/checks/check44 b/checks/check44 index a45c116b..e7f620f8 100644 --- a/checks/check44 +++ b/checks/check44 @@ -8,10 +8,12 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check44="4.4,4.04" +CHECK_ID_check44="4.4" CHECK_TITLE_check44="[check44] Ensure routing tables for VPC peering are \"least access\" (Not Scored)" CHECK_SCORED_check44="NOT_SCORED" CHECK_TYPE_check44="LEVEL2" +CHECK_ASFF_TYPE_check44="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check44="AwsEc2Vpc" CHECK_ALTERNATE_check404="check44" check44(){ diff --git a/checks/check_extra71 b/checks/check_extra71 index 8667666b..19465244 100644 --- a/checks/check_extra71 +++ b/checks/check_extra71 @@ -10,10 +10,11 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra71="7.1,7.01" +CHECK_ID_extra71="7.1" CHECK_TITLE_extra71="[extra71] Ensure users of groups with AdministratorAccess policy have MFA tokens enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra71="NOT_SCORED" CHECK_TYPE_extra71="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra71="AwsIamUser" CHECK_ALTERNATE_extra701="extra71" CHECK_ALTERNATE_check71="extra71" CHECK_ALTERNATE_check701="extra71" @@ -21,15 +22,15 @@ CHECK_ALTERNATE_check701="extra71" extra71(){ # "Ensure users of groups with AdministratorAccess policy have MFA tokens enabled (Not Scored) (Not part of CIS benchmark)" ADMIN_GROUPS='' - AWS_GROUPS=$($AWSCLI $PROFILE_OPT iam list-groups --output text --query 'Groups[].GroupName') + AWS_GROUPS=$($AWSCLI $PROFILE_OPT iam list-groups --output text --region $REGION --query 'Groups[].GroupName') for grp in $AWS_GROUPS; do # aws --profile onlinetraining iam list-attached-group-policies --group-name Administrators --query 'AttachedPolicies[].PolicyArn' | grep 'arn:aws:iam::aws:policy/AdministratorAccess' # list-attached-group-policies - CHECK_ADMIN_GROUP=$($AWSCLI $PROFILE_OPT iam list-attached-group-policies --group-name $grp --output json --query 'AttachedPolicies[].PolicyArn' | grep 'arn:aws:iam::aws:policy/AdministratorAccess') + CHECK_ADMIN_GROUP=$($AWSCLI $PROFILE_OPT --region $REGION iam list-attached-group-policies --group-name $grp --output json --query 'AttachedPolicies[].PolicyArn' | grep 'arn:${AWS_PARTITION}:iam::aws:policy/AdministratorAccess') if [[ $CHECK_ADMIN_GROUP ]]; then ADMIN_GROUPS="$ADMIN_GROUPS $grp" textInfo "$grp group provides administrative access" - ADMIN_USERS=$($AWSCLI $PROFILE_OPT iam get-group --group-name $grp --output json --query 'Users[].UserName' | grep '"' | cut -d'"' -f2 ) + ADMIN_USERS=$($AWSCLI $PROFILE_OPT iam get-group --region $REGION --group-name $grp --output json --query 'Users[].UserName' | grep '"' | cut -d'"' -f2 ) for auser in $ADMIN_USERS; do # users in group are Administrators # users 
diff --git a/checks/check_extra710 b/checks/check_extra710 index c259695a..55216b3d 100644 --- a/checks/check_extra710 +++ b/checks/check_extra710 @@ -14,6 +14,7 @@ CHECK_ID_extra710="7.10" CHECK_TITLE_extra710="[extra710] Check for internet facing EC2 Instances (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra710="NOT_SCORED" CHECK_TYPE_extra710="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra710="AwsEc2Instance" CHECK_ALTERNATE_check710="extra710" extra710(){ diff --git a/checks/check_extra711 b/checks/check_extra711 index 717c8680..3e9b29ee 100644 --- a/checks/check_extra711 +++ b/checks/check_extra711 @@ -14,6 +14,7 @@ CHECK_ID_extra711="7.11" CHECK_TITLE_extra711="[extra711] Check for Publicly Accessible Redshift Clusters (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra711="NOT_SCORED" CHECK_TYPE_extra711="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra711="AwsRedshiftCluster" CHECK_ALTERNATE_check711="extra711" extra711(){ diff --git a/checks/check_extra712 b/checks/check_extra712 index 60e71566..d6ecfb7f 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -16,14 +16,13 @@ CHECK_SCORED_extra712="NOT_SCORED" CHECK_TYPE_extra712="EXTRA" CHECK_ALTERNATE_check712="extra712" -extra712(){ - # "Check if Amazon Macie is enabled (Not Scored) (Not part of CIS benchmark)" - textInfo "No API commands available to check if Macie is enabled," - textInfo "just looking if IAM Macie related permissions exist. " - MACIE_IAM_ROLES_CREATED=$($AWSCLI iam list-roles $PROFILE_OPT --query 'Roles[*].Arn'|grep AWSMacieServiceCustomer|wc -l) - if [[ $MACIE_IAM_ROLES_CREATED -eq 2 ]];then - textPass "Macie related IAM roles exist so it might be enabled. Check it out manually." - else - textFail "No Macie related IAM roles found. It is most likely not to be enabled" - fi + extra712(){ + textInfo "No API commands available to check if Macie is enabled," + textInfo "just looking if IAM Macie related permissions exist. 
" + MACIE_IAM_ROLES_CREATED=$($AWSCLI iam list-roles $PROFILE_OPT --query 'Roles[*].Arn'|grep AWSMacieServiceCustomer|wc -l) + if [[ $MACIE_IAM_ROLES_CREATED -eq 2 ]];then + textPass "Macie related IAM roles exist so it might be enabled. Check it out manually" +else + textFail "No Macie related IAM roles found. It is most likely not to be enabled" +fi } diff --git a/checks/check_extra714 b/checks/check_extra714 index a47ecffe..cb57de85 100644 --- a/checks/check_extra714 +++ b/checks/check_extra714 @@ -14,6 +14,7 @@ CHECK_ID_extra714="7.14" CHECK_TITLE_extra714="[extra714] Check if CloudFront distributions have logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra714="NOT_SCORED" CHECK_TYPE_extra714="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra714="AwsCloudFrontDistribution" CHECK_ALTERNATE_check714="extra714" extra714(){ diff --git a/checks/check_extra715 b/checks/check_extra715 index 34eb9a3d..2268b719 100644 --- a/checks/check_extra715 +++ b/checks/check_extra715 @@ -11,9 +11,10 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra715="7.15" -CHECK_TITLE_extra715="[extra715] Check if Elasticsearch Service domains have logging enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra715="[extra715] Check if Amazon Elasticsearch Service (ES) domains have logging enabled" CHECK_SCORED_extra715="NOT_SCORED" CHECK_TYPE_extra715="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra715="AwsElasticsearchDomain" CHECK_ALTERNATE_check715="extra715" extra715(){ @@ -23,19 +24,19 @@ extra715(){ for domain in $LIST_OF_DOMAINS;do SEARCH_SLOWLOG_ENABLED=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.LogPublishingOptions.Options.SEARCH_SLOW_LOGS.Enabled --output text |grep -v ^None|grep -v ^False) if [[ $SEARCH_SLOWLOG_ENABLED ]];then - textPass "$regx: ElasticSearch Service domain $domain SEARCH_SLOW_LOGS enabled" "$regx" + textPass "$regx: Amazon ES domain $domain SEARCH_SLOW_LOGS enabled" "$regx" else - textFail "$regx: ElasticSearch Service domain $domain SEARCH_SLOW_LOGS disabled!" "$regx" + textFail "$regx: Amazon ES domain $domain SEARCH_SLOW_LOGS disabled!" "$regx" fi INDEX_SLOWLOG_ENABLED=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.LogPublishingOptions.Options.INDEX_SLOW_LOGS.Enabled --output text |grep -v ^None|grep -v ^False) if [[ $INDEX_SLOWLOG_ENABLED ]];then - textPass "$regx: ElasticSearch Service domain $domain INDEX_SLOW_LOGS enabled" "$regx" + textPass "$regx: Amazon ES domain $domain INDEX_SLOW_LOGS enabled" "$regx" else - textFail "$regx: ElasticSearch Service domain $domain INDEX_SLOW_LOGS disabled!" "$regx" + textFail "$regx: Amazon ES domain $domain INDEX_SLOW_LOGS disabled!" 
"$regx" fi done else - textInfo "$regx: No Elasticsearch Service domain found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra716 b/checks/check_extra716 index 549791ce..9d664bd1 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -11,35 +11,79 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra716="7.16" -CHECK_TITLE_extra716="[extra716] Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public or if it has open policy access" CHECK_SCORED_extra716="NOT_SCORED" CHECK_TYPE_extra716="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra716="AwsElasticsearchDomain" CHECK_ALTERNATE_check716="extra716" extra716(){ - # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) if [[ $LIST_OF_DOMAINS ]]; then for domain in $LIST_OF_DOMAINS;do - CHECK_IF_MEMBER_OF_VPC=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.VPCOptions.Options.VPCId --output text|grep -v ^None) - if [[ ! $CHECK_IF_MEMBER_OF_VPC ]];then - TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) - $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null - # check if the policy has Principal as * - CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') - if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then - textFail "$regx: $domain policy \"may\" allow Anonymous users to perform actions (Principal: \"*\")" "$regx" - else - textPass "$regx: $domain is not open" "$regx" - fi + TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) + # get endpoint or vpc endpoints + ES_DOMAIN_ENDPOINT=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[Endpoint || Endpoints]' --output text) + # If the endpoint starts with "vpc-" it is in a VPC then it is fine. + if [[ "$ES_DOMAIN_ENDPOINT" =~ ^vpc-* ]];then + ES_DOMAIN_VPC=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.VPCOptions.VPCId' --output text) + textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" else - textPass "$regx: $domain is in a VPC" "$regx" + $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null + # check if the policy has a principal set up + CHECK_ES_POLICY_PRINCIPAL=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS != "*") or ((.Principal|type == "string") and .Principal != "*")) and select(has("Condition") | not))') + if [[ $CHECK_ES_POLICY_PRINCIPAL ]]; then + textPass "$regx: Amazon ES domain $domain does have a Principal set up" "$regx" + fi + CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') + CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then + # get content of IpAddress."aws:SourceIp" and get a clean list + LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') + unset CONDITION_HAS_PUBLIC_IP_ARRAY + for condition_ip in "${LIST_CONDITION_IPS}";do + CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') + if [[ $CONDITION_HAS_PRIVATE_IP ]];then + CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_PUBLIC_IP=$(echo "${condition_ip}" | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') + if [[ $CONDITION_HAS_PUBLIC_IP ]];then + CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_ZERO_NET=$(echo "${condition_ip}" | grep -E '^(0\.0\.0\.0)') + CONDITION_HAS_STAR=$(echo "${condition_ip}" | grep -E '^\*') + done + CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$CONDITION_HAS_ZERO_NET + CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR + fi + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then + textFail "$regx: Amazon ES domain $domain 
policy allows access (Principal: \"*\") - use extra788 to test AUTH" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network 0.0.0.0) - use extra788 to test AUTH" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network \"*\") - use extra788 to test AUTH" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) - use extra788 to test AUTH" "$regx" + fi + else + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" + else + textPass "$regx: Amazon ES domain $domain does not allow anonymous access" "$regx" + fi + fi + rm -f $TEMP_POLICY_FILE fi - rm -f $TEMP_POLICY_FILE done else - textInfo "$regx: No Elasticsearch Service domain found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra717 b/checks/check_extra717 index 0bb04741..74a18937 100644 --- a/checks/check_extra717 +++ b/checks/check_extra717 @@ -14,6 +14,7 @@ CHECK_ID_extra717="7.17" CHECK_TITLE_extra717="[extra717] Check if Elastic Load Balancers have logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra717="NOT_SCORED" CHECK_TYPE_extra717="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra717="AwsElbLoadBalancer" CHECK_ALTERNATE_check717="extra717" extra717(){ diff --git a/checks/check_extra718 b/checks/check_extra718 index 73319113..0d361c3c 
100644 --- a/checks/check_extra718 +++ b/checks/check_extra718 @@ -14,6 +14,7 @@ CHECK_ID_extra718="7.18" CHECK_TITLE_extra718="[extra718] Check if S3 buckets have server access logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra718="NOT_SCORED" CHECK_TYPE_extra718="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra718="AwsS3Bucket" CHECK_ALTERNATE_check718="extra718" extra718(){ diff --git a/checks/check_extra72 b/checks/check_extra72 index f9fa11b9..b9471f9b 100644 --- a/checks/check_extra72 +++ b/checks/check_extra72 @@ -10,10 +10,11 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra72="7.2,7.02" +CHECK_ID_extra72="7.2" CHECK_TITLE_extra72="[extra72] Ensure there are no EBS Snapshots set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra72="NOT_SCORED" CHECK_TYPE_extra72="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra72="AwsEc2Snapshot" CHECK_ALTERNATE_extra702="extra72" CHECK_ALTERNATE_check72="extra72" CHECK_ALTERNATE_check702="extra72" diff --git a/checks/check_extra720 b/checks/check_extra720 index 1bf63cc5..f8b2a890 100644 --- a/checks/check_extra720 +++ b/checks/check_extra720 @@ -14,6 +14,7 @@ CHECK_ID_extra720="7.20" CHECK_TITLE_extra720="[extra720] Check if Lambda functions invoke API operations are being recorded by CloudTrail (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra720="NOT_SCORED" CHECK_TYPE_extra720="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra720="AwsLambdaFunction" CHECK_ALTERNATE_check720="extra720" extra720(){ @@ -22,10 +23,10 @@ extra720(){ LIST_OF_FUNCTIONS=$($AWSCLI lambda list-functions $PROFILE_OPT --region $regx --query Functions[*].FunctionName --output text) if [[ $LIST_OF_FUNCTIONS ]]; then for lambdafunction in $LIST_OF_FUNCTIONS;do - LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT 
--region $regx --query trailList[?HomeRegion==\`$regx\`].Name --output text) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[].TrailARN --output text) if [[ $LIST_OF_TRAILS ]]; then for trail in $LIST_OF_TRAILS; do - FUNCTION_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --region $regx --query "EventSelectors[*].DataResources[?Type == \`AWS::Lambda::Function\`].Values" --output text |xargs -n1| grep -E "^arn:aws:lambda.*function:$lambdafunction$|^arn:aws:lambda$") + FUNCTION_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --region $regx --query "EventSelectors[*].DataResources[?Type == \`AWS::Lambda::Function\`].Values" --output text |xargs -n1| grep -E "^arn:${AWS_PARTITION}:lambda.*function:$lambdafunction$|^arn:${AWS_PARTITION}:lambda$") if [[ $FUNCTION_ENABLED_IN_TRAIL ]]; then textPass "$regx: Lambda function $lambdafunction enabled in trail $trail" "$regx" else diff --git a/checks/check_extra721 b/checks/check_extra721 index ac6ca054..d464786a 100644 --- a/checks/check_extra721 +++ b/checks/check_extra721 @@ -14,6 +14,7 @@ CHECK_ID_extra721="7.21" CHECK_TITLE_extra721="[extra721] Check if Redshift cluster has audit logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra721="NOT_SCORED" CHECK_TYPE_extra721="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra721="AwsRedshiftCluster" CHECK_ALTERNATE_check721="extra721" extra721(){ diff --git a/checks/check_extra722 b/checks/check_extra722 index 605f7f19..e90596b7 100644 --- a/checks/check_extra722 +++ b/checks/check_extra722 @@ -14,6 +14,7 @@ CHECK_ID_extra722="7.22" CHECK_TITLE_extra722="[extra722] Check if API Gateway has logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra722="NOT_SCORED" CHECK_TYPE_extra722="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra722="AwsApiGatewayRestApi" CHECK_ALTERNATE_check722="extra722" extra722(){ diff --git 
a/checks/check_extra723 b/checks/check_extra723 index 96039eb9..c527c3c8 100644 --- a/checks/check_extra723 +++ b/checks/check_extra723 @@ -14,6 +14,7 @@ CHECK_ID_extra723="7.23" CHECK_TITLE_extra723="[extra723] Check if RDS Snapshots and Cluster Snapshots are public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra723="NOT_SCORED" CHECK_TYPE_extra723="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra723="AwsRdsDbSnapshot" CHECK_ALTERNATE_check723="extra723" extra723(){ diff --git a/checks/check_extra724 b/checks/check_extra724 index 068a07d2..2aa08b0f 100644 --- a/checks/check_extra724 +++ b/checks/check_extra724 @@ -14,6 +14,7 @@ CHECK_ID_extra724="7.24" CHECK_TITLE_extra724="[extra724] Check if ACM certificates have Certificate Transparency logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra724="NOT_SCORED" CHECK_TYPE_extra724="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra724="AwsCertificateManagerCertificate" CHECK_ALTERNATE_check724="extra724" extra724(){ @@ -24,10 +25,16 @@ extra724(){ for cert_arn in $LIST_OF_CERTS;do CT_ENABLED=$($AWSCLI acm describe-certificate $PROFILE_OPT --region $regx --certificate-arn $cert_arn --query Certificate.Options.CertificateTransparencyLoggingPreference --output text) CERT_DOMAIN_NAME=$(aws acm describe-certificate $PROFILE_OPT --region $regx --certificate-arn $cert_arn --query Certificate.DomainName --output text) - if [[ $CT_ENABLED == "ENABLED" ]];then - textPass "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging enabled!" "$regx" + CERT_TYPE=$(aws acm describe-certificate $PROFILE_OPT --region $regx --certificate-arn $cert_arn --query Certificate.Type --output text) + if [[ $CERT_TYPE == "IMPORTED" ]];then + # Ignore imported certificate + textInfo "$regx: ACM Certificate $CERT_DOMAIN_NAME is imported." "$regx" else - textFail "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging disabled!" 
"$regx" + if [[ $CT_ENABLED == "ENABLED" ]];then + textPass "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging enabled!" "$regx" + else + textFail "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging disabled!" "$regx" + fi fi done else diff --git a/checks/check_extra725 b/checks/check_extra725 index 257a3d8e..eb336b79 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -15,6 +15,7 @@ CHECK_ID_extra725="7.25" CHECK_TITLE_extra725="[extra725] Check if S3 buckets have Object-level logging enabled in CloudTrail (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra725="NOT_SCORED" CHECK_TYPE_extra725="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra725="AwsS3Bucket" CHECK_ALTERNATE_check725="extra725" # per Object-level logging is not configured at Bucket level but at CloudTrail trail level @@ -22,42 +23,35 @@ extra725(){ # "Check if S3 buckets have Object-level logging enabled in CloudTrail (Not Scored) (Not part of CIS benchmark)" textInfo "Looking for S3 Buckets Object-level logging information in all trails... " - # create a file with a list of all buckets - TEMP_BUCKET_LIST_FILE=$(mktemp -t prowler.bucket-list-XXXXXX) - $AWSCLI s3api list-buckets --query 'Buckets[*].{Name:Name}' $PROFILE_OPT --region $REGION --output text > $TEMP_BUCKET_LIST_FILE - if [ ! 
-s $TEMP_BUCKET_LIST_FILE ]; then + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --region $REGION --query 'Buckets[*].{Name:Name}' --output text) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $REGION --query 'trailList[].TrailARN' --output text) + if [[ $LIST_OF_BUCKETS ]]; then + for bucketName in $LIST_OF_BUCKETS;do + if [[ $LIST_OF_TRAILS ]]; then + BUCKET_ENABLED_TRAILS=() + for trail in $LIST_OF_TRAILS; do + BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors --region $REGION $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:${AWS_PARTITION}:s3:::$bucketName/\S*$|^arn:${AWS_PARTITION}:s3$|^arn:${AWS_PARTITION}:s3:::$") + if [[ $BUCKET_ENABLED_IN_TRAIL ]]; then + BUCKET_ENABLED_TRAILS+=($trail) + # textPass "$regx: S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx" + #else + # textFail "$regx: S3 bucket $bucketName has Object-level logging disabled" "$regx" + fi + done + + if [[ ${#BUCKET_ENABLED_TRAILS[@]} -gt 0 ]]; then + for trail in "${BUCKET_ENABLED_TRAILS[@]}"; do + textPass "$regx: S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx" + done + else + textFail "$regx: S3 bucket $bucketName has Object-level logging disabled" "$regx" + fi + + else + textFail "$regx: S3 bucket $bucketName is not being recorded no CloudTrail found!" 
"$regx" + fi + done + else textInfo "$regx: No S3 buckets found" "$regx" - exit fi - - # now create a list with all trails available and their region - TEMP_TRAILS_LIST_FILE=$(mktemp -t prowler.trails-list-XXXXXX) - for regx in $REGIONS; do - $AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].[Name,HomeRegion] --output text >> $TEMP_TRAILS_LIST_FILE - done - - # look for buckets being logged per trail and create a list with them - TEMP_BUCKETS_LOGGING_LIST_FILE=$(mktemp -t prowler.buckets-logging-list-XXXXXX) - while IFS='' read -r LINE || [[ -n "${LINE}" ]]; do - TRAIL_REGION=$(echo "${LINE}" | awk '{ print $2 }') - TRAIL_NAME=$(echo "${LINE}" | awk '{ print $1 }') - BUCKETS_OBJECT_LOGGING_ENABLED=$($AWSCLI cloudtrail get-event-selectors --trail-name "${TRAIL_NAME}" $PROFILE_OPT --region $TRAIL_REGION --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1 |cut -d: -f 6|sed 's/\///g') - echo $BUCKETS_OBJECT_LOGGING_ENABLED |tr " " "\n"|sort >> $TEMP_BUCKETS_LOGGING_LIST_FILE - if [[ $BUCKETS_OBJECT_LOGGING_ENABLED ]]; then - for bucket in $BUCKETS_OBJECT_LOGGING_ENABLED; do - textPass "$regx: S3 bucket $bucket has Object-level logging enabled in trail $trail" "$regx" - done - fi - done < $TEMP_TRAILS_LIST_FILE - - # diff to get the ones that are not in any trail then they are not logging - BUCKETS_NOT_LOGGING=$(diff $TEMP_BUCKETS_LOGGING_LIST_FILE $TEMP_BUCKET_LIST_FILE | sed -n 's/^> //p') - if [[ $BUCKETS_NOT_LOGGING ]]; then - for bucket in $BUCKETS_NOT_LOGGING; do - textFail "$regx: S3 bucket $bucket has Object-level logging disabled" "$regx" - done - fi - # delete all temp files - rm -fr $TEMP_BUCKET_LIST_FILE $TEMP_TRAILS_LIST_FILE $TEMP_BUCKETS_LOGGING_LIST_FILE - } diff --git a/checks/check_extra727 b/checks/check_extra727 index 5e14e2b5..47c41fcb 100644 --- a/checks/check_extra727 +++ b/checks/check_extra727 @@ -15,6 +15,7 @@ CHECK_ID_extra727="7.27" 
CHECK_TITLE_extra727="[extra727] Check if SQS queues have policy set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra727="NOT_SCORED" CHECK_TYPE_extra727="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra727="AwsSqsQueue" CHECK_ALTERNATE_check727="extra727" extra727(){ diff --git a/checks/check_extra728 b/checks/check_extra728 index eb27a9de..5399822d 100644 --- a/checks/check_extra728 +++ b/checks/check_extra728 @@ -15,6 +15,7 @@ CHECK_ID_extra728="7.28" CHECK_TITLE_extra728="[extra728] Check if SQS queues have Server Side Encryption enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra728="NOT_SCORED" CHECK_TYPE_extra728="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra728="AwsSqsQueue" CHECK_ALTERNATE_check728="extra728" extra728(){ diff --git a/checks/check_extra729 b/checks/check_extra729 index 603acbb0..756aa09c 100644 --- a/checks/check_extra729 +++ b/checks/check_extra729 @@ -15,6 +15,7 @@ CHECK_ID_extra729="7.29" CHECK_TITLE_extra729="[extra729] Ensure there are no EBS Volumes unencrypted (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra729="NOT_SCORED" CHECK_TYPE_extra729="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra729="AwsEc2Volume" CHECK_ALTERNATE_check729="extra729" extra729(){ diff --git a/checks/check_extra73 b/checks/check_extra73 index a587a60f..281b9f90 100644 --- a/checks/check_extra73 +++ b/checks/check_extra73 @@ -11,10 +11,11 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra73="7.3,7.03" +CHECK_ID_extra73="7.3" CHECK_TITLE_extra73="[extra73] Ensure there are no S3 buckets open to the Everyone or Any AWS user (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra73="NOT_SCORED" CHECK_TYPE_extra73="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra73="AwsS3Bucket" CHECK_ALTERNATE_extra703="extra73" CHECK_ALTERNATE_check73="extra73" CHECK_ALTERNATE_check703="extra73" diff --git a/checks/check_extra730 b/checks/check_extra730 index 53be1c58..06266cd3 100644 --- a/checks/check_extra730 +++ b/checks/check_extra730 @@ -17,6 +17,7 @@ CHECK_ID_extra730="7.30" CHECK_TITLE_extra730="[extra730] Check if ACM Certificates are about to expire in $DAYS_TO_EXPIRE_THRESHOLD days or less (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra730="NOT_SCORED" CHECK_TYPE_extra730="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra730="AwsCertificateManagerCertificate" CHECK_ALTERNATE_check730="extra730" extra730(){ diff --git a/checks/check_extra731 b/checks/check_extra731 index 0baa1b6e..744d28a1 100644 --- a/checks/check_extra731 +++ b/checks/check_extra731 @@ -15,6 +15,7 @@ CHECK_ID_extra731="7.31" CHECK_TITLE_extra731="[extra731] Check if SNS topics have policy set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra731="NOT_SCORED" CHECK_TYPE_extra731="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra731="AwsSnsTopic" CHECK_ALTERNATE_check731="extra731" extra731(){ diff --git a/checks/check_extra732 b/checks/check_extra732 index 2fc63a22..30c6ec7f 100644 --- a/checks/check_extra732 +++ b/checks/check_extra732 @@ -15,6 +15,7 @@ CHECK_ID_extra732="7.32" CHECK_TITLE_extra732="[extra732] Check if Geo restrictions are enabled in CloudFront distributions (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra732="NOT_SCORED" CHECK_TYPE_extra732="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra732="AwsCloudFrontDistribution" CHECK_ALTERNATE_check732="extra732" extra732(){ diff --git a/checks/check_extra734 b/checks/check_extra734 index 
bebd2bfc..35930bd0 100644 --- a/checks/check_extra734 +++ b/checks/check_extra734 @@ -14,10 +14,11 @@ CHECK_ID_extra734="7.34" CHECK_TITLE_extra734="[extra734] Check if S3 buckets have default encryption (SSE) enabled or use a bucket policy to enforce it (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra734="NOT_SCORED" CHECK_TYPE_extra734="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra734="AwsS3Bucket" CHECK_ALTERNATE_check734="extra734" extra734(){ - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text|xargs -n1) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --region $REGION --query Buckets[*].Name --output text|xargs -n1) if [[ $LIST_OF_BUCKETS ]]; then for bucket in $LIST_OF_BUCKETS;do @@ -27,7 +28,7 @@ extra734(){ # - Have bucket policy denying s3:PutObject when s3:x-amz-server-side-encryption is absent # query to get if has encryption enabled or not - RESULT=$($AWSCLI s3api get-bucket-encryption $PROFILE_OPT --bucket $bucket --query ServerSideEncryptionConfiguration.Rules[].ApplyServerSideEncryptionByDefault[].SSEAlgorithm --output text 2>&1) + RESULT=$($AWSCLI s3api get-bucket-encryption $PROFILE_OPT --region $REGION --bucket $bucket --query ServerSideEncryptionConfiguration.Rules[].ApplyServerSideEncryptionByDefault[].SSEAlgorithm --output text 2>&1) if [[ $(echo "$RESULT" | grep AccessDenied) ]]; then textFail "Access Denied Trying to Get Encryption for $bucket" continue @@ -42,7 +43,7 @@ extra734(){ TEMP_SSE_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-${bucket}.policy.XXXXXXXXXX) # get bucket policy - $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --output text --query Policy > $TEMP_SSE_POLICY_FILE 2>&1 + $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --region $REGION --output text --query Policy > $TEMP_SSE_POLICY_FILE 2>&1 if [[ $(grep AccessDenied $TEMP_SSE_POLICY_FILE) ]]; then textFail "Access Denied Trying to Get Bucket Policy for $bucket" rm -f 
$TEMP_SSE_POLICY_FILE @@ -55,7 +56,7 @@ extra734(){ fi # check if the S3 policy forces SSE s3:x-amz-server-side-encryption:true - CHECK_BUCKET_SSE_POLICY_PRESENT=$(cat $TEMP_SSE_POLICY_FILE | jq --arg arn "arn:aws:s3:::${bucket}/*" '.Statement[]|select(.Effect=="Deny" and ((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*") and .Action=="s3:PutObject" and .Resource==$arn and .Condition.StringEquals."s3:x-amz-server-side-encryption" != null)') + CHECK_BUCKET_SSE_POLICY_PRESENT=$(cat $TEMP_SSE_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}/*" '.Statement[]|select(.Effect=="Deny" and ((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*") and .Action=="s3:PutObject" and .Resource==$arn and .Condition.StringEquals."s3:x-amz-server-side-encryption" != null)') if [[ $CHECK_BUCKET_SSE_POLICY_PRESENT == "" ]]; then textFail "Bucket $bucket does not enforce encryption!" 
rm -f $TEMP_SSE_POLICY_FILE diff --git a/checks/check_extra735 b/checks/check_extra735 index cc6ad238..0e49b698 100644 --- a/checks/check_extra735 +++ b/checks/check_extra735 @@ -14,6 +14,7 @@ CHECK_ID_extra735="7.35" CHECK_TITLE_extra735="[extra735] Check if RDS instances storage is encrypted (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra735="NOT_SCORED" CHECK_TYPE_extra735="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra735="AwsRdsDbInstance" CHECK_ALTERNATE_check735="extra735" extra735(){ diff --git a/checks/check_extra736 b/checks/check_extra736 index 0b5993a7..19082827 100644 --- a/checks/check_extra736 +++ b/checks/check_extra736 @@ -14,6 +14,7 @@ CHECK_ID_extra736="7.36" CHECK_TITLE_extra736="[extra736] Check exposed KMS keys (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra736="NOT_SCORED" CHECK_TYPE_extra736="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra736="AwsKmsKey" CHECK_ALTERNATE_check736="extra736" extra736(){ diff --git a/checks/check_extra737 b/checks/check_extra737 index d10a301f..3e1b6bf2 100644 --- a/checks/check_extra737 +++ b/checks/check_extra737 @@ -14,6 +14,7 @@ CHECK_ID_extra737="7.37" CHECK_TITLE_extra737="[extra737] Check KMS keys with key rotation disabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra737="NOT_SCORED" CHECK_TYPE_extra737="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra737="AwsKmsKey" CHECK_ALTERNATE_check737="extra737" extra737(){ diff --git a/checks/check_extra738 b/checks/check_extra738 index bc2b9dc5..c5b59eec 100644 --- a/checks/check_extra738 +++ b/checks/check_extra738 @@ -14,6 +14,7 @@ CHECK_ID_extra738="7.38" CHECK_TITLE_extra738="[extra738] Check if CloudFront distributions are set to HTTPS (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra738="NOT_SCORED" CHECK_TYPE_extra738="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra738="AwsCloudFrontDistribution" CHECK_ALTERNATE_check738="extra738" extra738(){ diff --git a/checks/check_extra739 b/checks/check_extra739 index 33ff5ecb..b1280683 100644 --- 
a/checks/check_extra739 +++ b/checks/check_extra739 @@ -14,6 +14,7 @@ CHECK_ID_extra739="7.39" CHECK_TITLE_extra739="[extra739] Check if RDS instances have backup enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra739="NOT_SCORED" CHECK_TYPE_extra739="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra739="AwsRdsDbInstance" CHECK_ALTERNATE_check739="extra739" extra739(){ @@ -21,12 +22,12 @@ extra739(){ LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) if [[ $LIST_OF_RDS_INSTANCES ]];then for rdsinstance in $LIST_OF_RDS_INSTANCES; do - # if retention is 0 then is disabled + # if retention is 0 then is disabled BACKUP_RETENTION=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].BackupRetentionPeriod' --output text) if [[ $BACKUP_RETENTION == "0" ]]; then textFail "$regx: RDS instance $rdsinstance has not backup enabled!" "$regx" else - textPass "$regx: RDS instance $rdsinstance has backup enabled with retention period $BACKUP_RETENTION days " "$regx" + textPass "$regx: RDS instance $rdsinstance has backup enabled with retention period $BACKUP_RETENTION days" "$regx" fi done else diff --git a/checks/check_extra74 b/checks/check_extra74 index 68dbfa92..c6d0aa04 100644 --- a/checks/check_extra74 +++ b/checks/check_extra74 @@ -10,10 +10,11 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra74="7.4,7.04" +CHECK_ID_extra74="7.4" CHECK_TITLE_extra74="[extra74] Ensure there are no Security Groups without ingress filtering being used (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra74="NOT_SCORED" CHECK_TYPE_extra74="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra74="AwsEc2SecurityGroup" CHECK_ALTERNATE_extra704="extra74" CHECK_ALTERNATE_check74="extra74" CHECK_ALTERNATE_check704="extra74" diff --git a/checks/check_extra740 b/checks/check_extra740 index d74c7600..5fa00518 100644 --- a/checks/check_extra740 +++ b/checks/check_extra740 @@ -14,6 +14,7 @@ CHECK_ID_extra740="7.40" CHECK_TITLE_extra740="[extra740] Check if EBS snapshots are encrypted (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra740="NOT_SCORED" CHECK_TYPE_extra740="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra740="AwsEc2Snapshot" CHECK_ALTERNATE_check740="extra740" extra740(){ @@ -30,7 +31,7 @@ extra740(){ fi done else - textInfo "$regx: No EBS Snapshots found" "$regx" + textInfo "$regx: No EBS Snapshots found" "$regx" fi done } diff --git a/checks/check_extra741 b/checks/check_extra741 index f4e54d2c..7545c9aa 100644 --- a/checks/check_extra741 +++ b/checks/check_extra741 @@ -14,14 +14,15 @@ CHECK_ID_extra741="7.41" CHECK_TITLE_extra741="[extra741] Find secrets in EC2 User Data (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra741="NOT_SCORED" CHECK_TYPE_extra741="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra741="AwsEc2Instance" CHECK_ALTERNATE_check741="extra741" extra741(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER - fi + fi textInfo "Looking for secrets in EC2 User Data in instances across all regions... 
(max 100 instances per region use -m to increase it) " for regx in $REGIONS; do @@ -30,33 +31,25 @@ extra741(){ for instance in $LIST_OF_EC2_INSTANCES; do EC2_USERDATA_FILE="$SECRETS_TEMP_FOLDER/extra741-$instance-userData.decoded" EC2_USERDATA=$($AWSCLI ec2 describe-instance-attribute --attribute userData $PROFILE_OPT --region $regx --instance-id $instance --query UserData.Value --output text| grep -v ^None | decode_report > $EC2_USERDATA_FILE) - if [ -s $EC2_USERDATA_FILE ];then - FILE_FORMAT_ASCII=$(file -b $EC2_USERDATA_FILE|grep ASCII) + if [ -s "$EC2_USERDATA_FILE" ];then # This finds ftp or http URLs with credentials and common keywords # FINDINGS=$(egrep -i '[[:alpha:]]*://[[:alnum:]]*:[[:alnum:]]*@.*/|key|secret|token|pass' $EC2_USERDATA_FILE |wc -l|tr -d '\ ') # New implementation using https://github.com/Yelp/detect-secrets - if [[ $FILE_FORMAT_ASCII ]]; then - FINDINGS=$(secretsDetector file $EC2_USERDATA_FILE) - if [[ $FINDINGS -eq 0 ]]; then - textPass "$regx: No secrets found in $instance" "$regx" - # delete file if nothing interesting is there - rm -f $EC2_USERDATA_FILE - else - textFail "$regx: Potential secret found in $instance" "$regx" - # delete file to not leave trace, user must look at the instance User Data - rm -f $EC2_USERDATA_FILE - fi - else - mv $EC2_USERDATA_FILE $EC2_USERDATA_FILE.gz ; gunzip $EC2_USERDATA_FILE.gz - FINDINGS=$(secretsDetector file $EC2_USERDATA_FILE) - if [[ $FINDINGS -eq 0 ]]; then - textPass "$regx: No secrets found in $instance User Data" "$regx" - rm -f $EC2_USERDATA_FILE - else - textFail "$regx: Potential secret found in $instance" "$regx" - fi + # Test if user data is a valid GZIP file, if so gunzip first + if gunzip -t "$EC2_USERDATA_FILE" > /dev/null 2>&1; then + mv "$EC2_USERDATA_FILE" "$EC2_USERDATA_FILE.gz" ; gunzip "$EC2_USERDATA_FILE.gz" fi - else + FINDINGS=$(secretsDetector file "$EC2_USERDATA_FILE") + if [[ $FINDINGS -eq 0 ]]; then + textPass "$regx: No secrets found in $instance User Data" "$regx" + 
# delete file if nothing interesting is there + rm -f "$EC2_USERDATA_FILE" + else + textFail "$regx: Potential secret found in $instance User Data" "$regx" + # delete file to not leave trace, user must look at the instance User Data + rm -f "$EC2_USERDATA_FILE" + fi + else textPass "$regx: No secrets found in $instance User Data or it is empty" "$regx" fi done diff --git a/checks/check_extra742 b/checks/check_extra742 index 8d78ab22..309f75d5 100644 --- a/checks/check_extra742 +++ b/checks/check_extra742 @@ -14,11 +14,12 @@ CHECK_ID_extra742="7.42" CHECK_TITLE_extra742="[extra742] Find secrets in CloudFormation outputs (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra742="NOT_SCORED" CHECK_TYPE_extra742="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra742="AwsCloudFormationStack" CHECK_ALTERNATE_check742="extra742" extra742(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi diff --git a/checks/check_extra743 b/checks/check_extra743 index e24326f9..4cd22003 100644 --- a/checks/check_extra743 +++ b/checks/check_extra743 @@ -14,6 +14,7 @@ CHECK_ID_extra743="7.43" CHECK_TITLE_extra743="[extra743] Check if API Gateway has client certificate enabled to access your backend endpoint (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra743="NOT_SCORED" CHECK_TYPE_extra743="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra743="AwsApiGatewayRestApi" CHECK_ALTERNATE_check743="extra743" extra743(){ @@ -23,15 +24,15 @@ extra743(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) LIST_OF_STAGES=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query 'item[*].stageName' --output text) - if [[ $LIST_OF_STAGES ]]; then + if [[ $LIST_OF_STAGES ]]; then for stage in 
$LIST_OF_STAGES; do CHECK_CERTIFICATE=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query "item[?stageName==\`$stage\`].clientCertificateId" --output text) if [[ $CHECK_CERTIFICATE ]]; then textPass "$regx: API Gateway $API_GW_NAME ID $api in $stage has client certificate enabled" "$regx" - else + else textFail "$regx: API Gateway $API_GW_NAME ID $api in $stage has not client certificate enabled" "$regx" - fi - done + fi + done fi done else diff --git a/checks/check_extra744 b/checks/check_extra744 index 6b7bfb3a..4bc9edd6 100644 --- a/checks/check_extra744 +++ b/checks/check_extra744 @@ -14,6 +14,7 @@ CHECK_ID_extra744="7.44" CHECK_TITLE_extra744="[extra744] Check if API Gateway has a WAF ACL attached (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra744="NOT_SCORED" CHECK_TYPE_extra744="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra744="AwsApiGatewayRestApi" CHECK_ALTERNATE_check744="extra744" extra744(){ @@ -23,15 +24,15 @@ extra744(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) LIST_OF_STAGES=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query 'item[*].stageName' --output text) - if [[ $LIST_OF_STAGES ]]; then + if [[ $LIST_OF_STAGES ]]; then for stage in $LIST_OF_STAGES; do CHECK_WAFACL=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query "item[?stageName==\`$stage\`].webAclArn" --output text) if [[ $CHECK_WAFACL ]]; then textPass "$regx: API Gateway $API_GW_NAME ID $api in $stage has $CHECK_WAFACL WAF ACL attached" "$regx" - else + else textFail "$regx: API Gateway $API_GW_NAME ID $api in $stage has not WAF ACL attached" "$regx" - fi - done + fi + done fi done else diff --git a/checks/check_extra745 b/checks/check_extra745 index 98a98e63..b8674e5c 100644 --- a/checks/check_extra745 +++ b/checks/check_extra745 @@ -14,6 +14,7 @@ 
CHECK_ID_extra745="7.45" CHECK_TITLE_extra745="[extra745] Check if API Gateway endpoint is public or private (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra745="NOT_SCORED" CHECK_TYPE_extra745="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra745="AwsApiGatewayRestApi" CHECK_ALTERNATE_check745="extra745" extra745(){ @@ -23,7 +24,7 @@ extra745(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) ENDPOINT_CONFIG_TYPE=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-rest-api --rest-api-id $api --query endpointConfiguration.types --output text) - if [[ $ENDPOINT_CONFIG_TYPE ]]; then + if [[ $ENDPOINT_CONFIG_TYPE ]]; then case $ENDPOINT_CONFIG_TYPE in PRIVATE ) textPass "$regx: API Gateway $API_GW_NAME ID $api is set as $ENDPOINT_CONFIG_TYPE" "$regx" diff --git a/checks/check_extra746 b/checks/check_extra746 index 073ca55f..79de26e7 100644 --- a/checks/check_extra746 +++ b/checks/check_extra746 @@ -14,6 +14,7 @@ CHECK_ID_extra746="7.46" CHECK_TITLE_extra746="[extra746] Check if API Gateway has configured authorizers (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra746="NOT_SCORED" CHECK_TYPE_extra746="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra746="AwsApiGatewayRestApi" CHECK_ALTERNATE_check746="extra746" extra746(){ @@ -23,7 +24,7 @@ extra746(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) AUTHORIZER_CONFIGURED=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-authorizers --rest-api-id $api --query items[*].type --output text) - if [[ $AUTHORIZER_CONFIGURED ]]; then + if [[ $AUTHORIZER_CONFIGURED ]]; then textPass "$regx: API Gateway $API_GW_NAME ID $api has authorizer configured" "$regx" else textFail "$regx: API Gateway $API_GW_NAME ID $api has not authorizer configured" "$regx" diff --git a/checks/check_extra747 
b/checks/check_extra747 index b9e28b1f..027359bf 100644 --- a/checks/check_extra747 +++ b/checks/check_extra747 @@ -14,6 +14,7 @@ CHECK_ID_extra747="7.47" CHECK_TITLE_extra747="[extra747] Check if RDS instances is integrated with CloudWatch Logs (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra747="NOT_SCORED" CHECK_TYPE_extra747="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra747="AwsRdsDbInstance" CHECK_ALTERNATE_check747="extra747" extra747(){ @@ -21,7 +22,7 @@ extra747(){ LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) if [[ $LIST_OF_RDS_INSTANCES ]];then for rdsinstance in $LIST_OF_RDS_INSTANCES; do - # if retention is 0 then is disabled + # if retention is 0 then is disabled ENABLED_CLOUDWATCHLOGS_EXPORTS=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].EnabledCloudwatchLogsExports' --output text) if [[ $ENABLED_CLOUDWATCHLOGS_EXPORTS ]]; then textPass "$regx: RDS instance $rdsinstance is shipping $ENABLED_CLOUDWATCHLOGS_EXPORTS to CloudWatch Logs" "$regx" diff --git a/checks/check_extra748 b/checks/check_extra748 index 50056980..9aa71147 100644 --- a/checks/check_extra748 +++ b/checks/check_extra748 @@ -14,6 +14,7 @@ CHECK_ID_extra748="7.48" CHECK_TITLE_extra748="[extra748] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to any port (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra748="NOT_SCORED" CHECK_TYPE_extra748="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra748="AwsEc2SecurityGroup" CHECK_ALTERNATE_check748="extra748" extra748(){ diff --git a/checks/check_extra749 b/checks/check_extra749 index 3c4a53be..0a74e394 100644 --- a/checks/check_extra749 +++ b/checks/check_extra749 @@ -14,6 +14,7 @@ CHECK_ID_extra749="7.49" CHECK_TITLE_extra749="[extra749] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Oracle ports 1521 or 2483 (Not Scored) (Not 
part of CIS benchmark)" CHECK_SCORED_extra749="NOT_SCORED" CHECK_TYPE_extra749="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra749="AwsEc2SecurityGroup" CHECK_ALTERNATE_check749="extra749" extra749(){ diff --git a/checks/check_extra75 b/checks/check_extra75 index f2007621..91a34df3 100644 --- a/checks/check_extra75 +++ b/checks/check_extra75 @@ -10,10 +10,11 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra75="7.5,7.05" +CHECK_ID_extra75="7.5" CHECK_TITLE_extra75="[extra75] Ensure there are no Security Groups not being used (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra75="NOT_SCORED" CHECK_TYPE_extra75="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra75="AwsEc2SecurityGroup" CHECK_ALTERNATE_extra705="extra75" CHECK_ALTERNATE_check75="extra75" CHECK_ALTERNATE_check705="extra75" diff --git a/checks/check_extra750 b/checks/check_extra750 index 1b1d62e3..fd105bfc 100644 --- a/checks/check_extra750 +++ b/checks/check_extra750 @@ -14,6 +14,7 @@ CHECK_ID_extra750="7.50" CHECK_TITLE_extra750="[extra750] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to MySQL port 3306 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra750="NOT_SCORED" CHECK_TYPE_extra750="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra750="AwsEc2SecurityGroup" CHECK_ALTERNATE_check750="extra750" extra750(){ diff --git a/checks/check_extra751 b/checks/check_extra751 index 3c8255ea..0d623ba8 100644 --- a/checks/check_extra751 +++ b/checks/check_extra751 @@ -14,6 +14,7 @@ CHECK_ID_extra751="7.51" CHECK_TITLE_extra751="[extra751] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Postgres port 5432 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra751="NOT_SCORED" CHECK_TYPE_extra751="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra751="AwsEc2SecurityGroup" 
CHECK_ALTERNATE_check751="extra751" extra751(){ diff --git a/checks/check_extra752 b/checks/check_extra752 index 650c7e82..e6472181 100644 --- a/checks/check_extra752 +++ b/checks/check_extra752 @@ -14,6 +14,7 @@ CHECK_ID_extra752="7.52" CHECK_TITLE_extra752="[extra752] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Redis port 6379 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra752="NOT_SCORED" CHECK_TYPE_extra752="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra752="AwsEc2SecurityGroup" CHECK_ALTERNATE_check752="extra752" extra752(){ diff --git a/checks/check_extra753 b/checks/check_extra753 index 2ba23a51..44824b93 100644 --- a/checks/check_extra753 +++ b/checks/check_extra753 @@ -14,6 +14,7 @@ CHECK_ID_extra753="7.53" CHECK_TITLE_extra753="[extra753] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to MongoDB ports 27017 and 27018 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra753="NOT_SCORED" CHECK_TYPE_extra753="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra753="AwsEc2SecurityGroup" CHECK_ALTERNATE_check753="extra753" extra753(){ diff --git a/checks/check_extra754 b/checks/check_extra754 index 0b7472e0..30e8a939 100644 --- a/checks/check_extra754 +++ b/checks/check_extra754 @@ -14,6 +14,7 @@ CHECK_ID_extra754="7.54" CHECK_TITLE_extra754="[extra754] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Cassandra ports 7199 or 9160 or 8888 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra754="NOT_SCORED" CHECK_TYPE_extra754="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra754="AwsEc2SecurityGroup" CHECK_ALTERNATE_check754="extra754" extra754(){ diff --git a/checks/check_extra755 b/checks/check_extra755 index 0481f444..e0164d76 100644 --- a/checks/check_extra755 +++ b/checks/check_extra755 @@ -14,6 +14,7 @@ CHECK_ID_extra755="7.55" CHECK_TITLE_extra755="[extra755] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Memcached port 11211 (Not Scored) (Not part of CIS benchmark)" 
CHECK_SCORED_extra755="NOT_SCORED" CHECK_TYPE_extra755="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra755="AwsEc2SecurityGroup" CHECK_ALTERNATE_check755="extra755" extra755(){ diff --git a/checks/check_extra756 b/checks/check_extra756 index 538fa55b..69dabb60 100644 --- a/checks/check_extra756 +++ b/checks/check_extra756 @@ -14,6 +14,7 @@ CHECK_ID_extra756="7.56" CHECK_TITLE_extra756="[extra756] Check if Redshift cluster is Public Accessible (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra756="NOT_SCORED" CHECK_TYPE_extra756="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra756="AwsRedshiftCluster" CHECK_ALTERNATE_check756="extra756" extra756(){ @@ -22,10 +23,10 @@ extra756(){ if [[ $LIST_OF_RS_CLUSTERS ]];then for cluster in $LIST_OF_RS_CLUSTERS; do IS_PUBLICLY_ACCESSIBLE=$($AWSCLI $PROFILE_OPT redshift describe-clusters --region $regx --cluster-identifier $cluster --query Clusters[*].PubliclyAccessible --output text|grep True) - if [[ $IS_PUBLICLY_ACCESSIBLE ]]; then - textFail "$regx: Redshift cluster $cluster is publicly accessible" "$regx" + if [[ $IS_PUBLICLY_ACCESSIBLE ]]; then + textFail "$regx: Redshift cluster $cluster is publicly accessible" "$regx" else - textPass "$regx: Redshift cluster $cluster is not publicly accessible" "$regx" + textPass "$regx: Redshift cluster $cluster is not publicly accessible" "$regx" fi done else diff --git a/checks/check_extra757 b/checks/check_extra757 index 0320081a..a5ddf6fd 100644 --- a/checks/check_extra757 +++ b/checks/check_extra757 @@ -14,6 +14,7 @@ CHECK_ID_extra757="7.57" CHECK_TITLE_extra757="[extra757] Check EC2 Instances older than 6 months (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra757="NOT_SCORED" CHECK_TYPE_extra757="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra757="AwsEc2Instance" CHECK_ALTERNATE_check757="extra757" extra757(){ diff --git a/checks/check_extra758 b/checks/check_extra758 index 1c402aa5..5f5fe40b 100644 --- a/checks/check_extra758 +++ b/checks/check_extra758 @@ -14,6 +14,7 @@ 
CHECK_ID_extra758="7.58" CHECK_TITLE_extra758="[extra758] Check EC2 Instances older than 12 months (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra758="NOT_SCORED" CHECK_TYPE_extra758="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra758="AwsEc2Instance" CHECK_ALTERNATE_check758="extra758" extra758(){ diff --git a/checks/check_extra759 b/checks/check_extra759 index dbd07632..6b3ff15e 100644 --- a/checks/check_extra759 +++ b/checks/check_extra759 @@ -14,14 +14,15 @@ CHECK_ID_extra759="7.59" CHECK_TITLE_extra759="[extra759] Find secrets in Lambda functions variables (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra759="NOT_SCORED" CHECK_TYPE_extra759="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra759="AwsLambdaFunction" CHECK_ALTERNATE_check759="extra759" extra759(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER - fi + fi textInfo "Looking for secrets in Lambda variables across all regions... " for regx in $REGIONS; do diff --git a/checks/check_extra76 b/checks/check_extra76 index 2a65705f..e524ea7d 100644 --- a/checks/check_extra76 +++ b/checks/check_extra76 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra76="7.6,7.06" +CHECK_ID_extra76="7.6" CHECK_TITLE_extra76="[extra76] Ensure there are no EC2 AMIs set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra76="NOT_SCORED" CHECK_TYPE_extra76="EXTRA" diff --git a/checks/check_extra760 b/checks/check_extra760 index eac466e7..ee66c791 100644 --- a/checks/check_extra760 +++ b/checks/check_extra760 @@ -14,11 +14,12 @@ CHECK_ID_extra760="7.60" CHECK_TITLE_extra760="[extra760] Find secrets in Lambda functions code (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra760="NOT_SCORED" CHECK_TYPE_extra760="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra760="AwsLambdaFunction" CHECK_ALTERNATE_check760="extra760" extra760(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi diff --git a/checks/check_extra762 b/checks/check_extra762 index b54cd5ac..c66974e6 100644 --- a/checks/check_extra762 +++ b/checks/check_extra762 @@ -14,6 +14,7 @@ CHECK_ID_extra762="7.62" CHECK_TITLE_extra762="[extra762] Find obsolete Lambda runtimes (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra762="NOT_SCORED" CHECK_TYPE_extra762="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra762="AwsLambdaFunction" CHECK_ALTERNATE_check762="extra762" extra762(){ diff --git a/checks/check_extra763 b/checks/check_extra763 index 7c839cf3..9ceb9494 100644 --- a/checks/check_extra763 +++ b/checks/check_extra763 @@ -14,6 +14,7 @@ CHECK_ID_extra763="7.63" CHECK_TITLE_extra763="[extra763] Check if S3 buckets have object versioning enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra763="NOT_SCORED" CHECK_TYPE_extra763="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra763="AwsS3Bucket" CHECK_ALTERNATE_check763="extra763" extra763(){ @@ -28,7 +29,7 @@ extra763(){ fi if [[ $(echo "$BUCKET_VERSIONING_ENABLED" | grep "^Enabled$") ]]; then textPass "Bucket $bucket has 
versioning enabled" - else + else textFail "Bucket $bucket has versioning disabled!" fi done diff --git a/checks/check_extra764 b/checks/check_extra764 index 8b849208..96cf9100 100644 --- a/checks/check_extra764 +++ b/checks/check_extra764 @@ -14,16 +14,17 @@ CHECK_ID_extra764="7.64" CHECK_TITLE_extra764="[extra764] Check if S3 buckets have secure transport policy (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra764="NOT_SCORED" CHECK_TYPE_extra764="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra764="AwsS3Bucket" CHECK_ALTERNATE_check764="extra764" extra764(){ - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text|xargs -n1) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text --region $REGION|xargs -n1) if [[ $LIST_OF_BUCKETS ]]; then for bucket in $LIST_OF_BUCKETS;do TEMP_STP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-${bucket}.policy.XXXXXXXXXX) # get bucket policy - $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --output text --query Policy > $TEMP_STP_POLICY_FILE 2>&1 + $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --output text --query Policy --region $REGION > $TEMP_STP_POLICY_FILE 2>&1 if [[ $(grep AccessDenied $TEMP_STP_POLICY_FILE) ]]; then textFail "Access Denied Trying to Get Bucket Policy for $bucket" rm -f $TEMP_STP_POLICY_FILE @@ -36,13 +37,12 @@ extra764(){ fi # https://aws.amazon.com/premiumsupport/knowledge-center/s3-bucket-policy-for-config-rule/ - CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:aws:s3:::${bucket}" '.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "false")') + CHECK_BUCKET_STP_POLICY_PRESENT=$(cat 
$TEMP_STP_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}" '.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "false")') if [[ $CHECK_BUCKET_STP_POLICY_PRESENT ]]; then textPass "Bucket $bucket has S3 bucket policy to deny requests over insecure transport" else textFail "Bucket $bucket allows requests over insecure transport" fi - rm -fr $TEMP_STP_POLICY_FILE done diff --git a/checks/check_extra767 b/checks/check_extra767 index 1bbe4d8a..1683d466 100644 --- a/checks/check_extra767 +++ b/checks/check_extra767 @@ -14,6 +14,7 @@ CHECK_ID_extra767="7.67" CHECK_TITLE_extra767="[extra767] Check if CloudFront distributions have Field Level Encryption enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra767="NOT_SCORED" CHECK_TYPE_extra767="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra767="AwsCloudFrontDistribution" CHECK_ALTERNATE_check767="extra767" extra767(){ @@ -23,7 +24,7 @@ extra767(){ CHECK_FLE=$($AWSCLI cloudfront get-distribution --id $dist --query Distribution.DistributionConfig.DefaultCacheBehavior.FieldLevelEncryptionId $PROFILE_OPT --output text) if [[ $CHECK_FLE ]]; then textPass "CloudFront distribution $dist has Field Level Encryption enabled" "$regx" - else + else textFail "CloudFront distribution $dist has Field Level Encryption disabled!" 
"$regx" fi done diff --git a/checks/check_extra768 b/checks/check_extra768 index 2c10e1dd..591983af 100644 --- a/checks/check_extra768 +++ b/checks/check_extra768 @@ -14,6 +14,7 @@ CHECK_ID_extra768="7.68" CHECK_TITLE_extra768="[extra768] Find secrets in ECS task definitions variables (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra768="NOT_SCORED" CHECK_TYPE_extra768="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra768="AwsEcsTaskDefinition" CHECK_ALTERNATE_check768="extra768" extra768(){ @@ -22,19 +23,22 @@ extra768(){ # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi - textInfo "Looking for secrets in ECS task definitions' environment variables across all regions... " for regx in $REGIONS; do - LIST_OF_TASK_DEFINITIONS=$($AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx --query taskDefinitionArns[*] --output text) - if [[ $LIST_OF_TASK_DEFINITIONS ]]; then - for taskDefinition in $LIST_OF_TASK_DEFINITIONS;do - IFS='/' read -r -a splitArn <<< "$taskDefinition" + # Get a list of all families first: + FAMILIES=$($AWSCLI ecs list-task-definition-families $PROFILE_OPT --region $regx --status ACTIVE | jq -r .families[]) + if [[ $FAMILIES ]]; then + for FAMILY in $FAMILIES;do + # Get the full task definition arn: + TASK_DEFINITION_TEMP=$($AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx --family-prefix $FAMILY --sort DESC --max-items 1 | jq -r .taskDefinitionArns[0]) + # We only care about the task definition name: + IFS='/' read -r -a splitArn <<< "$TASK_DEFINITION_TEMP" TASK_DEFINITION=${splitArn[1]} TASK_DEFINITION_ENV_VARIABLES_FILE="$SECRETS_TEMP_FOLDER/extra768-$TASK_DEFINITION-$regx-variables.txt" - TASK_DEFINITION_ENV_VARIABLES=$($AWSCLI ecs $PROFILE_OPT --region $regx describe-task-definition --task-definition $taskDefinition --query 'taskDefinition.containerDefinitions[*].environment' --output text > $TASK_DEFINITION_ENV_VARIABLES_FILE) + TASK_DEFINITION_ENV_VARIABLES=$($AWSCLI ecs $PROFILE_OPT 
--region $regx describe-task-definition --task-definition $TASK_DEFINITION --query 'taskDefinition.containerDefinitions[*].environment' --output text > $TASK_DEFINITION_ENV_VARIABLES_FILE) if [ -s $TASK_DEFINITION_ENV_VARIABLES_FILE ];then - # Implementation using https://github.com/Yelp/detect-secrets - FINDINGS=$(secretsDetector file $TASK_DEFINITION_ENV_VARIABLES_FILE) + # Implementation using https://github.com/Yelp/detect-secrets + FINDINGS=$(secretsDetector file $TASK_DEFINITION_ENV_VARIABLES_FILE) if [[ $FINDINGS -eq 0 ]]; then textPass "$regx: No secrets found in ECS task definition $TASK_DEFINITION variables" "$regx" # delete file if nothing interesting is there diff --git a/checks/check_extra77 b/checks/check_extra77 index cfd1078a..ad3011c6 100644 --- a/checks/check_extra77 +++ b/checks/check_extra77 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra77="7.7,7.07" +CHECK_ID_extra77="7.7" CHECK_TITLE_extra77="[extra77] Ensure there are no ECR repositories set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra77="NOT_SCORED" CHECK_TYPE_extra77="EXTRA" diff --git a/checks/check_extra770 b/checks/check_extra770 index f39de564..b657bbaa 100644 --- a/checks/check_extra770 +++ b/checks/check_extra770 @@ -14,6 +14,7 @@ CHECK_ID_extra770="7.70" CHECK_TITLE_extra770="[extra770] Check for internet facing EC2 instances with Instance Profiles attached (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra770="NOT_SCORED" CHECK_TYPE_extra770="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra770="AwsEc2Instance" CHECK_ALTERNATE_check770="extra770" extra770(){ diff --git a/checks/check_extra771 b/checks/check_extra771 index ecd240c3..98d2da9b 100644 --- a/checks/check_extra771 +++ b/checks/check_extra771 @@ -14,17 +14,18 @@ CHECK_ID_extra771="7.71" CHECK_TITLE_extra771="[extra771] Check if S3 buckets have policies which 
allow WRITE access (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra771="NOT_SCORED" CHECK_TYPE_extra771="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra771="AwsS3Bucket" CHECK_ALTERNATE_check771="extra771" extra771(){ - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text|xargs -n1) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --region $REGION --query Buckets[*].Name --output text|xargs -n1) if [[ $LIST_OF_BUCKETS ]]; then for bucket in $LIST_OF_BUCKETS;do - BUCKET_POLICY_STATEMENTS=$($AWSCLI s3api $PROFILE_OPT get-bucket-policy --bucket $bucket --output json --query Policy 2>&1) + BUCKET_POLICY_STATEMENTS=$($AWSCLI s3api $PROFILE_OPT get-bucket-policy --region $REGION --bucket $bucket --output json --query Policy 2>&1) if [[ $BUCKET_POLICY_STATEMENTS == *GetBucketPolicy* ]]; then textInfo "Bucket policy does not exist for bucket $bucket" else - BUCKET_POLICY_BAD_STATEMENTS=$(echo $BUCKET_POLICY_STATEMENTS | jq --arg arn "arn:aws:s3:::$bucket" 'fromjson | .Statement[]|select(.Effect=="Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and (.Action|startswith("s3:Put") or startswith("s3:*")) and .Condition == null)') + BUCKET_POLICY_BAD_STATEMENTS=$(echo $BUCKET_POLICY_STATEMENTS | jq --arg arn "arn:${AWS_PARTITION}:s3:::$bucket" 'fromjson | .Statement[]|select(.Effect=="Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and (.Action|startswith("s3:Put") or startswith("s3:*")) and .Condition == null)') if [[ $BUCKET_POLICY_BAD_STATEMENTS != "" ]]; then textFail "Bucket $bucket allows public write: $BUCKET_POLICY_BAD_STATEMENTS" else diff --git a/checks/check_extra772 b/checks/check_extra772 index 83f3cdd5..088be100 100644 --- a/checks/check_extra772 +++ b/checks/check_extra772 @@ -14,6 +14,7 @@ CHECK_ID_extra772="7.72" CHECK_TITLE_extra772="[extra772] 
Check if elastic IPs are unused (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra772="NOT_SCORED" CHECK_TYPE_extra772="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra772="AwsEc2Eip" CHECK_ALTERNATE_check772="extra772" extra772(){ diff --git a/checks/check_extra773 b/checks/check_extra773 index ecd26385..dbfba0ca 100644 --- a/checks/check_extra773 +++ b/checks/check_extra773 @@ -14,6 +14,7 @@ CHECK_ID_extra773="7.73" CHECK_TITLE_extra773="[extra773] Check if CloudFront distributions are using WAF (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra773="NOT_SCORED" CHECK_TYPE_extra773="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra773="AwsCloudFrontDistribution" CHECK_ALTERNATE_check773="extra773" extra773(){ diff --git a/checks/check_extra774 b/checks/check_extra774 index 83e2aa6f..b88bfad8 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -11,24 +11,12 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra774="7.74" -CHECK_TITLE_extra774="[extra774] Check if user have unused console login" +CHECK_TITLE_extra774="[extra774] Ensure credentials unused for 30 days or greater are disabled" CHECK_SCORED_extra774="NOT_SCORED" CHECK_TYPE_extra774="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra774="AwsIamUser" CHECK_ALTERNATE_check774="extra774" extra774(){ - MAX_DAYS=-30 - LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$5 }' |grep true | awk '{ print $1 }') - - for i in $LIST_USERS_WITH_PASSWORD_ENABLED; do - user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $1 }') - last_login_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $2 }') - - days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) - if [ "$days_not_in_use" -lt "$MAX_DAYS" ];then - textFail "User $user has not used console login for more then ${MAX_DAYS#-} days" - else - textPass "User $user has used console login in the past ${MAX_DAYS#-} days" - fi - done + check_creds_used_in_last_days 30 } diff --git a/checks/check_extra777 b/checks/check_extra777 new file mode 100644 index 00000000..fa3d8c0c --- /dev/null +++ b/checks/check_extra777 @@ -0,0 +1,59 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +# Current VPC Limit is 120 rules (60 inbound and 60 outbound) +# Reference: https://docs.aws.amazon.com/vpc/latest/userguide/amazon-vpc-limits.html + +CHECK_ID_extra777="7.77" +CHECK_TITLE_extra777="[extra777] Find VPC security groups with many ingress or egress rules (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra777="NOT_SCORED" +CHECK_TYPE_extra777="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra777="AwsEc2SecurityGroup" +CHECK_ALTERNATE_check777="extra777" + +extra777(){ + THRESHOLD=50 + textInfo "Looking for VPC security groups with more than ${THRESHOLD} rules across all regions... " + + for regx in ${REGIONS}; do + SECURITY_GROUP_IDS=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --region ${regx} \ + --query 'SecurityGroups[*].GroupId' \ + --output text | xargs + ) + + for SECURITY_GROUP in ${SECURITY_GROUP_IDS}; do + + INGRESS_TOTAL=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --filter "Name=group-id,Values=${SECURITY_GROUP}" \ + --query "SecurityGroups[*].IpPermissions[*].IpRanges" \ + --region ${regx} \ + --output text | wc -l | xargs + ) + + EGRESS_TOTAL=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --filter "Name=group-id,Values=${SECURITY_GROUP}" \ + --query "SecurityGroups[*].IpPermissionsEgress[*].IpRanges" \ + --region ${regx} \ + --output text | wc -l | xargs + ) + + if [[ (${INGRESS_TOTAL} -ge ${THRESHOLD}) || (${EGRESS_TOTAL} -ge ${THRESHOLD}) ]]; then + textFail "${regx}: ${SECURITY_GROUP} has ${INGRESS_TOTAL} inbound rules and ${EGRESS_TOTAL} outbound rules" "${regx}" + fi + done + done +} diff --git a/checks/check_extra778 b/checks/check_extra778 new file mode 100644 index 00000000..8d511687 --- /dev/null +++ b/checks/check_extra778 @@ -0,0 +1,73 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the 
License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +CHECK_ID_extra778="7.78" +CHECK_TITLE_extra778="[extra778] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra778="NOT_SCORED" +CHECK_TYPE_extra778="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra778="AwsEc2SecurityGroup" +CHECK_ALTERNATE_check778="extra778" + +extra778(){ + CIDR_THRESHOLD=24 + RFC1918_REGEX="(^127\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)" + textInfo "Looking for VPC security groups with wide-open (=\`$ES_API_PORT\`) || (FromPort<=\`$ES_DATA_PORT\` && ToPort>=\`$ES_DATA_PORT\`) || (FromPort<=\`$ES_KIBANA_PORT\` && ToPort>=\`$ES_KIBANA_PORT\`)) && (contains(IpRanges[].CidrIp, \`0.0.0.0/0\`) || contains(Ipv6Ranges[].CidrIpv6, \`::/0\`))]) > \`0\`].{GroupId:GroupId}") + # in case of open security groups goes through each one + if [[ $SG_LIST ]];then + for sg in $SG_LIST;do + # temp file store the list of instances IDs and public IP address if found + TEMP_EXTRA779_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA779.XXXXXXXXXX) + # finds instances with that open security group attached and get its public ip address (if it has one) + $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA779_FILE + # in case of exposed instances it does access checks + if [[ -s "$TEMP_EXTRA779_FILE" ]];then + while read instance eip ; do + if [[ "$eip" == "None" ]];then + textInfo "$regx: Found 
instance $instance with private IP on Security Group: $sg" "$regx" + else + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports - use extra787 to test AUTH" "$regx" + fi + done < <(cat $TEMP_EXTRA779_FILE) + fi + rm -rf $TEMP_EXTRA779_FILE + done + else + textPass "$regx: No Security Groups found open to 0.0.0.0/0 for Elasticsearch/Kibana ports" "$regx" + fi + done +} diff --git a/checks/check_extra78 b/checks/check_extra78 index 9bea1a3d..d1c0c8ab 100644 --- a/checks/check_extra78 +++ b/checks/check_extra78 @@ -10,10 +10,11 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra78="7.8,7.08" +CHECK_ID_extra78="7.8" CHECK_TITLE_extra78="[extra78] Ensure there are no Public Accessible RDS instances (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra78="NOT_SCORED" CHECK_TYPE_extra78="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra78="AwsRdsDbInstance" CHECK_ALTERNATE_extra708="extra78" CHECK_ALTERNATE_check78="extra78" CHECK_ALTERNATE_check708="extra78" diff --git a/checks/check_extra780 b/checks/check_extra780 new file mode 100644 index 00000000..0a694c35 --- /dev/null +++ b/checks/check_extra780 @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra780="7.80" +CHECK_TITLE_extra780="[extra780] Check if Amazon Elasticsearch Service (ES) domains has Amazon Cognito authentication for Kibana enabled" +CHECK_SCORED_extra780="NOT_SCORED" +CHECK_TYPE_extra780="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra780="AwsElasticsearchDomain" +CHECK_ALTERNATE_check780="extra780" + +extra780(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_COGNITO_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.CognitoOptions.Enabled' --output text|grep -i true) + if [[ $CHECK_IF_COGNITO_ENABLED ]];then + textPass "$regx: Amazon ES domain $domain has Amazon Cognito authentication for Kibana enabled" "$regx" + else + textFail "$regx: Amazon ES domain $domain does not have Amazon Cognito authentication for Kibana enabled" "$regx" + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra781 b/checks/check_extra781 new file mode 100644 index 00000000..e4f36620 --- /dev/null +++ b/checks/check_extra781 @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra781="7.81" +CHECK_TITLE_extra781="[extra781] Check if Amazon Elasticsearch Service (ES) domains has encryption at-rest enabled" +CHECK_SCORED_extra781="NOT_SCORED" +CHECK_TYPE_extra781="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra781="AwsElasticsearchDomain" +CHECK_ALTERNATE_check781="extra781" + +extra781(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_ENCREST_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.EncryptionAtRestOptions.Enabled' --output text|grep -i true) + if [[ $CHECK_IF_ENCREST_ENABLED ]];then + textPass "$regx: Amazon ES domain $domain has encryption at-rest enabled" "$regx" + else + textFail "$regx: Amazon ES domain $domain does not have encryption at-rest enabled" "$regx" + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra782 b/checks/check_extra782 new file mode 100644 index 00000000..ab169bee --- /dev/null +++ b/checks/check_extra782 @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra782="7.82" +CHECK_TITLE_extra782="[extra782] Check if Amazon Elasticsearch Service (ES) domains has node-to-node encryption enabled" +CHECK_SCORED_extra782="NOT_SCORED" +CHECK_TYPE_extra782="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra782="AwsElasticsearchDomain" +CHECK_ALTERNATE_check782="extra782" + +extra782(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_NODETOENCR_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.NodeToNodeEncryptionOptions.Enabled' --output text|grep -i true) + if [[ $CHECK_IF_NODETOENCR_ENABLED ]];then + textPass "$regx: Amazon ES domain $domain has node-to-node encryption enabled" "$regx" + else + textFail "$regx: Amazon ES domain $domain does not have node-to-node encryption enabled" "$regx" + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra783 b/checks/check_extra783 new file mode 100644 index 00000000..fa76f6f1 --- /dev/null +++ b/checks/check_extra783 @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra783="7.83" +CHECK_TITLE_extra783="[extra783] Check if Amazon Elasticsearch Service (ES) domains has enforce HTTPS enabled" +CHECK_SCORED_extra783="NOT_SCORED" +CHECK_TYPE_extra783="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra783="AwsElasticsearchDomain" +CHECK_ALTERNATE_check783="extra783" + +extra783(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_ENFORCEHTTPS_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.DomainEndpointOptions.EnforceHTTPS' --output text|grep -i true) + if [[ $CHECK_IF_ENFORCEHTTPS_ENABLED ]];then + textPass "$regx: Amazon ES domain $domain has enforce HTTPS enabled" "$regx" + else + textFail "$regx: Amazon ES domain $domain does not have enforce HTTPS enabled" "$regx" + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra784 b/checks/check_extra784 new file mode 100644 index 00000000..29779d50 --- /dev/null +++ b/checks/check_extra784 @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra784="7.84" +CHECK_TITLE_extra784="[extra784] Check if Amazon Elasticsearch Service (ES) domains internal user database enabled" +CHECK_SCORED_extra784="NOT_SCORED" +CHECK_TYPE_extra784="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra784="AwsElasticsearchDomain" +CHECK_ALTERNATE_check784="extra784" + +extra784(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_INTERNALDB_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.AdvancedSecurityOptions.InternalUserDatabaseEnabled' --output text|grep -i true) + if [[ $CHECK_IF_INTERNALDB_ENABLED ]];then + textPass "$regx: Amazon ES domain $domain has internal user database enabled" "$regx" + else + textFail "$regx: Amazon ES domain $domain does not have internal user database enabled" "$regx" + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra785 b/checks/check_extra785 new file mode 100644 index 00000000..59ffba11 --- /dev/null +++ b/checks/check_extra785 @@ -0,0 +1,43 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra785="7.85" +CHECK_TITLE_extra785="[extra785] Check if Amazon Elasticsearch Service (ES) domains have updates available" +CHECK_SCORED_extra785="NOT_SCORED" +CHECK_TYPE_extra785="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra785="AwsElasticsearchDomain" +CHECK_ALTERNATE_check785="extra785" + +# NOTE! +# API does not properly show if an update is available while it is a new version available +# that can be done using the Console but not the API, not sure if it is a bug +# I have to investigate further + +extra785(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_UPDATE_AVAILABLE_AND_VERSION=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[ServiceSoftwareOptions.UpdateAvailable,ElasticsearchVersion]' --output text) + while read update_status es_version;do + if [[ $update_status != "False" ]];then + textInfo "$regx: Amazon ES domain $domain v$es_version has updates available" "$regx" + else + textPass "$regx: Amazon ES domain $domain v$es_version does not have updates available" "$regx" + fi + done < <(echo $CHECK_IF_UPDATE_AVAILABLE_AND_VERSION) + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra786 b/checks/check_extra786 new file mode 100644 index 00000000..f011a7f4 --- /dev/null +++ b/checks/check_extra786 @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra786="7.86" +CHECK_TITLE_extra786="[extra786] Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra786="NOT_SCORED" +CHECK_TYPE_extra786="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra786="AwsEc2Instance" +CHECK_ALTERNATE_check786="extra786" + +extra786(){ + for regx in $REGIONS; do + TEMP_EXTRA786_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA786.XXXXXXXXXX) + $AWSCLI ec2 describe-instances $PROFILE_OPT --region $regx \ + --query 'Reservations[*].Instances[*].{HttpTokens:MetadataOptions.HttpTokens,HttpEndpoint:MetadataOptions.HttpEndpoint,InstanceId:InstanceId}' \ + --output text --max-items $MAXITEMS > $TEMP_EXTRA786_FILE + # if the file contains data, there are instances in that region + if [[ -s "$TEMP_EXTRA786_FILE" ]];then + # here we read content from the file fields instanceid httptokens_status httpendpoint + while read httpendpoint httptokens_status instanceid ; do + #echo i:$instanceid tok:$httptokens_status end:$httpendpoint + if [[ "$httpendpoint" == "enabled" && "$httptokens_status" == "required" ]];then + textPass "$regx: EC2 Instance $instanceid has IMDSv2 enabled and required" "$regx" + elif [[ "$httpendpoint" == "disabled" ]];then + textInfo "$regx: EC2 Instance $instanceid has HTTP endpoint access to metadata service disabled" "$regx" + else + textFail "$regx: EC2 Instance $instanceid has IMDSv2 disabled or not required" "$regx" + fi + done < <(cat $TEMP_EXTRA786_FILE) + else + textInfo "$regx: no EC2 Instances found" "$regx" + fi + rm -fr 
$TEMP_EXTRA786_FILE + done +} + +# Remediation: + +# aws ec2 modify-instance-metadata-options \ +# --instance-id i-1234567898abcdef0 \ +# --http-tokens required \ +# --http-endpoint enabled + +# More information here https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html diff --git a/checks/check_extra787 b/checks/check_extra787 new file mode 100644 index 00000000..ea681f8e --- /dev/null +++ b/checks/check_extra787 @@ -0,0 +1,82 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra787="7.87" +CHECK_TITLE_extra787="[extra787] Check connection and authentication for Internet exposed Elasticsearch/Kibana ports" +CHECK_SCORED_extra787="NOT_SCORED" +CHECK_TYPE_extra787="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra787="AwsEc2Instance" +CHECK_ALTERNATE_check787="extra787" + +extra787(){ + # Prowler will try to access each ElasticSearch server to port: + # 9200 API, 9300 Communication and 5601 Kibana to figure out if authentication is enabled. 
+ # That is from the host where Prowler is running and will try to read indices or get kibana status + ES_API_PORT="9200" + ES_DATA_PORT="9300" + ES_KIBANA_PORT="5601" + + for regx in $REGIONS; do + # create a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT + SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ + --query "SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=\`$ES_API_PORT\` && ToPort>=\`$ES_API_PORT\`) || (FromPort<=\`$ES_DATA_PORT\` && ToPort>=\`$ES_DATA_PORT\`) || (FromPort<=\`$ES_KIBANA_PORT\` && ToPort>=\`$ES_KIBANA_PORT\`)) && (contains(IpRanges[].CidrIp, \`0.0.0.0/0\`) || contains(Ipv6Ranges[].CidrIpv6, \`::/0\`))]) > \`0\`].{GroupId:GroupId}") + # in case of open security groups goes through each one + if [[ $SG_LIST ]];then + for sg in $SG_LIST;do + # temp file store the list of instances IDs and public IP address if found + TEMP_EXTRA787_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA787.XXXXXXXXXX) + # finds instances with that open security group attached and get its public ip address (if it has one) + $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA787_FILE + # in case of exposed instances it does access checks + if [[ -s "$TEMP_EXTRA787_FILE" ]];then + while read instance eip ; do + if [[ "$eip" != "None" ]];then + # check for Elasticsearch on port $ES_API_PORT, rest API HTTP. 
+ CHECH_HTTP_ES_API=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_API_PORT/_cat/indices") + httpStatus $CHECH_HTTP_ES_API + if [[ $CHECH_HTTP_ES_API -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" + fi + # check for port $ES_DATA_PORT TCP, this is the communication port, not: + # test_tcp_connectivity is in include/os_detector + # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) + CHECH_HTTP_ES_DATA=$(test_tcp_connectivity $eip $ES_DATA_PORT 2) + # Using HTTP error codes here as well to reuse httpStatus function + # codes for better handling, so 200 is open and 000 is not responding + httpStatus $CHECH_HTTP_ES_DATA + if [[ $CHECH_HTTP_ES_DATA -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" + fi + # check for Kibana on port $ES_KIBANA_PORT + CHECH_HTTP_ES_KIBANA=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_KIBANA_PORT/api/status") + httpStatus $CHECH_HTTP_ES_KIBANA + if [[ $CHECH_HTTP_ES_KIBANA -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" + fi + else + textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" 
"$regx" + fi + done < <(cat $TEMP_EXTRA787_FILE) + fi + rm -rf $TEMP_EXTRA787_FILE + done + else + textPass "$regx: No Security Groups found open to 0.0.0.0/0 for Elasticsearch/Kibana ports" "$regx" + fi + done +} diff --git a/checks/check_extra788 b/checks/check_extra788 new file mode 100644 index 00000000..f09a9fcc --- /dev/null +++ b/checks/check_extra788 @@ -0,0 +1,93 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra788="7.88" +CHECK_TITLE_extra788="[extra788] Check connection and authentication for Internet exposed Amazon Elasticsearch Service (ES) domains" +CHECK_SCORED_extra788="NOT_SCORED" +CHECK_TYPE_extra788="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra788="AwsElasticsearchDomain" +CHECK_ALTERNATE_check788="extra788" + +extra788(){ + # Prowler will try to access each ElasticSearch server to the public URI endpoint. 
+ # That is from the host where Prowler is running and will try to read indices or get kibana status + + # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) + # get endpoint or vpc endpoints + ES_DOMAIN_ENDPOINT=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[Endpoint || Endpoints]' --output text) + # If the endpoint starts with "vpc-" it is in a VPC then it is fine. + if [[ "$ES_DOMAIN_ENDPOINT" =~ ^vpc-* ]];then + ES_DOMAIN_VPC=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.VPCOptions.VPCId' --output text) + textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" + else + $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null + CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') + CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then + # get content of IpAddress."aws:SourceIp" and get a clean list + LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') + unset CONDITION_HAS_PUBLIC_IP_ARRAY + for condition_ip in "${LIST_CONDITION_IPS}";do + CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') + if [[ $CONDITION_HAS_PRIVATE_IP ]];then + CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_PUBLIC_IP=$(echo "${condition_ip}" | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') + if [[ $CONDITION_HAS_PUBLIC_IP ]];then + CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_ZERO_NET=$(echo "${condition_ip}" | grep -E '^(0\.0\.0\.0)') + CONDITION_HAS_STAR=$(echo "${condition_ip}" | grep -E '^\*') + done + CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$CONDITION_HAS_ZERO_NET + CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR + fi + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + #Prowler will check to read indices or kibana status if no conditions, condition IP is *, 0.0.0.0/0, 0.0.0.0/8 or any public IP. 
+ # check for REST API on port 443 + CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") + httpStatus $CHECH_ES_HTTPS + if [[ $CHECH_ES_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + # check for Kibana on port 443 + CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana") + httpStatus $CHECH_KIBANA_HTTPS + if [[ $CHECH_KIBANA_HTTPS -eq "200" || $CHECH_KIBANA_HTTPS -eq "301" || $CHECH_KIBANA_HTTPS -eq "302" ]];then + textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + else + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" + else + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + fi + fi + rm -f $TEMP_POLICY_FILE + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/checks/check_extra789 b/checks/check_extra789 new file mode 100644 index 00000000..87f3a1a1 --- /dev/null +++ b/checks/check_extra789 @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +CHECK_ID_extra789="7.89" +CHECK_TITLE_extra789="[extra789] Find trust boundaries in VPC endpoint services connections" +CHECK_SCORED_extra789="NOT_SCORED" +CHECK_TYPE_extra789="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra789="AwsEc2Vpc" +CHECK_ALTERNATE_extra789="extra789" + +extra789(){ + TRUSTED_ACCOUNT_IDS=$( echo "${ACCOUNT_NUM} ${GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS}" | xargs ) + + for regx in ${REGIONS}; do + ENDPOINT_SERVICES_IDS=$(${AWSCLI} ec2 describe-vpc-endpoint-services \ + ${PROFILE_OPT} \ + --query "ServiceDetails[?Owner=='${ACCOUNT_NUM}'].ServiceId" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_SERVICE_ID in ${ENDPOINT_SERVICES_IDS}; do + + ENDPOINT_CONNECTION_LIST=$(${AWSCLI} ec2 describe-vpc-endpoint-connections \ + ${PROFILE_OPT} \ + --query "VpcEndpointConnections[?VpcEndpointState=='available'].VpcEndpointOwner" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_CONNECTION in ${ENDPOINT_CONNECTION_LIST}; do + for ACCOUNT_ID in ${TRUSTED_ACCOUNT_IDS}; do + if [[ "${ACCOUNT_ID}" == "${ENDPOINT_CONNECTION}" ]]; then + textPass "${regx}: Found trusted account in VPC endpoint service connection ${ENDPOINT_CONNECTION}" "${regx}" + # Algorithm: + # Remove all trusted ACCOUNT_IDs from ENDPOINT_CONNECTION_LIST. + # As a result, the ENDPOINT_CONNECTION_LIST finally contains only unknown/untrusted account ids. 
+ ENDPOINT_CONNECTION_LIST=("${ENDPOINT_CONNECTION_LIST[@]/$ENDPOINT_CONNECTION}") # remove hit from whitelist + fi + done + done + + for UNTRUSTED_CONNECTION in ${ENDPOINT_CONNECTION_LIST}; do + textFail "${regx}: Found untrusted account in VPC endpoint service connection ${UNTRUSTED_CONNECTION}" "${regx}" + done + done + done +} diff --git a/checks/check_extra79 b/checks/check_extra79 index e45e5ddc..01c7b41e 100644 --- a/checks/check_extra79 +++ b/checks/check_extra79 @@ -10,10 +10,11 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra79="7.9,7.09" +CHECK_ID_extra79="7.9" CHECK_TITLE_extra79="[extra79] Check for internet facing Elastic Load Balancers (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra79="NOT_SCORED" CHECK_TYPE_extra79="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra79="AwsElbLoadBalancer" CHECK_ALTERNATE_extra709="extra79" CHECK_ALTERNATE_check79="extra79" CHECK_ALTERNATE_check709="extra79" diff --git a/checks/check_extra790 b/checks/check_extra790 new file mode 100644 index 00000000..6e9c2e80 --- /dev/null +++ b/checks/check_extra790 @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +CHECK_ID_extra790="7.90" +CHECK_TITLE_extra790="[extra790] Find trust boundaries in VPC endpoint services whitelisted principles" +CHECK_SCORED_extra790="NOT_SCORED" +CHECK_TYPE_extra790="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra790="AwsEc2Vpc" +CHECK_ALTERNATE_extra790="extra790" + +extra790(){ + TRUSTED_ACCOUNT_IDS=$( echo "${ACCOUNT_NUM} ${GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS}" | xargs ) + + for regx in ${REGIONS}; do + ENDPOINT_SERVICES_IDS=$(${AWSCLI} ec2 describe-vpc-endpoint-services \ + ${PROFILE_OPT} \ + --query "ServiceDetails[?Owner=='${ACCOUNT_NUM}'].ServiceId" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_SERVICE_ID in ${ENDPOINT_SERVICES_IDS}; do + ENDPOINT_PERMISSIONS_LIST=$(${AWSCLI} ec2 describe-vpc-endpoint-service-permissions \ + ${PROFILE_OPT} \ + --service-id ${ENDPOINT_SERVICE_ID} \ + --query "AllowedPrincipals[*].Principal" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_PERMISSION in ${ENDPOINT_PERMISSIONS_LIST}; do + # Take only account id from ENDPOINT_PERMISSION: arn:aws:iam::965406151242:root + ENDPOINT_PERMISSION_ACCOUNT_ID=$(echo ${ENDPOINT_PERMISSION} | cut -d':' -f5 | xargs) + + for ACCOUNT_ID in ${TRUSTED_ACCOUNT_IDS}; do + if [[ "${ACCOUNT_ID}" == "${ENDPOINT_PERMISSION_ACCOUNT_ID}" ]]; then + textPass "${regx}: Found trusted account in VPC endpoint service permission ${ENDPOINT_PERMISSION}" "${regx}" + # Algorithm: + # Remove all trusted ACCOUNT_IDs from ENDPOINT_PERMISSIONS_LIST. + # As a result, the ENDPOINT_PERMISSIONS_LIST finally contains only unknown/untrusted account ids. 
+ ENDPOINT_PERMISSIONS_LIST=("${ENDPOINT_PERMISSIONS_LIST[@]/$ENDPOINT_PERMISSION}") + fi + done + done + + for UNTRUSTED_PERMISSION in ${ENDPOINT_PERMISSIONS_LIST}; do + textFail "${regx}: Found untrusted account in VPC endpoint service permission ${UNTRUSTED_PERMISSION}" "${regx}" + done + done + done +} diff --git a/checks/check_sample b/checks/check_sample index 54c9d880..b041e30c 100644 --- a/checks/check_sample +++ b/checks/check_sample @@ -15,7 +15,7 @@ # # here URL to the relevand/official documentation # -# here commands or steps to fix it if avalable, like: +# here commands or steps to fix it if avalable, like: # aws logs put-metric-filter \ # --region us-east-1 \ # --log-group-name CloudTrail/MyCloudTrailLG \ @@ -28,6 +28,7 @@ # CHECK_TITLE_checkN="[checkN] Description (Not Scored) (Not part of CIS benchmark)" # CHECK_SCORED_checkN="NOT_SCORED" # CHECK_TYPE_checkN="EXTRA" +# CHECK_ASFF_RESOURCE_TYPE_checkN="AwsAccount" # Choose appropriate value from https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources # CHECK_ALTERNATE_checkN="extraN" # # extraN(){ diff --git a/docs/images/prowler-multi-account-environment.png b/docs/images/prowler-multi-account-environment.png new file mode 100644 index 00000000..89adaf5f Binary files /dev/null and b/docs/images/prowler-multi-account-environment.png differ diff --git a/docs/images/prowler-single-account-environment.png b/docs/images/prowler-single-account-environment.png new file mode 100644 index 00000000..585fe33a Binary files /dev/null and b/docs/images/prowler-single-account-environment.png differ diff --git a/groups/group11_secrets b/groups/group11_secrets index 24ceda0c..52a2df02 100644 --- a/groups/group11_secrets +++ b/groups/group11_secrets @@ -13,7 +13,7 @@ GROUP_ID[11]='secrets' GROUP_NUMBER[11]='11.0' -GROUP_TITLE[11]='Look for keys secrets or passwords around resources - [secrets] **' +GROUP_TITLE[11]='Look for keys secrets or passwords around resources - 
[secrets]' GROUP_RUN_BY_DEFAULT[11]='N' # but it runs when execute_all is called (default) GROUP_CHECKS[11]='extra741,extra742,extra759,extra760,extra768,extra775' diff --git a/groups/group14_elasticsearch b/groups/group14_elasticsearch new file mode 100644 index 00000000..22ffbb4c --- /dev/null +++ b/groups/group14_elasticsearch @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +GROUP_ID[14]='elasticsearch' +GROUP_NUMBER[14]='14.0' +GROUP_TITLE[14]='Elasticsearch related security checks - [elasticsearch] *******' +GROUP_RUN_BY_DEFAULT[14]='N' # run it when execute_all is called +GROUP_CHECKS[14]='extra715,extra716,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra787,extra788' \ No newline at end of file diff --git a/groups/group15_pci b/groups/group15_pci new file mode 100644 index 00000000..89b59656 --- /dev/null +++ b/groups/group15_pci @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +GROUP_ID[15]='pci' +GROUP_NUMBER[15]='15.0' +GROUP_TITLE[15]='PCI-DSS v3.2.1 Readiness - ONLY AS REFERENCE - [pci] **********' +GROUP_RUN_BY_DEFAULT[15]='N' # run it when execute_all is called +GROUP_CHECKS[15]='' + +# Resources: +# https://github.com/toniblyx/prowler/issues/296 diff --git a/groups/group16_trustboundaries b/groups/group16_trustboundaries new file mode 100644 index 00000000..93aa07ba --- /dev/null +++ b/groups/group16_trustboundaries @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +GROUP_ID[16]='trustboundaries' +GROUP_NUMBER[16]='16.0' +GROUP_TITLE[16]='Find cross-account trust boundaries - [trustboundaries] *******' +GROUP_RUN_BY_DEFAULT[16]='N' # run it when execute_all is called +GROUP_CHECKS[16]='extra789,extra790' + +# Single account environment: No action required. The AWS account number will be automatically added by the checks. 
+# Multi account environment: Any additional trusted account number should be added as a space separated list, e.g. +# GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS="1234567890 0987654321 6789012345" +GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS='' diff --git a/groups/group1_iam b/groups/group1_iam index 19026030..910897ea 100644 --- a/groups/group1_iam +++ b/groups/group1_iam @@ -10,6 +10,6 @@ GROUP_ID[1]='group1' GROUP_NUMBER[1]='1.0' -GROUP_TITLE[1]='Identity and Access Management - [group1] **********************' +GROUP_TITLE[1]='Identity and Access Management - CIS only - [group1] ***********' GROUP_RUN_BY_DEFAULT[1]='Y' # run it when execute_all is called GROUP_CHECKS[1]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check114,check115,check116,check117,check118,check119,check120,check121,check122,extra774' diff --git a/groups/group2_logging b/groups/group2_logging index 426d8ee9..a9d4db0b 100644 --- a/groups/group2_logging +++ b/groups/group2_logging @@ -10,6 +10,6 @@ GROUP_ID[2]='group2' GROUP_NUMBER[2]='2.0' -GROUP_TITLE[2]='Logging - [group2] *********************************************' +GROUP_TITLE[2]='Logging - CIS only - [group2] **********************************' GROUP_RUN_BY_DEFAULT[2]='Y' # run it when execute_all is called GROUP_CHECKS[2]='check21,check22,check23,check24,check25,check26,check27,check28,check29' diff --git a/groups/group3_monitoring b/groups/group3_monitoring index e6fd1e4a..058939a1 100644 --- a/groups/group3_monitoring +++ b/groups/group3_monitoring @@ -10,6 +10,6 @@ GROUP_ID[3]='group3' GROUP_NUMBER[3]='3.0' -GROUP_TITLE[3]='Monitoring - [group3] ******************************************' +GROUP_TITLE[3]='Monitoring - CIS only - [group3] *******************************' GROUP_RUN_BY_DEFAULT[3]='Y' # run it when execute_all is called GROUP_CHECKS[3]='check31,check32,check33,check34,check35,check36,check37,check38,check39,check310,check311,check312,check313,check314' diff 
--git a/groups/group4_networking b/groups/group4_networking index 0f552890..05b307b4 100644 --- a/groups/group4_networking +++ b/groups/group4_networking @@ -10,6 +10,6 @@ GROUP_ID[4]='group4' GROUP_NUMBER[4]='4.0' -GROUP_TITLE[4]='Networking - [group4] ******************************************' +GROUP_TITLE[4]='Networking - CIS only - [group4] *******************************' GROUP_RUN_BY_DEFAULT[4]='Y' # run it when execute_all is called GROUP_CHECKS[4]='check41,check42,check43,check44' diff --git a/groups/group5_cislevel1 b/groups/group5_cislevel1 index cbf29e74..3fe3a084 100644 --- a/groups/group5_cislevel1 +++ b/groups/group5_cislevel1 @@ -10,6 +10,6 @@ GROUP_ID[5]='cislevel1' GROUP_NUMBER[5]='5.0' -GROUP_TITLE[5]='CIS Level 1 - [cislevel1] **************************************' +GROUP_TITLE[5]='CIS Level 1 - CIS only - [cislevel1] ***************************' GROUP_RUN_BY_DEFAULT[5]='N' # run it when execute_all is called GROUP_CHECKS[5]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check115,check116,check117,check118,check119,check120,check122,check21,check23,check24,check25,check26,check31,check32,check33,check34,check35,check38,check312,check313,check314,check41,check42' diff --git a/groups/group6_cislevel2 b/groups/group6_cislevel2 index 23b81f51..67fdc1e1 100644 --- a/groups/group6_cislevel2 +++ b/groups/group6_cislevel2 @@ -10,6 +10,6 @@ GROUP_ID[6]='cislevel2' GROUP_NUMBER[6]='6.0' -GROUP_TITLE[6]='CIS Level 2 - [cislevel2] **************************************' +GROUP_TITLE[6]='CIS Level 2 - CIS only - [cislevel2] ***************************' GROUP_RUN_BY_DEFAULT[6]='N' # run it when execute_all is called 
GROUP_CHECKS[6]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check114,check115,check116,check117,check118,check119,check120,check121,check122,check21,check22,check23,check24,check25,check26,check27,check28,check29,check31,check32,check33,check34,check35,check36,check37,check38,check39,check310,check311,check312,check313,check314,check41,check42,check43,check44' diff --git a/groups/group7_extras b/groups/group7_extras index a1f88799..526df553 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -13,9 +13,9 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' -GROUP_TITLE[7]='Extras - [extras] **********************************************' +GROUP_TITLE[7]='Extras - all non CIS specific checks - [extras] ****************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called -GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776' 
+GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786,extra787,extra788' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` diff --git a/iam/prowler-additions-policy.json b/iam/prowler-additions-policy.json index 213c811e..79cfdc9a 100644 --- a/iam/prowler-additions-policy.json +++ b/iam/prowler-additions-policy.json @@ -1,113 +1,33 @@ { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "access-analyzer:List*", - "apigateway:get*", - "apigatewayv2:get*", - "aws-marketplace:viewsubscriptions", - "batch:listjobs", - "clouddirectory:listappliedschemaarns", - "clouddirectory:listdevelopmentschemaarns", - "clouddirectory:listpublishedschemaarns", - "cloudformation:list*", - "cloudhsm:listavailablezones", - "cloudsearch:list*", - "cloudwatch:get*", - "cloudwatch:list*", - "codebuild:listbuilds*", - "codestar:verify*", - "cognito-identity:listidentities", - "cognito-idp:list*", - "cognito-sync:listdatasets", - "connect:list*", - "datapipeline:getaccountlimits", - "dax:describeclusters", - "dax:describedefaultparameters", - "dax:describeevents", - "dax:describeparametergroups", - "dax:describeparameters", - "dax:describesubnetgroups", - "dax:describetable", - "dax:listtables", - 
"devicefarm:list*", - "discovery:list*", - "dms:list*", - "ds:ListAuthorizedApplications", - "ds:DescribeRoles", - "dynamodb:describebackup", - "dynamodb:describeglobaltablesettings", - "dynamodb:describelimits", - "dynamodb:describereservedcapacity", - "dynamodb:describereservedcapacityofferings", - "dynamodb:describestream", - "dynamodb:listtagsofresource", - "ec2:get*", - "ecr:describe*", - "ecr:listimages", - "elasticbeanstalk:listavailablesolutionstacks", - "elasticmapreduce:list*", - "elastictranscoder:list*", - "gamelift:list*", - "glacier:list*", - "importexport:listjobs", - "lambda:GetAccountSettings", - "lambda:GetFunctionConfiguration", - "lambda:GetLayerVersionPolicy", - "lambda:GetPolicy", - "lambda:List*", - "lex:getbotaliases", - "lex:getbotchannelassociations", - "lex:getbots", - "lex:getbotversions", - "lex:getintents", - "lex:getintentversions", - "lex:getslottypes", - "lex:getslottypeversions", - "lex:getutterancesview", - "lightsail:getblueprints", - "lightsail:getbundles", - "lightsail:getinstancesnapshots", - "lightsail:getkeypair", - "lightsail:getregions", - "lightsail:getstaticips", - "lightsail:isvpcpeered", - "machinelearning:describe*", - "mobilehub:listavailablefeatures", - "mobilehub:listavailableregions", - "mobilehub:listprojects", - "mobiletargeting:getapplicationsettings", - "mobiletargeting:getcampaigns", - "mobiletargeting:getimportjobs", - "mobiletargeting:getsegments", - "opsworks-cm:describe*", - "opsworks:describe*", - "polly:describe*", - "polly:list*", - "redshift:viewqueriesinconsole", - "route53domains:list*", - "s3:listbucket", - "sdb:list*", - "secretsmanager:listsecretversionids", - "servicecatalog:list*", - "ses:list*", - "sns:list*", - "sqs:listqueuetags", - "ssm:listassociations", - "states:listactivities", - "support:describe*", - "swf:list*", - "tag:gettagkeys", - "trustedadvisor:describe*", - "waf-regional:list*", - "waf:list*", - "workdocs:describeavailabledirectories", - "workdocs:describeinstances", - 
"workmail:describe*" - ], - "Effect": "Allow", - "Resource": "*" - } - ] -} + "Version": "2012-10-17", + "Statement": [ + { + "Action": [ + "access-analyzer:List*", + "apigateway:Get*", + "apigatewayv2:Get*", + "aws-marketplace:ViewSubscriptions", + "dax:ListTables", + "ds:ListAuthorizedApplications", + "ds:DescribeRoles", + "ec2:GetEbsEncryptionByDefault", + "ecr:Describe*", + "lambda:GetAccountSettings", + "lambda:GetFunctionConfiguration", + "lambda:GetLayerVersionPolicy", + "lambda:GetPolicy", + "opsworks-cm:Describe*", + "opsworks:Describe*", + "secretsmanager:ListSecretVersionIds", + "sns:List*", + "sqs:ListQueueTags", + "states:ListActivities", + "support:Describe*", + "tag:GetTagKeys" + ], + "Resource": "*", + "Effect": "Allow", + "Sid": "AllowMoreReadForProwler" + } + ] +} \ No newline at end of file diff --git a/iam/prowler-security-hub.json b/iam/prowler-security-hub.json new file mode 100644 index 00000000..309c086a --- /dev/null +++ b/iam/prowler-security-hub.json @@ -0,0 +1,12 @@ +{ + "Version": "2012-10-17", + "Statement": [ + { + "Action": [ + "securityhub:BatchImportFindings" + ], + "Effect": "Allow", + "Resource": "*" + } + ] +} diff --git a/include/assume_role b/include/assume_role index 4fedfb3d..25a244f4 100644 --- a/include/assume_role +++ b/include/assume_role @@ -25,11 +25,24 @@ if [[ $ACCOUNT_TO_ASSUME ]]; then # temporary file where to store credentials TEMP_STS_ASSUMED_FILE=$(mktemp -t prowler.sts_assumed-XXXXXX) + + #Check if external ID has bee provided if so execute with external ID if not ignore + if [[ -z $ROLE_EXTERNAL_ID ]]; then + # assume role command + $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + --role-session-name ProwlerAssessmentSession \ + --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE + else + $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + 
--role-session-name ProwlerAssessmentSession \ + --duration-seconds $SESSION_DURATION_TO_ASSUME \ + --external-id $ROLE_EXTERNAL_ID > $TEMP_STS_ASSUMED_FILE + fi # assume role command - $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ - --role-session-name ProwlerAssessmentSession \ - --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE + #$AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + # --role-session-name ProwlerAssessmentSession \ + # --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE # if previous command fails exit with the given error from aws-cli # this is likely to be due to session duration limit of 1h in case diff --git a/include/check3x b/include/check3x index 5b75d315..cabe626a 100644 --- a/include/check3x +++ b/include/check3x @@ -14,17 +14,19 @@ check3x(){ local CHECK_WARN local CHECK_CROSS_ACCOUNT_WARN - DESCRIBE_TRAILS_CACHE=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION") - TRAIL_LIST=$(echo $DESCRIBE_TRAILS_CACHE | jq -r '.trailList[]|@base64') + # In order to make all these checks work properly logs and alarms have to + # be based only on CloudTrail tail with CloudWatchLog configuration. + DESCRIBE_TRAILS_CACHE=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[?CloudWatchLogsLogGroupArn != `null`]') + TRAIL_LIST=$(echo $DESCRIBE_TRAILS_CACHE | jq -r '. 
|@base64') CURRENT_ACCOUNT_ID=$($AWSCLI sts $PROFILE_OPT get-caller-identity --region "$REGION" --query Account --output text) CLOUDWATCH_LOGGROUP=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[*].CloudWatchLogsLogGroupArn' --output text| tr '\011' '\012' | awk -F: '{print $7}') if [[ $CLOUDWATCH_LOGGROUP != "" ]]; then for group_obj_enc in $TRAIL_LIST; do group_obj_raw=$(echo $group_obj_enc | decode_report) - CLOUDWATCH_LOGGROUP_NAME=$(echo $group_obj_raw | jq -r '.CloudWatchLogsLogGroupArn|split(":")[6]') - CLOUDWATCH_LOGGROUP_REGION=$(echo $group_obj_raw | jq -r '.CloudWatchLogsLogGroupArn|split(":")[3]') - CLOUDWATCH_LOGGROUP_ACCOUNT=$(echo $group_obj_raw | jq -r '.CloudWatchLogsLogGroupArn|split(":")[4]') + CLOUDWATCH_LOGGROUP_NAME=$(echo $group_obj_raw | jq -r '.[] | .CloudWatchLogsLogGroupArn|split(":")[6]') + CLOUDWATCH_LOGGROUP_REGION=$(echo $group_obj_raw | jq -r '.[] | .CloudWatchLogsLogGroupArn|split(":")[3]') + CLOUDWATCH_LOGGROUP_ACCOUNT=$(echo $group_obj_raw | jq -r '.[] | .CloudWatchLogsLogGroupArn|split(":")[4]') if [ "$CLOUDWATCH_LOGGROUP_ACCOUNT" == "$CURRENT_ACCOUNT_ID" ];then # Filter control and whitespace from .metricFilters[*].filterPattern for easier matching later METRICFILTER_CACHE=$($AWSCLI logs describe-metric-filters --log-group-name "$CLOUDWATCH_LOGGROUP_NAME" $PROFILE_OPT --region "$CLOUDWATCH_LOGGROUP_REGION"|jq '.metricFilters|=map(.filterPattern|=gsub("[[:space:]]+"; " "))') diff --git a/include/check_creds_last_used b/include/check_creds_last_used new file mode 100644 index 00000000..4f8633b3 --- /dev/null +++ b/include/check_creds_last_used @@ -0,0 +1,143 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +# Set of functions for checking credential usage, following CIS 1.3 "Ensure credentials unused for 90 days or greater are disabled" rules +# but support a custom time-range to allow for stricter policies, e.g. extra774 + +# CSV Report Column Numbering +# See also https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html#id_credentials_understanding_the_report_format +# 1 - user +# 2 - arn +# 3 - user_creation_time +# 4 - password_enabled +# 5 - password_last_used +# 6 - password_last_changed +# 7 - password_next_rotation +# 8 - mfa_active +# 9 - access_key_1_active +# 10 - access_key_1_last_rotated +# 11 - access_key_1_last_used_date +# 12 - access_key_1_last_used_region +# 13 - access_key_1_last_used_service +# 14 - access_key_2_active +# 15 - access_key_2_last_rotated +# 16 - access_key_2_last_used_date +# 17 - access_key_2_last_used_region +# 18 - access_key_2_last_used_service +# 19 - cert_1_active +# 20 - cert_1_last_rotated +# 21 - cert_2_active +# 22 - cert_2_last_rotated + +# Check both passwords and access keys - e.g. 
CIS rule +check_creds_used_in_last_days() { + local max_days=$1 + + check_passwords_used_in_last_days "$max_days" + check_access_keys_used_in_last_days "$max_days" +} + +check_passwords_used_in_last_days() { + local max_days=$1 + + local user + local users_with_password_enabled + local last_login_date + local days_since_password_last_changed + local days_password_not_in_use + users_with_password_enabled=$(awk -F, '{ print $1,$4 }' "$TEMP_REPORT_FILE" | grep " true$" | awk '{ print $1 }') + # Only check password last used date for users with password enabled + if [[ $users_with_password_enabled ]]; then + for user in $users_with_password_enabled; do + last_login_date=$(awk -F, '{ print $1,$5 }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + + # If the user has never logged into the console, their last login date is 'no_information'. See: + # https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html#id_credentials_understanding_the_report_format + if [[ "${last_login_date}" == "no_information" ]]; then + user_password_changed_date=$(awk -F, '{ print $1,$6 }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + days_since_password_last_changed=$(how_older_from_today "${user_password_changed_date%T*}") + + # "When password_enabled is set to TRUE and password_last_used is set to no_information, ensure password_last_changed is less than X days ago" + if [[ "$days_since_password_last_changed" -ge "$max_days" ]]; then + textFail "User $user has never logged into the console since creation and their password not changed in the past ${max_days} days" + else + textInfo "User $user has not logged into the console since creation" + fi + else + days_password_not_in_use=$(how_older_from_today "${last_login_date%T*}") + + # "For each user having password_enabled set to TRUE, ensure password_last_used_date is less than X days ago." 
+ if [[ "$days_password_not_in_use" -ge "$max_days" ]]; then + textFail "User $user has not logged into the console in the past ${max_days} days" + else + textPass "User $user has logged into the console in the past ${max_days} days" + fi + fi + done + else + textPass "No users found with password enabled" + fi +} + +check_access_keys_used_in_last_days() { + local max_days=$1 + + check_access_key_used_in_last_days "$max_days" 1 9 10 11 + check_access_key_used_in_last_days "$max_days" 2 14 15 16 +} + +check_access_key_used_in_last_days() { + local max_days=$1 + local access_key_name=$2 + local access_key_active_col=$3 + local access_key_last_rotated_col=$4 + local access_key_last_used_col=$5 + + local user + local users_with_access_key_enabled + local access_key_last_used_date + local access_key_last_rotated_date + local days_since_access_key_rotated + local days_since_access_key_used + users_with_access_key_enabled=$(awk -F, -v i="$access_key_active_col" '{ print $1,$i }' "$TEMP_REPORT_FILE" | grep " true$" | awk '{ print $1 }') + # Only check access key last used date for users with this access key enabled + if [[ $users_with_access_key_enabled ]]; then + for user in $users_with_access_key_enabled; do + access_key_last_used_date=$(awk -F, -v i="$access_key_last_used_col" '{ print $1,$i }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + + if [[ "${access_key_last_used_date}" == "N/A" ]]; then + access_key_last_rotated_date=$(awk -F, -v i="$access_key_last_rotated_col" '{ print $1,$i }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + days_since_access_key_rotated=$(how_older_from_today "${access_key_last_rotated_date%T*}") + + # "When a user having an access_key_x_active (where x is 1 or 2) to TRUE and corresponding access_key_x_last_used_date is set to N/A, + # ensure access_key_x_last_rotated is less than X days ago" + if [[ "$days_since_access_key_rotated" -ge "$max_days" ]]; then + textFail "User $user has never used access key 
$access_key_name since creation and not rotated it in the past ${max_days} days" + else + textInfo "User $user has not used access key $access_key_name since creation" + fi + else + days_since_access_key_used=$(how_older_from_today "${access_key_last_used_date%T*}") + + # "For each user having an access_key_1_active or access_key_2_active to TRUE, ensure the corresponding access_key_n_last_used_date is less than X days ago" + if [[ "$days_since_access_key_used" -ge "$max_days" ]]; then + textFail "User $user has not used access key $access_key_name in the past ${max_days} days" + else + textPass "User $user has used access key $access_key_name in the past ${max_days} days" + fi + fi + done + else + textPass "No users found with access key $access_key_name enabled" + fi +} diff --git a/include/colors b/include/colors index 2b7175ce..7bb9f84e 100644 --- a/include/colors +++ b/include/colors @@ -11,17 +11,18 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -if [[ $MODE != "mono" && $MODE != "text" && $MODE != "csv" && $MODE != "json" ]]; then - echo "" - echo "$OPTRED ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, or csv." - usage - EXITCODE=1 - exit $EXITCODE -fi -if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" ]]; then - MONOCHROME=1 -fi +IFS=',' read -ra MODES <<< "${MODE}" +for MODE in "${MODES[@]}"; do + if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" && "$MODE" != "junit-xml" ]]; then + echo -e "${OPTRED}ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json, json-asff or junit-xml. 
./prowler -h for help" + EXITCODE=1 + exit $EXITCODE + fi + if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then + MONOCHROME=1 + fi +done if [[ $MONOCHROME -eq 1 ]]; then # Colors diff --git a/include/connection_tests b/include/connection_tests new file mode 100644 index 00000000..6ceeb503 --- /dev/null +++ b/include/connection_tests @@ -0,0 +1,46 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ + +# Function test_tcp_connectivity is in include/os_detector + +# see here https://gist.github.com/rsvp/1171304/3d6714a469105faf03943b685090f90f576cf904 + +# Functions to connection responses initially used for Elasticsearch related checks +httpStatus(){ + case $1 in + 000) SERVER_RESPONSE="000 Not responding" ;; + 200) SERVER_RESPONSE="200 Successful" ;; + 300) SERVER_RESPONSE="300 Multiple Choices" ;; + 301) SERVER_RESPONSE="301 Moved Permanently" ;; + 302) SERVER_RESPONSE="302 Found residing temporarily under different URI" ;; + 303) SERVER_RESPONSE="303 See Other" ;; + 304) SERVER_RESPONSE="304 Not Modified" ;; + 305) SERVER_RESPONSE="305 Use Proxy" ;; + 306) SERVER_RESPONSE="306 Status not defined" ;; + 307) SERVER_RESPONSE="307 Temporary Redirect" ;; + 301) SERVER_RESPONSE="301 Moved" ;; + 400) SERVER_RESPONSE="400 Error: Bad Request" ;; + 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; + 403) SERVER_RESPONSE="403 Error: Forbidden" ;; + 404) SERVER_RESPONSE="404 Error: Not Found" ;; + 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; + 408) SERVER_RESPONSE="408 Error: Request Timeout" ;; + 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; + 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; + 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; + 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; + 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; + *) SERVER_RESPONSE="HTTP: SERVER_RESPONSE not defined." 
;; + esac + } diff --git a/include/csv_header b/include/csv_header index abd1b8c4..07ac75bc 100644 --- a/include/csv_header +++ b/include/csv_header @@ -15,5 +15,5 @@ printCsvHeader() { >&2 echo "" >&2 echo "Generating \"${SEP}\" delimited report on stdout for profile $PROFILE, account $ACCOUNT_NUM" - echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}RESULT${SEP}SCORED${SEP}LEVEL${SEP}TITLE_TEXT${SEP}NOTES" + echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}RESULT${SEP}SCORED${SEP}LEVEL${SEP}TITLE_TEXT${SEP}NOTES" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV } diff --git a/include/junit_integration b/include/junit_integration new file mode 100644 index 00000000..c52ecb42 --- /dev/null +++ b/include/junit_integration @@ -0,0 +1,97 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +# Generates JUnit XML reports which can be read by Jenkins or other CI tools + +JUNIT_OUTPUT_DIRECTORY="junit-reports" + +is_junit_output_enabled() { + if [[ " ${MODES[@]} " =~ " junit-xml " ]]; then + true + else + false + fi +} + +xml_escape() { + sed 's/&/\&/g; s//\>/g; s/\"/\"/g; s/'"'"'/\'/g' <<< "$1" +} + +prepare_junit_output() { + # Remove any JUnit output from previous runs + rm -rf "$JUNIT_OUTPUT_DIRECTORY" + mkdir "$JUNIT_OUTPUT_DIRECTORY" + echo "" + echo "$NOTICE Writing JUnit XML reports to $PROWLER_DIR/$JUNIT_OUTPUT_DIRECTORY $NORMAL" +} + +prepare_junit_check_output() { + # JUnit test cases must be named uniquely, but each Prowler check can output many times due to multiple resources, + # therefore append an index value to the test case name to provide uniqueness, reset it to 1 before starting this check + JUNIT_CHECK_INDEX=1 + # To match JUnit behaviour in Java, and ensure that an aborted execution does not leave a partially written and therefore invalid XML file, + # output a JUnit XML file per check + JUNIT_OUTPUT_FILE="$JUNIT_OUTPUT_DIRECTORY/TEST-$1.xml" + printf '%s\n' \ + "" \ + "" \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + > "$JUNIT_OUTPUT_FILE" + JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds) +} + +finalise_junit_check_output() { + echo '' >> "$JUNIT_OUTPUT_FILE" +} + +output_junit_success() { + output_junit_test_case "$1" "$(xml_escape "$1")" +} + +output_junit_info() { + # Nothing to output for JUnit for this level of message, but reset the check timer for timing the next check + JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds) +} + +output_junit_failure() { + output_junit_test_case "$1" "" +} + +get_junit_classname() { + #
. naturally follows a Java package structure, so it is suitable as a package name + echo "$TITLE_ID" +} + +output_junit_test_case() { + local time_now + local test_case_duration + time_now=$(get_time_in_milliseconds) + # JUnit test case time values are in seconds, so divide by 1000 using e-3 to convert from milliseconds without losing accuracy due to non-floating point arithmetic + test_case_duration=$(printf "%.3f" "$((time_now - JUNIT_CHECK_START_TIME))e-3") + printf '%s\n' \ + " " \ + " $2" \ + " " >> "$JUNIT_OUTPUT_FILE" + # Reset the check timer for timing the next check + JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds) + ((JUNIT_CHECK_INDEX+=1)) +} diff --git a/include/os_detector b/include/os_detector index 2394c521..b06754e2 100644 --- a/include/os_detector +++ b/include/os_detector @@ -11,128 +11,245 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. +DATE_CMD="date" +BASE64_CMD="base64" + +gnu_how_older_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$("$DATE_CMD" -d "$("$DATE_CMD" +%Y-%m-%d)" +%s) + DATE_FROM_IN_DAYS=$("$DATE_CMD" -d $DATE_TO_COMPARE +%s) + DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) + echo $DAYS_SINCE +} +bsd_how_older_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$("$DATE_CMD" +%s) + DATE_FROM_IN_DAYS=$("$DATE_CMD" -jf %Y-%m-%d $DATE_TO_COMPARE +%s) + DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) + echo $DAYS_SINCE +} + +# function to convert from timestamp to date +# output date format %Y-%m-%d +gnu_timestamp_to_date() { + # if date comes from cli v2 in format like 2020-04-29T10:13:09.191000-04:00 + # we have to get only '%Y-%m-%d' + if [[ $1 = 20* ]];then + echo $1 | cut -f1 -d"T" + else + # remove fractions of a second + TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") + OUTPUT_DATE=$("$DATE_CMD" -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + echo $OUTPUT_DATE + fi +} 
+bsd_timestamp_to_date() { + # if date comes from cli v2 in format like 2020-04-29T10:13:09.191000-04:00 + # we have to get only '%Y-%m-%d' + if [[ $1 = 20* ]];then + echo $1 | cut -f1 -d"T" + else + # remove fractions of a second + TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") + OUTPUT_DATE=$("$DATE_CMD" -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + echo $OUTPUT_DATE + fi +} + +gnu_decode_report() { + "$BASE64_CMD" -d +} +bsd_decode_report() { + "$BASE64_CMD" -D +} + +gnu_how_many_days_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$("$DATE_CMD" -d "$("$DATE_CMD" +%Y-%m-%d)" +%s) + DATE_IN_DAYS=$("$DATE_CMD" -d $DATE_TO_COMPARE +%s) + DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) + echo $DAYS_TO +} +bsd_how_many_days_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$("$DATE_CMD" +%s) + DATE_IN_DAYS=$("$DATE_CMD" -jf %Y-%m-%d $DATE_TO_COMPARE +%s) + DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) + echo $DAYS_TO +} + +gnu_get_date_previous_than_months() { + MONTHS_TO_COMPARE=$1 + MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) + CURRENTSECS=$("$DATE_CMD" +%s) + STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) + DATE_BEFORE_MONTHS_TO_COMPARE=$("$DATE_CMD" -d @$STARTDATEINSECS '+%Y-%m-%d') + echo $DATE_BEFORE_MONTHS_TO_COMPARE +} +bsd_get_date_previous_than_months() { + MONTHS_TO_COMPARE=$1 + DATE_BEFORE_MONTHS_TO_COMPARE=$("$DATE_CMD" -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') + echo $DATE_BEFORE_MONTHS_TO_COMPARE +} + +gnu_get_time_in_milliseconds() { + "$DATE_CMD" +%s%3N +} +bsd_get_time_in_milliseconds() { + # BSD date does not support outputting milliseconds, so pad with zeros + "$DATE_CMD" +%s000 +} + +gnu_get_iso8601_timestamp() { + "$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ" +} +bsd_get_iso8601_timestamp() { + "$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ" +} + +gnu_test_tcp_connectivity() { + HOST=$1 + PORT=$2 + TIMEOUT=$3 + # This is initially for ES port 9300, not not HTTP but I add HTTP error + # 
codes for better handling, so 200 is open and 000 is not responding + timeout $TIMEOUT bash -c '(echo > /dev/tcp/'$HOST'/'$PORT') >/dev/null 2>&1 && echo "200" || echo "000"' +} +bsd_test_tcp_connectivity() { + HOST=$1 + PORT=$2 + TIMEOUT=$3 + # This is initially for ES port 9300, not not HTTP but I add HTTP error + # codes for better handling, so 200 is open and 000 is not responding + nc -z -G $TIMEOUT $HOST $PORT >/dev/null 2>&1 && echo "200" || echo "000" +} # Functions to manage dates depending on OS if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then TEMP_REPORT_FILE=$(mktemp -t -p /tmp prowler.cred_report-XXXXXX) # function to compare in days, usage how_older_from_today date # date format %Y-%m-%d - how_older_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) - echo $DAYS_SINCE - } - # function to convert from timestamp to date, usage timestamp_to_date timestamp - # output date format %Y-%m-%d - timestamp_to_date() - { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE - } - decode_report() - { - base64 -d - } - how_many_days_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) - echo $DAYS_TO - } - get_date_previous_than_months() - { - MONTHS_TO_COMPARE=$1 - MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) - CURRENTSECS=`date +%s` - STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d') - echo $DATE_BEFORE_MONTHS_TO_COMPARE - } + how_older_from_today() { + gnu_how_older_from_today "$1" + } + timestamp_to_date() { + 
gnu_timestamp_to_date "$1" + } + decode_report() { + gnu_decode_report + } + how_many_days_from_today() { + gnu_how_many_days_from_today "$1" + } + get_date_previous_than_months() { + gnu_get_date_previous_than_months "$1" + } + get_time_in_milliseconds() { + gnu_get_time_in_milliseconds + } + get_iso8601_timestamp() { + gnu_get_iso8601_timestamp + } + test_tcp_connectivity() { + gnu_test_tcp_connectivity "$1" "$2" "$3" + } elif [[ "$OSTYPE" == "darwin"* ]]; then # BSD/OSX commands compatibility TEMP_REPORT_FILE=$(mktemp -t prowler.cred_report-XXXXXX) - how_older_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date +%s) - DATE_FROM_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) - DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) - echo $DAYS_SINCE + # It is possible that the user has installed GNU coreutils on OS X. By default, this will make GNU commands + # available with a 'g' prefix, e.g. 'gdate'. Test if this is present, and use it if so, as it supports more features. + # The user also may have replaced the default Mac OS X BSD tools with the GNU coreutils equivalents. 
+ # Only GNU date/base64 allows --version as a valid argument, so use the validity of this argument + # as a means to detect that coreutils is installed and is overriding the default tools + GDATE=$(which gdate) + if [ -n "${GDATE}" ]; then + DATE_CMD="gdate" + fi + GBASE64=$(which gbase64) + if [ -n "${GBASE64}" ]; then + BASE64_CMD="gbase64" + fi + if "$DATE_CMD" --version >/dev/null 2>&1 ; then + how_older_from_today() { + gnu_how_older_from_today "$1" } - timestamp_to_date() - { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE + timestamp_to_date() { + gnu_timestamp_to_date "$1" } - decode_report() - { - base64 -D + how_many_days_from_today() { + gnu_how_many_days_from_today "$1" } - how_many_days_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date +%s) - DATE_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) - DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) - echo $DAYS_TO + get_date_previous_than_months() { + gnu_get_date_previous_than_months "$1" } - get_date_previous_than_months() - { - MONTHS_TO_COMPARE=$1 - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') - echo $DATE_BEFORE_MONTHS_TO_COMPARE + get_time_in_milliseconds() { + gnu_get_time_in_milliseconds } + get_iso8601_timestamp() { + gnu_get_iso8601_timestamp + } + else + how_older_from_today() { + bsd_how_older_from_today "$1" + } + timestamp_to_date() { + bsd_timestamp_to_date "$1" + } + how_many_days_from_today() { + bsd_how_many_days_from_today "$1" + } + get_date_previous_than_months() { + bsd_get_date_previous_than_months "$1" + } + get_time_in_milliseconds() { + bsd_get_time_in_milliseconds + } + get_iso8601_timestamp() { + bsd_get_iso8601_timestamp + } + fi + if "$BASE64_CMD" --version >/dev/null 2>&1 ; then + decode_report() { + gnu_decode_report + } + else + decode_report() { + bsd_decode_report + } + fi + test_tcp_connectivity() { + 
bsd_test_tcp_connectivity "$1" "$2" "$3" + } elif [[ "$OSTYPE" == "cygwin" ]]; then # POSIX compatibility layer and Linux environment emulation for Windows TEMP_REPORT_FILE=$(mktemp -t -p /tmp prowler.cred_report-XXXXXX) - how_older_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) - echo $DAYS_SINCE - } - timestamp_to_date() - { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE - } - decode_report() - { - base64 -d - } - how_many_days_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_TO=$((( $TODAY_IN_DAYS - $DATE_IN_DAYS )/60/60/24)) - echo $DAYS_TO - } - get_date_previous_than_months() - { - MONTHS_TO_COMPARE=$1 - MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) - CURRENTSECS=`date +%s` - STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d') - echo $DATE_BEFORE_MONTHS_TO_COMPARE - } + how_older_from_today() { + gnu_how_older_from_today "$1" + } + timestamp_to_date() { + gnu_timestamp_to_date "$1" + } + decode_report() { + gnu_decode_report + } + how_many_days_from_today() { + gnu_how_many_days_from_today "$1" + } + get_date_previous_than_months() { + gnu_get_date_previous_than_months "$1" + } + get_time_in_milliseconds() { + gnu_get_time_in_milliseconds + } + get_iso8601_timestamp() { + gnu_get_iso8601_timestamp + } + test_tcp_connectivity() { + gnu_test_tcp_connectivity "$1" "$2" "$3" + } else - echo "Unknown Operating System! Valid \$OSTYPE: linux-gnu, linux-musl, darwin* or cygwin" - echo "Found: $OSTYPE" - EXITCODE=1 - exit $EXITCODE + echo "Unknown Operating System! 
Valid \$OSTYPE: linux-gnu, linux-musl, darwin* or cygwin" + echo "Found: $OSTYPE" + EXITCODE=1 + exit $EXITCODE fi diff --git a/include/outputs b/include/outputs index 3f4a44c7..8931fc27 100644 --- a/include/outputs +++ b/include/outputs @@ -12,48 +12,46 @@ # specific language governing permissions and limitations under the License. # Output formatting functions + +EXTENSION_CSV="csv" +EXTENSION_JSON="json" +EXTENSION_ASFF="asff-json" +EXTENSION_TEXT="txt" +EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation +OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") +OUTPUT_FILE_NAME="prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}" + textPass(){ if [[ "$QUIET" == 1 ]]; then return fi PASS_COUNTER=$((PASS_COUNTER+1)) - if [[ "$MODE" == "csv" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - jq -M -c \ - --arg PROFILE "$PROFILE" \ - --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ - --arg TITLE_TEXT "$TITLE_TEXT" \ - --arg MESSAGE "$(echo -e "${1}" | sed -e 's/^[[:space:]]*//')" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ - --arg TITLE_ID "$TITLE_ID" \ - --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -n '{ - "Profile": $PROFILE, - "Account Number": $ACCOUNT_NUM, - "Control": $TITLE_TEXT, - "Message": $MESSAGE, - "Status": "Pass", - "Scored": $SCORED, - "Level": $ITEM_LEVEL, - "Control ID": $TITLE_ID, - "Region": $REPREGION, - "Timestamp": $TIMESTAMP, - }' + if [[ $2 ]]; then + REPREGION=$2 else + REPREGION=$REGION + fi + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + fi + if [[ 
"${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Pass" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON + fi + if [[ "${MODES[@]}" =~ "json-asff" ]]; then + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF + if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then + sendToSecurityHub "${JSON_ASFF_OUTPUT}" + fi + fi + if is_junit_output_enabled; then + output_junit_success "$1" + fi + if [[ "${MODES[@]}" =~ "mono" ]]; then + echo " $OK PASS!$NORMAL $1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT + fi + if [[ "${MODES[@]}" =~ "text" || "${MODES[@]}" =~ "mono" ]]; then echo " $OK PASS!$NORMAL $1" fi } @@ -63,42 +61,24 @@ textInfo(){ return fi - if [[ "$MODE" == "csv" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - jq -M -c \ - --arg PROFILE "$PROFILE" \ - --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ - --arg TITLE_TEXT "$TITLE_TEXT" \ - --arg MESSAGE "$(echo -e "${1}" | sed -e 's/^[[:space:]]*//')" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ - --arg TITLE_ID "$TITLE_ID" \ - --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -n '{ - "Profile": $PROFILE, - "Account Number": $ACCOUNT_NUM, - "Control": $TITLE_TEXT, - "Message": $MESSAGE, - "Status": "Info", - "Scored": $SCORED, - "Level": $ITEM_LEVEL, - "Control ID": $TITLE_ID, - "Region": $REPREGION, - "Timestamp": $TIMESTAMP, - }' + if [[ $2 ]]; then + REPREGION=$2 else + REPREGION=$REGION + fi + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} 
+ fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Info" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} + fi + if is_junit_output_enabled; then + output_junit_info "$1" + fi + if [[ "${MODES[@]}" =~ "mono" ]]; then + echo " $NOTICE INFO! $1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT + fi + if [[ "${MODES[@]}" =~ "text" ]]; then echo " $NOTICE INFO! $1 $NORMAL" fi } @@ -124,58 +104,43 @@ textFail(){ FAIL_COUNTER=$((FAIL_COUNTER+1)) EXITCODE=3 fi - - if [[ "$MODE" == "csv" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}${level}${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - jq -M -c \ - --arg PROFILE "$PROFILE" \ - --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ - --arg TITLE_TEXT "$TITLE_TEXT" \ - --arg MESSAGE "$(echo -e "${1}" | sed -e 's/^[[:space:]]*//')" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ - --arg TITLE_ID "$TITLE_ID" \ - --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ - -n '{ - "Profile": $PROFILE, - "Account Number": $ACCOUNT_NUM, - "Control": $TITLE_TEXT, - "Message": $MESSAGE, - "Status": "Fail", - "Scored": $SCORED, - "Level": $ITEM_LEVEL, - "Control ID": $TITLE_ID, - "Region": $REPREGION, - "Timestamp": $TIMESTAMP, - }' + + if [[ $2 ]]; then + REPREGION=$2 else - if [[ "${level}" == "FAIL" ]]; then - echo " $BAD ${level}! $1 $NORMAL" - else - echo " $WARNING ${level}! 
$1 $NORMAL" + REPREGION=$REGION + fi + + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}${level}${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "${level}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} + fi + if [[ "${MODES[@]}" =~ "json-asff" ]]; then + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "${level}" "HIGH") + echo "${JSON_ASFF_OUTPUT}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_ASFF} + if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then + sendToSecurityHub "${JSON_ASFF_OUTPUT}" fi fi + if is_junit_output_enabled && [[ "$level" == "FAIL" ]]; then + output_junit_failure "$1" + fi + if [[ "${MODES[@]}" =~ "mono" ]]; then + echo " $BAD ${level}! $1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT + fi + if [[ "${MODES[@]}" =~ "text" ]]; then + echo " $BAD ${level}! $1 $NORMAL" + fi } textTitle(){ CHECKS_COUNTER=$((CHECKS_COUNTER+1)) TITLE_ID=$1 if [[ $NUMERAL ]]; then - TITLE_ID=$(echo $TITLE_ID | cut -d, -f2) - else - TITLE_ID=$(echo $TITLE_ID | cut -d, -f1) + # Left-pad the check ID with zeros to simplify sorting, e.g. 1.1 -> 1.01 + TITLE_ID=$(awk -F'.' 
'{ printf "%d.%02d", $1, $2 }' <<< "$TITLE_ID") fi TITLE_TEXT=$2 @@ -200,15 +165,102 @@ textTitle(){ *) ITEM_LEVEL="Unspecified or Invalid";; esac - if [[ "$MODE" == "csv" ]]; then - >&2 echo "$TITLE_ID $TITLE_TEXT" - elif [[ "$MODE" == "json" ]]; then + local group_ids + if [[ -n "$5" ]]; then + group_ids="$CYAN [$5] $NORMAL" + fi + + if [[ "${MODES[@]}" =~ "csv" ]]; then + >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} + elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else if [[ "$ITEM_SCORED" == "Scored" ]]; then - echo -e "\n$BLUE $TITLE_ID $NORMAL $TITLE_TEXT" + echo -e "\n$BLUE $TITLE_ID $NORMAL $TITLE_TEXT $group_ids" else - echo -e "\n$PURPLE $TITLE_ID $TITLE_TEXT $NORMAL" + echo -e "\n$PURPLE $TITLE_ID $TITLE_TEXT $NORMAL $group_ids" fi fi } + +generateJsonOutput(){ + local message=$1 + local status=$2 + jq -M -c \ + --arg PROFILE "$PROFILE" \ + --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ + --arg TITLE_TEXT "$TITLE_TEXT" \ + --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \ + --arg STATUS "$status" \ + --arg SCORED "$ITEM_SCORED" \ + --arg ITEM_LEVEL "$ITEM_LEVEL" \ + --arg TITLE_ID "$TITLE_ID" \ + --arg REPREGION "$REPREGION" \ + --arg TIMESTAMP "$(get_iso8601_timestamp)" \ + -n '{ + "Profile": $PROFILE, + "Account Number": $ACCOUNT_NUM, + "Control": $TITLE_TEXT, + "Message": $MESSAGE, + "Status": $STATUS, + "Scored": $SCORED, + "Level": $ITEM_LEVEL, + "Control ID": $TITLE_ID, + "Region": $REPREGION, + "Timestamp": $TIMESTAMP, + }' +} + +generateJsonAsffOutput(){ + # UNIQUE_ID must only contain characters from the unreserved characters set defined in section 2.3 of RFC-3986 + # Replace any successive non-conforming characters with a single underscore + local message=$1 + local status=$2 + local severity=$3 + jq -M -c \ + --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ + --arg TITLE_TEXT "$TITLE_TEXT" \ + --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \ + --arg UNIQUE_ID 
"$(LC_ALL=C echo -e -n "${message}" | tr -cs '[:alnum:]._~-' '_')" \ + --arg STATUS "$status" \ + --arg SEVERITY "$severity" \ + --arg TITLE_ID "$TITLE_ID" \ + --arg TYPE "$ASFF_TYPE" \ + --arg RESOURCE_TYPE "$ASFF_RESOURCE_TYPE" \ + --arg REPREGION "$REPREGION" \ + --arg TIMESTAMP "$(get_iso8601_timestamp)" \ + --arg PROWLER_VERSION "$PROWLER_VERSION" \ +-n '{ + "SchemaVersion": "2018-10-08", + "Id": "prowler-\($TITLE_ID)-\($ACCOUNT_NUM)-\($REPREGION)-\($UNIQUE_ID)", + "ProductArn": "arn:${AWS_PARTITION}:securityhub:\($REPREGION):\($ACCOUNT_NUM):product/\($ACCOUNT_NUM)/default", + "ProductFields": { + "ProviderName": "Prowler", + "ProviderVersion": $PROWLER_VERSION + }, + "GeneratorId": "prowler-\($PROWLER_VERSION)", + "AwsAccountId": $ACCOUNT_NUM, + "Types": [ + $TYPE + ], + "FirstObservedAt": $TIMESTAMP, + "UpdatedAt": $TIMESTAMP, + "CreatedAt": $TIMESTAMP, + "Severity": { + "Label": $SEVERITY + }, + "Title": $TITLE_TEXT, + "Description": $MESSAGE, + "Resources": [ + { + "Type": $RESOURCE_TYPE, + "Id": "AWS::::Account:\($ACCOUNT_NUM)", + "Partition": "aws", + "Region": $REPREGION + } + ], + "Compliance": { + "Status": $STATUS + } + }' +} diff --git a/include/securityhub_integration b/include/securityhub_integration new file mode 100644 index 00000000..b08f5277 --- /dev/null +++ b/include/securityhub_integration @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations under the License. + +# Checks that the correct mode (json-asff) has been specified if wanting to send check output to AWS Security Hub +# and that Security Hub is enabled in the chosen region +checkSecurityHubCompatibility(){ + if [[ "${MODE}" != "json-asff" ]]; then + echo -e "\n$RED ERROR!$NORMAL Output can only be sent to Security Hub when the output mode is json-asff, i.e. -M json-asff -S\n" + EXITCODE=1 + exit $EXITCODE + fi + SECURITY_HUB_ENABLED=$($AWSCLI securityhub --region $REGION $PROFILE_OPT describe-hub) + if [[ -z "${SECURITY_HUB_ENABLED}" ]]; then + echo -e "\n$RED ERROR!$NORMAL Security Hub is not enabled in $REGION. Enable it by calling '$AWSCLI securityhub --region $REGION $PROFILE_OPT enable-security-hub'\n" + EXITCODE=1 + exit $EXITCODE + fi +} + +sendToSecurityHub(){ + BATCH_IMPORT_RESULT=$($AWSCLI securityhub --region $REGION $PROFILE_OPT batch-import-findings --findings "$1") + # A successful CLI response is: {"SuccessCount": 1,"FailedFindings": [],"FailedCount": 0} + # Therefore, check that SuccessCount is indeed 1 + if [[ -z "${BATCH_IMPORT_RESULT}" ]] || ! jq -e '.SuccessCount == 1' <<< "${BATCH_IMPORT_RESULT}" > /dev/null 2>&1; then + echo -e "\n$RED ERROR!$NORMAL Failed to send check output to AWS Security Hub\n" + fi +} diff --git a/include/whoami b/include/whoami index 0fa8479a..4322cb96 100644 --- a/include/whoami +++ b/include/whoami @@ -13,10 +13,26 @@ # Get whoami in AWS, who is the user running this shell script + +ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Account") + +if [[ 255 -eq $? ]]; then + # Failed to get own identity ... exit + echo -e "$RED ERROR Getting credentials to run Prowler - EXITING! 
$NORMAL" + EXITCODE=2 + exit $EXITCODE +fi + +CALLER_ARN=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn") +USER_ID=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "UserId") +AWS_PARTITION=$(echo $CALLER_ARN| cut -d: -f2) + +if [[ $ACCOUNT_TO_ASSUME ]]; then + ACCOUNT_NUM=$ACCOUNT_TO_ASSUME +fi + getWhoami(){ - ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output json $PROFILE_OPT --region $REGION --query "Account" | tr -d '"') if [[ "$MODE" == "csv" ]]; then - CALLER_ARN_RAW=$($AWSCLI sts get-caller-identity --output json $PROFILE_OPT --region $REGION --query "Arn") if [[ 255 -eq $? ]]; then # Failed to get own identity ... exit echo "ERROR WITH $PROFILE CREDENTIALS - EXITING!" @@ -24,19 +40,18 @@ getWhoami(){ EXITCODE=2 exit $EXITCODE fi - CALLER_ARN=$(echo $CALLER_ARN_RAW | tr -d '"') printCsvHeader textTitle "0.0" "Show report generation info" "NOT_SCORED" "SUPPORT" textInfo "ARN: $CALLER_ARN TIMESTAMP: $SCRIPT_START_TIME" - elif [[ "$MODE" == "json" ]]; then + elif [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then : else echo "" echo -e " This report is being generated using credentials below:\n" - echo -e " AWS-CLI Profile: $NOTICE[$PROFILE]$NORMAL AWS API Region: $NOTICE[$REGION]$NORMAL AWS Filter Region: $NOTICE[${FILTERREGION:-all}]$NORMAL\n" + echo -e " AWS-CLI Profile: $NOTICE[$PROFILE]$NORMAL AWS API Region: $NOTICE[$REGION]$NORMAL AWS Filter Region: $NOTICE[${FILTERREGION:-all}]$NORMAL" if [[ $MONOCHROME -eq 1 ]]; then - echo -e " Caller Identity:" - $AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn" + echo -e " AWS Account: $NOTICE[$ACCOUNT_NUM]$NORMAL UserId: $NOTICE[$USER_ID]$NORMAL" + echo -e " Caller Identity ARN: $NOTICE[$CALLER_ARN]$NORMAL" if [[ 255 -eq $? ]]; then # Failed to get own identity ... exit echo "ERROR WITH $PROFILE CREDENTIALS - EXITING!" 
@@ -44,8 +59,8 @@ getWhoami(){ exit 2 fi else - echo -e " Caller Identity:" - $AWSCLI sts get-caller-identity --output table $PROFILE_OPT --region $REGION + echo -e " AWS Account: $NOTICE[$ACCOUNT_NUM]$NORMAL UserId: $NOTICE[$USER_ID]$NORMAL" + echo -e " Caller Identity ARN: $NOTICE[$CALLER_ARN]$NORMAL" if [[ 255 -eq $? ]]; then # Failed to get own identity ... exit echo variable $PROFILE_OPT diff --git a/prowler b/prowler index 4e027eb2..4a8d9d4f 100755 --- a/prowler +++ b/prowler @@ -32,7 +32,7 @@ OPTRED="" OPTNORMAL="" # Set the defaults variables -PROWLER_VERSION=2.2.0 +PROWLER_VERSION=2.2.1 PROWLER_DIR=$(dirname "$0") REGION="" @@ -44,10 +44,12 @@ QUIET=0 SEP=',' KEEPCREDREPORT=0 EXITCODE=0 +SEND_TO_SECURITY_HUB=0 SCRIPT_START_TIME=$( date -u +"%Y-%m-%dT%H:%M:%S%z" ) TITLE_ID="" TITLE_TEXT="CALLER ERROR - UNSET TITLE" WHITELIST_FILE="" +TOTAL_CHECKS=() # Command usage menu usage(){ @@ -65,25 +67,25 @@ USAGE: -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, csv (separator is ","; data is on stdout; progress on stderr) + -M output mode: text (default), mono, json, json-asff, junit-xml, csv. They can be used combined comma separated. + (separator is ","; data is on stdout; progress on stderr). -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) - -l list all available checks only (does not perform any check) + -l list all available checks only (does not perform any check). Add -g to only list checks within the specified group -L list all groups (does not perform any check) -e exclude group extras -E execute all tests except a list of specified checks separated by comma (i.e. check21,check31) -b do not print Prowler banner -V show version number & exit -s show scoring report + -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. 
"-M json-asff -S") -x specify external directory with custom checks (i.e. /my/own/checks, files must start by "check") -q suppress info messages and passing test output - -A account id for the account where to assume a role, requires -R and -T + -A account id for the account where to assume a role, requires -R and -T (i.e.: 123456789012) - -R role name to assume in the account, requires -A and -T + -R role name to assume in the account, requires -A and -T (i.e.: ProwlerRole) - -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T - (i.e.: 43200) -w whitelist file. (Lines starting with # are ignored as comments) Format: # ignore these due to some reason # check1 checks s3 buckets @@ -91,12 +93,15 @@ USAGE: : # checkid2 : + -T session duration given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T + (i.e.: 43200) + -I External ID to be used when assuming roles (not mandatory), requires -A and -R. -h this help " exit } -while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsx:A:R:T:w:" OPTION; do +while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSxI:A:R:T:w:" OPTION; do case $OPTION in h ) usage @@ -153,6 +158,9 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsx:A:R:T:w:" OPTION; do s ) SCORING=1 ;; + S ) + SEND_TO_SECURITY_HUB=1 + ;; x ) EXTERNAL_CHECKS_PATH=$OPTARG ;; @@ -165,6 +173,9 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsx:A:R:T:w:" OPTION; do R ) ROLE_TO_ASSUME=$OPTARG ;; + I ) + ROLE_EXTERNAL_ID=$OPTARG + ;; T ) SESSION_DURATION_TO_ASSUME=$OPTARG ;; @@ -197,16 +208,20 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT . $PROWLER_DIR/include/os_detector . $PROWLER_DIR/include/aws_profile_loader . $PROWLER_DIR/include/awscli_detector +. $PROWLER_DIR/include/whoami . $PROWLER_DIR/include/outputs . $PROWLER_DIR/include/csv_header . $PROWLER_DIR/include/banner -. $PROWLER_DIR/include/whoami . $PROWLER_DIR/include/credentials_report . $PROWLER_DIR/include/scoring . 
$PROWLER_DIR/include/python_detector . $PROWLER_DIR/include/secrets_detector +. $PROWLER_DIR/include/check_creds_last_used . $PROWLER_DIR/include/check3x . $PROWLER_DIR/include/assume_role +. $PROWLER_DIR/include/connection_tests +. $PROWLER_DIR/include/securityhub_integration +. $PROWLER_DIR/include/junit_integration # Get a list of all available AWS Regions REGIONS=$($AWSCLI ec2 describe-regions --query 'Regions[].RegionName' \ @@ -240,19 +255,43 @@ if [[ $EXTERNAL_CHECKS_PATH ]]; then done fi -# Function to show the title of the check +# Get a list of total checks available by ID +for i in "${!GROUP_TITLE[@]}"; do + IFS=',' read -ra CHECKS <<< "${GROUP_CHECKS[$i]}" + for j in "${CHECKS[@]}"; do + TOTAL_CHECKS+=("$CHECK_ID_$j") + done +done +# Remove duplicates whilst preserving the order of checks, and store the result as an array +TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | awk '!seen[$0]++')) + +# Function to show the title of the check, and optionally which group(s) it belongs to # using this way instead of arrays to keep bash3 (osx) and bash4(linux) compatibility show_check_title() { local check_id=CHECK_ID_$1 local check_title=CHECK_TITLE_$1 local check_scored=CHECK_SCORED_$1 local check_type=CHECK_TYPE_$1 - textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" + local group_ids + local group_index + # If requested ($2 is any non-null value) iterate all GROUP_CHECKS and produce a comma-separated list of all + # the GROUP_IDs that include this particular check + if [[ -n "$2" ]]; then + for group_index in "${!GROUP_ID[@]}"; do + if [[ "${GROUP_CHECKS[$group_index]}" =~ "$1" ]]; then + if [[ -n "$group_ids" ]]; then + group_ids+=", " + fi + group_ids+="${GROUP_ID[$group_index]}" + fi + done + fi + textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" "$group_ids" } # Function to show the title of a group, by numeric id show_group_title() { - # when csv mode is used, no group tittle is shown + # 
when csv mode is used, no group title is shown if [[ "$MODE" != "csv" ]]; then textTitle "${GROUP_NUMBER[$1]}" "${GROUP_TITLE[$1]}" "NOT_SCORED" "SUPPORT" fi @@ -260,41 +299,63 @@ show_group_title() { # Function to execute the check execute_check() { - # See if this is an alternate name for a check - # for example, we might have been passed 1.01 which is another name for 1.1 + # See if this is an alternate name for a check + # for example, we might have been passed 1.01 which is another name for 1.1 local alternate_name_var=CHECK_ALTERNATE_$1 local alternate_name=${!alternate_name_var} + # See if this check defines an ASFF Type, if so, use this, falling back to a sane default + # For a list of Types, see: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#securityhub-findings-format-type-taxonomy + local asff_type_var=CHECK_ASFF_TYPE_$1 + ASFF_TYPE="${!asff_type_var:-Software and Configuration Checks}" + # See if this check defines an ASFF Resource Type, if so, use this, falling back to a sane default + # For a list of Resource Types, see: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources + local asff_resource_type_var=CHECK_ASFF_RESOURCE_TYPE_$1 + ASFF_RESOURCE_TYPE="${!asff_resource_type_var:-AwsAccount}" # Generate the credential report, only if it is group1 related which checks we # run so that the checks can safely assume it's available - if [ ${alternate_name} ];then + if [ ${alternate_name} ];then if [[ ${alternate_name} == check1* || ${alternate_name} == extra71 ]];then if [ ! 
-s $TEMP_REPORT_FILE ];then genCredReport saveReport fi fi - show_check_title ${alternate_name} - ${alternate_name} - else + show_check_title ${alternate_name} + if is_junit_output_enabled; then + prepare_junit_check_output "$1" + fi + # Execute the check + ${alternate_name} + if is_junit_output_enabled; then + finalise_junit_check_output "$1" + fi + else # Check to see if this is a real check local check_id_var=CHECK_ID_$1 local check_id=${!check_id_var} if [ ${check_id} ]; then - if [[ ${check_id} == 1* || ${check_id} == 7.1,7.01 ]];then + if [[ ${check_id} == 1* || ${check_id} == 7.1 || ${check_id} == 7.74 ]];then if [ ! -s $TEMP_REPORT_FILE ];then genCredReport saveReport fi fi show_check_title "$1" - ignores=$(awk '/${1}/{print}' <(echo "${WHITELIST}")) # set the custom ignores list for this check + ignores=$(awk '/${1}/{print}' <(echo "${WHITELIST}")) + if is_junit_output_enabled; then + prepare_junit_check_output "$1" + fi + # Execute the check IGNORES="${ignores}" CHECK_NAME="$1" $1 + if is_junit_output_enabled; then + finalise_junit_check_output "$1" + fi else textFail "ERROR! Use a valid check name (i.e. 
check41 or extra71)"; exit $EXITCODE fi - fi + fi } # Function to execute all checks in a group @@ -345,62 +406,68 @@ execute_all() { done } -# Function to show the titles of everything +# Function to show the titles of either all checks or only those in the specified group show_all_titles() { - MAIN_GROUPS=(1 2 3 4 7) - for i in "${MAIN_GROUPS[@]}"; do - show_group_title $i - # Display the title of the checks in groups 1,2,3,4 and 7 - # Any other group has checks in these groups - IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} - for j in ${CHECKS[@]}; do - show_check_title $j - done - done + local checks + local check_id + local group_index + # If '-g ' has been specified, only show the titles of checks within the specified group + if [[ $GROUP_ID_READ ]];then + if [[ " ${GROUP_ID[@]} " =~ " ${GROUP_ID_READ} " ]]; then + for group_index in "${!GROUP_ID[@]}"; do + if [ "${GROUP_ID[$group_index]}" == "${GROUP_ID_READ}" ]; then + show_group_title "$group_index" + IFS=',' read -ra checks <<< "${GROUP_CHECKS[$group_index]}" + for check_id in ${checks[@]}; do + show_check_title "$check_id" + done + fi + done + else + textFail "Use a valid check group ID i.e.: group1, extras, forensics-ready, etc." 
+ show_all_group_titles + exit $EXITCODE + fi + else + for check_id in "${TOTAL_CHECKS[@]}"; do + # Pass 1 so that the group IDs that this check belongs to are printed + show_check_title "$check_id" 1 + done + fi } show_all_group_titles() { - for i in "${!GROUP_TITLE[@]}"; do - show_group_title $i - done + local group_index + for group_index in "${!GROUP_TITLE[@]}"; do + show_group_title "$group_index" + done } - # Function to execute all checks but exclude some of them get_all_checks_without_exclusion() { - CHECKS_EXCLUDED=() - local CHECKS_TO_EXCLUDE=() - local TOTAL_CHECKS=() - #Get a list of checks to exclude - IFS=',' read -ra E_CHECKS <<< "$1" - for E_CHECK in "${E_CHECKS[@]}"; do - CHECKS_TO_EXCLUDE+=($E_CHECK) - done - #Get a list of total checks available by ID - for i in "${!GROUP_TITLE[@]}"; do - #show_group_title $i - IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} - for j in ${CHECKS[@]}; do - TOTAL_CHECKS+=($CHECK_ID_$j) - done - done - TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | sort -u)) #removes duplicate and store the result as an array - #Create a list that contains all checks but excluded ones - for i in "${TOTAL_CHECKS[@]}"; do - local COINCIDENCE=false - for x in "${CHECKS_TO_EXCLUDE[@]}"; do - if [[ "$i" == "$x" ]]; then - COINCIDENCE=true - fi - done - if [[ "$COINCIDENCE" = false ]]; then - CHECKS_EXCLUDED+=($i) - fi - done + CHECKS_EXCLUDED=() + local CHECKS_TO_EXCLUDE=() + # Get a list of checks to exclude + IFS=',' read -ra E_CHECKS <<< "$1" + for E_CHECK in "${E_CHECKS[@]}"; do + CHECKS_TO_EXCLUDE+=($E_CHECK) + done + # Create a list that contains all checks but excluded ones + for i in "${TOTAL_CHECKS[@]}"; do + local COINCIDENCE=false + for x in "${CHECKS_TO_EXCLUDE[@]}"; do + if [[ "$i" == "$x" ]]; then + COINCIDENCE=true + fi + done + if [[ "$COINCIDENCE" = false ]]; then + CHECKS_EXCLUDED+=($i) + fi + done } ### All functions defined above ... 
run the workflow -if [[ $MODE != "csv" ]]; then +if [[ " ${MODES[@]} " =~ " mono " || " ${MODES[@]} " =~ " text " ]]; then prowlerBanner fi @@ -416,20 +483,26 @@ if [[ $PRINTGROUPSONLY == "1" ]]; then exit $EXITCODE fi -# Check that jq is installed for JSON output -if [[ $MODE == "json" ]]; then +# Check that jq is installed for JSON outputs +if [[ " ${MODES[@]} " =~ " json " || " ${MODES[@]} " =~ " json-asff " ]]; then . $PROWLER_DIR/include/jq_detector fi +if [[ "$SEND_TO_SECURITY_HUB" -eq 1 ]]; then + checkSecurityHubCompatibility +fi + +if is_junit_output_enabled; then + prepare_junit_output +fi + # Gather account data / test aws cli connectivity getWhoami # Execute group of checks if called with -g if [[ $GROUP_ID_READ ]];then if [[ " ${GROUP_ID[@]} " =~ " ${GROUP_ID_READ} " ]]; then - if [[ $MODE == "csv" ]]; then - BANNER=0 - fi + execute_group_by_id ${GROUP_ID_READ} ${EXCLUDE_CHECK_ID} cleanTemp scoring @@ -461,6 +534,10 @@ if [[ $CHECK_ID ]];then exit $EXITCODE fi +execute_all +scoring +cleanTemp + if [[ $ACCOUNT_TO_ASSUME ]]; then # unset env variables with assumed role credentials unset AWS_ACCESS_KEY_ID @@ -469,7 +546,4 @@ if [[ $ACCOUNT_TO_ASSUME ]]; then fi -execute_all -scoring -cleanTemp exit $EXITCODE