mirror of https://github.com/ghndrx/prowler.git (synced 2026-02-10 14:55:00 +00:00)
Merge branch 'master' into improve-listing-of-checks-and-groups-545
.gitignore (vendored): 11 lines changed
@@ -18,4 +18,13 @@ tags
 [._]*.un~
 
 # MacOs DS_Store
 *.DS_Store
+
+# Prowler output
+prowler-output-*
+
+# JUnit Reports
+junit-reports/
+
+# VSCode files
+.vscode/

README.md: 75 lines changed
@@ -45,7 +45,6 @@ Read more about [CIS Amazon Web Services Foundations Benchmark v1.2.0 - 05-23-20
 - HIPAA [hipaa] Read more [here](#hipaa-checks)
 - Trust Boundaries [trustboundaries] Read more [here](#trustboundaries-checks)
 
-
 With Prowler you can:
 
 - get a colorful or monochrome report
@@ -68,6 +67,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 AWS-CLI can also be installed using "brew", "apt", "yum" or manually from <https://aws.amazon.com/cli/>, but `ansi2html` and `detect-secrets` have to be installed using `pip`. You will need to install `jq` to get more accuracy in some checks.
 
 - Make sure jq is installed (example below with "apt" but use a valid package manager for your OS):
+
 ```sh
 sudo apt install jq
 ```
@@ -84,7 +84,9 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 ```sh
 aws configure
 ```
+
 or
+
 ```sh
 export AWS_ACCESS_KEY_ID="ASXXXXXXX"
 export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
@@ -127,16 +129,21 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 ```sh
 ./prowler -c check310
 ```
+
 With Docker:
+
 ```sh
 docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest "-c check310"
 ```
 
 or multiple checks separated by comma:
+
 ```sh
 ./prowler -c check310,check722
 ```
+
 or all checks but some of them:
+
 ```sh
 ./prowler -E check42,check43
 ```
@@ -152,7 +159,9 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 ```sh
 ./prowler -g group1 # for iam related checks
 ```
+
 or exclude some checks in the group:
+
 ```sh
 ./prowler -g group4 -E check42,check43
 ```
@@ -161,16 +170,20 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 
 ### Save your reports
 
-1. If you want to save your report for later analysis there are different ways, natively (supported: text, mono, csv, json and json-asff; see note below for more info):
+1. If you want to save your report for later analysis there are different ways, natively (supported: text, mono, csv, json, json-asff and junit-xml; see note below for more info):
+
 ```sh
 ./prowler -M csv
 ```
 
 or with multiple formats at the same time:
+
 ```sh
 ./prowler -M csv,json,json-asff
 ```
+
 or just a group of checks in multiple formats:
+
 ```sh
 ./prowler -g gdpr -M csv,json,json-asff
 ```
@@ -190,6 +203,12 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 ./prowler | ansi2html -la > report.html
 ```
 
+To generate JUnit report files, include the junit-xml format. This can be combined with any other format. Files are written inside a prowler root directory named `junit-reports`:
+
+```sh
+./prowler -M text,junit-xml
+```
+
 >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like the default one but monochrome, "csv" is comma separated values, "json" is plain basic json (without comma between lines) and "json-asff" is also json but in the Amazon Security Finding Format, which you can ship to Security Hub using `-S`.
 
 or save your report in an S3 bucket (this only works for text or mono; for csv, json or json-asff it has to be copied afterwards):
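A quick way to see the new mode end to end (an illustrative session, not output from the commit itself; the `TEST-<check>.xml` name pattern comes from the `include/junit_integration` file added later in this diff):

```sh
./prowler -c check11 -M text,junit-xml
ls junit-reports/
# TEST-check11.xml   <- one XML file is written per executed check
```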
@@ -213,7 +232,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 
 1. For help use:
 
-```
+```sh
 ./prowler -h
 
 USAGE:
@@ -230,7 +249,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 -f <filterregion> specify an AWS region to run checks against
 (i.e.: us-west-1)
 -m <maxitems> specify the maximum number of items to return for long-running requests (default: 100)
--M <mode> output mode: text (default), mono, json, json-asff, csv. They can be combined, comma separated.
+-M <mode> output mode: text (default), mono, json, json-asff, junit-xml, csv. They can be combined, comma separated.
 (separator is ","; data is on stdout; progress on stderr).
 -k keep the credential report
 -n show check numbers to sort easier
@@ -261,11 +280,11 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX
 
 Prowler uses the AWS CLI underneath so it uses the same authentication methods. However, there are a few ways to run Prowler against multiple accounts using the IAM Assume Role feature, depending on each use case. You can just set up your custom profile inside `~/.aws/config` with all needed information about the role to assume, then call it with `./prowler -p your-custom-profile`. Additionally you can use `-A 123456789012` and `-R RemoteRoleToAssume` and Prowler will get those temporary credentials using `aws sts assume-role`, set them up as environment variables and run against that given account.
 
-```
+```sh
 ./prowler -A 123456789012 -R ProwlerRole
 ```
 
-```
+```sh
 ./prowler -A 123456789012 -R ProwlerRole -I 123456
 ```
 
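For the profile-based approach, a minimal `~/.aws/config` sketch (account, role and profile names are illustrative; `role_arn` and `source_profile` are standard AWS CLI config keys):

```sh
# ~/.aws/config
# [profile your-custom-profile]
# role_arn = arn:aws:iam::123456789012:role/ProwlerRole
# source_profile = default
./prowler -p your-custom-profile
```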
@@ -275,11 +294,11 @@ Prowler uses the AWS CLI underneath so it uses the same authentication methods.
 
 For example, if you want to get only the fails in CSV format from all checks regarding RDS, without banner, from the AWS Account 123456789012, assuming the role RemoteRoleToAssume with a fixed session duration of 1h:
 
-```
+```sh
 ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -b -M csv -q -g rds
 ```
 
-```
+```sh
 ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -I 123456 -b -M csv -q -g rds
 ```
 
@@ -304,17 +323,18 @@ Flag `-x /my/own/checks` will include any check in that particular directory. To
 
 In order to remove noise and get only FAIL findings there is a `-q` flag that makes Prowler show and log only FAILs. It can be combined with any other option.
 
-```
+```sh
 ./prowler -q -M csv -b
 ```
 
 ## Security Hub integration
 
-Since version v2.3, Prowler supports natively sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. It is as simple as running the commanbd below:
+Since version v2.3, Prowler natively supports sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. It is as simple as running the command below:
 
-```
-./prowler -M json-asff -S
-```
+```sh
+./prowler -M json-asff -S
+```
+
 There are two requirements:
 
 1. Security Hub must be enabled for the active region from where you are calling Prowler (if no region is used with `-r` then `us-east-1` is used). It can be enabled by calling `aws securityhub enable-security-hub`
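Putting requirement 1 together with the run itself, a hedged session (region chosen purely for illustration):

```sh
aws securityhub enable-security-hub --region us-east-1   # satisfy requirement 1 once per region
./prowler -M json-asff -S -r us-east-1                   # ship findings to that same region
```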
@@ -323,7 +343,6 @@ There are two requirements:
 
 >Note: to have updated findings in Security Hub you have to run Prowler periodically, once a day or every certain amount of hours.
 
-
 ## How to fix every FAIL
 
 Check your report and fix the issues following all specific guidelines per check in <https://d0.awsstatic.com/whitepapers/compliance/AWS_CIS_Foundations_Benchmark.pdf>
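One hedged way to satisfy that periodic-run note (illustrative crontab entry running daily at 02:00; the install path and log file are assumptions):

```sh
# crontab -e
0 2 * * * cd /opt/prowler && ./prowler -M json-asff -S -q > /var/log/prowler-daily.log 2>&1
```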
@@ -344,7 +363,7 @@ To fix it, please renew your token by authenticating again to the AWS API, see n
 
 If you are using an STS token for AWS-CLI and your session has expired you will probably get this error:
 
-```
+```sh
 A client error (ExpiredToken) occurred when calling the GenerateCredentialReport operation: The security token included in the request is expired
 ```
 
@@ -354,16 +373,19 @@ To fix it, please renew your token by authenticating again to the AWS API, see n
 
 To run Prowler using a profile that requires MFA you just need to get the session token beforehand. Just make sure you use this command:
 
-```
+```sh
 aws --profile <YOUR_AWS_PROFILE> sts get-session-token --duration 129600 --serial-number <ARN_OF_MFA> --token-code <MFA_TOKEN_CODE> --output text
 ```
-Once you get your token you can export it as environment variable:
-```
+
+Once you get your token you can export it as an environment variable:
+
+```sh
 export AWS_PROFILE=YOUR_AWS_PROFILE
 export AWS_SESSION_TOKEN=YOUR_NEW_TOKEN
 export AWS_SECRET_ACCESS_KEY=YOUR_SECRET
 export AWS_ACCESS_KEY_ID=YOUR_KEY
 ```
 
 or manually set up your `~/.aws/credentials` file properly.
 
 There are some helpful tools to save time in this process like [aws-mfa-script](https://github.com/asagage/aws-mfa-script) or [aws-cli-mfa](https://github.com/sweharris/aws-cli-mfa).
@@ -383,11 +405,13 @@ There are some helpful tools to save time in this process like [aws-mfa-script]
 [Prowler-Additions-Policy](iam/prowler-additions-policy.json)
 
 Some new and specific checks require Prowler to inherit more permissions than SecurityAudit and ViewOnlyAccess to work properly. In addition to the AWS managed policies, "SecurityAudit" and "ViewOnlyAccess", the user/role you use for checks may need to be granted a custom policy with a few more read-only permissions (mostly to support additional services). Here is an example policy with the additional rights, "Prowler-Additions-Policy" (see the bootstrap script below to set it up):
 
 - [iam/prowler-additions-policy.json](iam/prowler-additions-policy.json)
+
 [Prowler-Security-Hub Policy](iam/prowler-security-hub.json)
 
 Allows Prowler to import its findings to [AWS Security Hub](https://aws.amazon.com/security-hub). More information in [Security Hub integration](#security-hub-integration):
 
 - [iam/prowler-security-hub.json](iam/prowler-security-hub.json)
+
 ### Bootstrap Script
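A hedged sketch of wiring those policies to a role by hand instead of via the bootstrap script (role name is illustrative; the two managed-policy ARNs are the standard AWS ones):

```sh
aws iam attach-role-policy --role-name ProwlerRole --policy-arn arn:aws:iam::aws:policy/SecurityAudit
aws iam attach-role-policy --role-name ProwlerRole --policy-arn arn:aws:iam::aws:policy/job-function/ViewOnlyAccess
# inline the repo's additions policy (path relative to a prowler checkout):
aws iam put-role-policy --role-name ProwlerRole --policy-name Prowler-Additions-Policy --policy-document file://iam/prowler-additions-policy.json
```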
@@ -418,7 +442,7 @@ Some of these checks look for publicly facing resources may not actually be full
 
 To list all existing checks please run the command below:
 
-```
+```sh
 ./prowler -l
 ```
 
@@ -474,6 +498,7 @@ With this group of checks, Prowler shows results of controls related to the "Sec
 More information on the original PR is [here](https://github.com/toniblyx/prowler/issues/227).
 
 ### Note on Business Associate Addendum (BAA)
+
 Under the HIPAA regulations, cloud service providers (CSPs) such as AWS are considered business associates. The Business Associate Addendum (BAA) is an AWS contract that is required under HIPAA rules to ensure that AWS appropriately safeguards protected health information (PHI). The BAA also serves to clarify and limit, as appropriate, the permissible uses and disclosures of PHI by AWS, based on the relationship between AWS and our customers, and the activities or services being performed by AWS. Customers may use any AWS service in an account designated as a HIPAA account, but they should only process, store, and transmit protected health information (PHI) in the HIPAA-eligible services defined in the Business Associate Addendum (BAA). For the latest list of HIPAA-eligible AWS services, see [HIPAA Eligible Services Reference](https://aws.amazon.com/compliance/hipaa-eligible-services-reference/).
 
 More information on AWS & HIPAA can be found [here](https://aws.amazon.com/compliance/hipaa-compliance/)
@@ -489,7 +514,9 @@ The `hipaa` group of checks uses existing and extra checks. To get a HIPAA repor
 ```
 
 ## Trust Boundaries Checks
+
 ### Definition and Terms
+
 The term "trust boundary" originates from the threat modelling process; its best-known contributor, Adam Shostack, author of "Threat Modeling: Designing for Security", defines it as follows ([reference](https://adam.shostack.org/uncover.html)):
 
 > Trust boundaries are perhaps the most subjective of all: these represent the border between trusted and untrusted elements. Trust is complex. You might trust your mechanic with your car, your dentist with your teeth, and your banker with your money, but you probably don't trust your dentist to change your spark plugs.
@@ -500,15 +527,20 @@ This group of checks helps to analyse a particular AWS account (subject) on exis
 
 ### Run
+
 To give it a quick shot just call:
+
 ```sh
 ./prowler -g trustboundaries
 ```
 
 ### Scenarios
+
 Currently this check group supports two different scenarios:
+
 1. Single account environment: no action required, the configuration happens automatically for you.
 2. Multi account environment: in case your environment has multiple trusted and known AWS accounts, you may want to append them manually to [groups/group16_trustboundaries](groups/group16_trustboundaries) as a space separated list in the `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable, then just run prowler.
 
 ### Coverage
 
 Current coverage of Amazon Web Services (AWS) taken from [here](https://docs.aws.amazon.com/whitepapers/latest/aws-overview/introduction.html):
 
 | Topic | Service | Trust Boundary |
 |---------------------------------|------------|---------------------------------------------------------------------------|
@@ -518,6 +550,7 @@ Current coverage of Amazon Web Services (AWS) taken from [here](https://docs.aws.
 All ideas or recommendations to extend this group are very welcome [here](https://github.com/toniblyx/prowler/issues/new/choose).
 
 ### Detailed Explanation of the Concept
+
 The diagrams depict two common scenarios, single account and multi account environments.
 Every circle represents one AWS account.
 The dashed line represents the trust boundary that separates trusted and untrusted AWS accounts.

@@ -30,7 +30,7 @@ extra725(){
   if [[ $LIST_OF_TRAILS ]]; then
     BUCKET_ENABLED_TRAILS=()
     for trail in $LIST_OF_TRAILS; do
-      BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:aws:s3:::$bucketName/\S*$|^arn:aws:s3$")
+      BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:aws:s3:::$bucketName/\S*$|^arn:aws:s3$|^arn:aws:s3:::$")
       if [[ $BUCKET_ENABLED_IN_TRAIL ]]; then
         BUCKET_ENABLED_TRAILS+=($trail)
         # textPass "$regx: S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx"

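What the widened grep now accepts, in isolation (bucket name illustrative): the extra `^arn:aws:s3:::$` alternation matches a bare all-buckets data-event selector, so such trails now count as covering the bucket:

```sh
echo "arn:aws:s3:::" | grep -E "^arn:aws:s3:::mybucket/\S*$|^arn:aws:s3$|^arn:aws:s3:::$" && echo "matched"
```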
@@ -23,19 +23,22 @@ extra768(){
     # this folder is deleted once this check is finished
     mkdir $SECRETS_TEMP_FOLDER
   fi
 
   textInfo "Looking for secrets in ECS task definitions' environment variables across all regions... "
   for regx in $REGIONS; do
-    LIST_OF_TASK_DEFINITIONS=$($AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx --query taskDefinitionArns[*] --output text)
-    if [[ $LIST_OF_TASK_DEFINITIONS ]]; then
-      for taskDefinition in $LIST_OF_TASK_DEFINITIONS;do
-        IFS='/' read -r -a splitArn <<< "$taskDefinition"
+    # Get a list of all families first:
+    FAMILIES=$($AWSCLI ecs list-task-definition-families $PROFILE_OPT --region $regx --status ACTIVE | jq -r .families[])
+    if [[ $FAMILIES ]]; then
+      for FAMILY in $FAMILIES;do
+        # Get the full task definition arn:
+        TASK_DEFINITION_TEMP=$($AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx --family-prefix $FAMILY --sort DESC --max-items 1 | jq -r .taskDefinitionArns[0])
+        # We only care about the task definition name:
+        IFS='/' read -r -a splitArn <<< "$TASK_DEFINITION_TEMP"
         TASK_DEFINITION=${splitArn[1]}
         TASK_DEFINITION_ENV_VARIABLES_FILE="$SECRETS_TEMP_FOLDER/extra768-$TASK_DEFINITION-$regx-variables.txt"
-        TASK_DEFINITION_ENV_VARIABLES=$($AWSCLI ecs $PROFILE_OPT --region $regx describe-task-definition --task-definition $taskDefinition --query 'taskDefinition.containerDefinitions[*].environment' --output text > $TASK_DEFINITION_ENV_VARIABLES_FILE)
+        TASK_DEFINITION_ENV_VARIABLES=$($AWSCLI ecs $PROFILE_OPT --region $regx describe-task-definition --task-definition $TASK_DEFINITION --query 'taskDefinition.containerDefinitions[*].environment' --output text > $TASK_DEFINITION_ENV_VARIABLES_FILE)
         if [ -s $TASK_DEFINITION_ENV_VARIABLES_FILE ];then
           # Implementation using https://github.com/Yelp/detect-secrets
           FINDINGS=$(secretsDetector file $TASK_DEFINITION_ENV_VARIABLES_FILE)
           if [[ $FINDINGS -eq 0 ]]; then
             textPass "$regx: No secrets found in ECS task definition $TASK_DEFINITION variables" "$regx"
             # delete file if nothing interesting is there

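The family-first flow above, run by hand (region, family name and account number are illustrative):

```sh
aws ecs list-task-definition-families --status ACTIVE --region us-east-1
# -> families such as "web", "worker"
aws ecs list-task-definitions --family-prefix web --sort DESC --max-items 1 --region us-east-1
# -> arn:aws:ecs:us-east-1:123456789012:task-definition/web:42
# splitArn keeps the part after "/", so TASK_DEFINITION becomes "web:42"
```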
@@ -14,16 +14,15 @@
 
 IFS=',' read -ra MODES <<< "${MODE}"
 for MODE in "${MODES[@]}"; do
-  if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" ]]; then
-    echo -e "${OPTRED}ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json or json-asff. ./prowler -h for help"
+  if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" && "$MODE" != "junit-xml" ]]; then
+    echo -e "${OPTRED}ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json, json-asff or junit-xml. ./prowler -h for help"
     EXITCODE=1
     exit $EXITCODE
   fi
-done
-
-if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then
-  MONOCHROME=1
-fi
+  if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then
+    MONOCHROME=1
+  fi
+done
 
 if [[ $MONOCHROME -eq 1 ]]; then
   # Colors

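Why moving the MONOCHROME block inside the loop matters (a minimal bash illustration, not Prowler code): after `for MODE in "${MODES[@]}"`, the loop variable only retains the last element, so a combination like `csv,text` would never have set MONOCHROME when checked after the loop:

```sh
MODE="csv,text"
IFS=',' read -ra MODES <<< "${MODE}"
for MODE in "${MODES[@]}"; do :; done
echo "$MODE"   # prints "text"; the earlier "csv" element is no longer visible here
```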
include/junit_integration: 97 lines (new file)
@@ -0,0 +1,97 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+# Generates JUnit XML reports which can be read by Jenkins or other CI tools
+
+JUNIT_OUTPUT_DIRECTORY="junit-reports"
+
+is_junit_output_enabled() {
+  if [[ ${MODES[@]} =~ "junit-xml" ]]; then
+    true
+  else
+    false
+  fi
+}
+
+xml_escape() {
+  sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g; s/\"/\&quot;/g; s/'"'"'/\&apos;/g' <<< "$1"
+}
+
+prepare_junit_output() {
+  # Remove any JUnit output from previous runs
+  rm -rf "$JUNIT_OUTPUT_DIRECTORY"
+  mkdir "$JUNIT_OUTPUT_DIRECTORY"
+  echo ""
+  echo "$NOTICE Writing JUnit XML reports to $PROWLER_DIR/$JUNIT_OUTPUT_DIRECTORY $NORMAL"
+}
+
+prepare_junit_check_output() {
+  # JUnit test cases must be named uniquely, but each Prowler check can output many times due to multiple resources,
+  # therefore append an index value to the test case name to provide uniqueness, reset it to 1 before starting this check
+  JUNIT_CHECK_INDEX=1
+  # To match JUnit behaviour in Java, and ensure that an aborted execution does not leave a partially written and therefore invalid XML file,
+  # output a JUnit XML file per check
+  JUNIT_OUTPUT_FILE="$JUNIT_OUTPUT_DIRECTORY/TEST-$1.xml"
+  printf '%s\n' \
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" \
+    "<testsuite name=\"$(xml_escape "$(get_junit_classname)")\" timestamp=\"$(get_iso8601_timestamp)\">" \
+    "  <properties>" \
+    "    <property name=\"prowler.version\" value=\"$(xml_escape "$PROWLER_VERSION")\"/>" \
+    "    <property name=\"aws.profile\" value=\"$(xml_escape "$PROFILE")\"/>" \
+    "    <property name=\"aws.accountNumber\" value=\"$(xml_escape "$ACCOUNT_NUM")\"/>" \
+    "    <property name=\"check.id\" value=\"$(xml_escape "$TITLE_ID")\"/>" \
+    "    <property name=\"check.scored\" value=\"$(xml_escape "$ITEM_SCORED")\"/>" \
+    "    <property name=\"check.level\" value=\"$(xml_escape "$ITEM_LEVEL")\"/>" \
+    "    <property name=\"check.asff.type\" value=\"$(xml_escape "$ASFF_TYPE")\"/>" \
+    "    <property name=\"check.asff.resourceType\" value=\"$(xml_escape "$ASFF_RESOURCE_TYPE")\"/>" \
+    "  </properties>" \
+    > "$JUNIT_OUTPUT_FILE"
+  JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds)
+}
+
+finalise_junit_check_output() {
+  echo '</testsuite>' >> "$JUNIT_OUTPUT_FILE"
+}
+
+output_junit_success() {
+  output_junit_test_case "$1" "<system-out>$(xml_escape "$1")</system-out>"
+}
+
+output_junit_info() {
+  # Nothing to output for JUnit for this level of message, but reset the check timer for timing the next check
+  JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds)
+}
+
+output_junit_failure() {
+  output_junit_test_case "$1" "<failure message=\"$(xml_escape "$1")\"/>"
+}
+
+get_junit_classname() {
+  # <section>.<check_id> naturally follows a Java package structure, so it is suitable as a package name
+  echo "$TITLE_ID"
+}
+
+output_junit_test_case() {
+  local time_now
+  local test_case_duration
+  time_now=$(get_time_in_milliseconds)
+  # JUnit test case time values are in seconds, so divide by 1000 using e-3 to convert from milliseconds without losing accuracy due to non-floating point arithmetic
+  test_case_duration=$(printf "%.3f" "$((time_now - JUNIT_CHECK_START_TIME))e-3")
+  printf '%s\n' \
+    "  <testcase name=\"$(xml_escape "$TITLE_TEXT") ($JUNIT_CHECK_INDEX)\" classname=\"$(xml_escape "$(get_junit_classname)")\" time=\"$test_case_duration\">" \
+    "    $2" \
+    "  </testcase>" >> "$JUNIT_OUTPUT_FILE"
+  # Reset the check timer for timing the next check
+  JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds)
+  ((JUNIT_CHECK_INDEX+=1))
+}

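Assembled by `prepare_junit_check_output`, `output_junit_test_case` and `finalise_junit_check_output`, a resulting file should look roughly like this (check id, title, timestamp, version and timing are all invented for illustration):

```sh
# cat junit-reports/TEST-check11.xml
# <?xml version="1.0" encoding="UTF-8"?>
# <testsuite name="1.1" timestamp="2020-05-01T00:00:00Z">
#   <properties>
#     <property name="prowler.version" value="2.4.0"/>
#     ...
#   </properties>
#   <testcase name="Avoid the use of the root account (1)" classname="1.1" time="0.420">
#     <system-out>Root account last accessed ...</system-out>
#   </testcase>
# </testsuite>
```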
@@ -11,17 +11,19 @@
 # CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+DATE_CMD="date"
+
 gnu_how_older_from_today() {
   DATE_TO_COMPARE=$1
-  TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s)
-  DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s)
+  TODAY_IN_DAYS=$("$DATE_CMD" -d "$("$DATE_CMD" +%Y-%m-%d)" +%s)
+  DATE_FROM_IN_DAYS=$("$DATE_CMD" -d $DATE_TO_COMPARE +%s)
   DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24))
   echo $DAYS_SINCE
 }
 bsd_how_older_from_today() {
   DATE_TO_COMPARE=$1
-  TODAY_IN_DAYS=$(date +%s)
-  DATE_FROM_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s)
+  TODAY_IN_DAYS=$("$DATE_CMD" +%s)
+  DATE_FROM_IN_DAYS=$("$DATE_CMD" -jf %Y-%m-%d $DATE_TO_COMPARE +%s)
   DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24))
   echo $DAYS_SINCE
 }
@@ -31,13 +33,13 @@ bsd_how_older_from_today() {
 gnu_timestamp_to_date() {
   # remove fractions of a second
   TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".")
-  OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d')
+  OUTPUT_DATE=$("$DATE_CMD" -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d')
   echo $OUTPUT_DATE
 }
 bsd_timestamp_to_date() {
   # remove fractions of a second
   TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".")
-  OUTPUT_DATE=$(date -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d')
+  OUTPUT_DATE=$("$DATE_CMD" -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d')
   echo $OUTPUT_DATE
 }
 
@@ -50,15 +52,15 @@ bsd_decode_report() {
 
 gnu_how_many_days_from_today() {
   DATE_TO_COMPARE=$1
-  TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s)
-  DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s)
+  TODAY_IN_DAYS=$("$DATE_CMD" -d "$("$DATE_CMD" +%Y-%m-%d)" +%s)
+  DATE_IN_DAYS=$("$DATE_CMD" -d $DATE_TO_COMPARE +%s)
   DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24))
   echo $DAYS_TO
 }
 bsd_how_many_days_from_today() {
   DATE_TO_COMPARE=$1
-  TODAY_IN_DAYS=$(date +%s)
-  DATE_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s)
+  TODAY_IN_DAYS=$("$DATE_CMD" +%s)
+  DATE_IN_DAYS=$("$DATE_CMD" -jf %Y-%m-%d $DATE_TO_COMPARE +%s)
   DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24))
   echo $DAYS_TO
 }
@@ -66,17 +68,32 @@ bsd_how_many_days_from_today() {
 gnu_get_date_previous_than_months() {
   MONTHS_TO_COMPARE=$1
   MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE ))
-  CURRENTSECS=$(date +%s)
+  CURRENTSECS=$("$DATE_CMD" +%s)
   STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS ))
-  DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d')
+  DATE_BEFORE_MONTHS_TO_COMPARE=$("$DATE_CMD" -d @$STARTDATEINSECS '+%Y-%m-%d')
   echo $DATE_BEFORE_MONTHS_TO_COMPARE
 }
 bsd_get_date_previous_than_months() {
   MONTHS_TO_COMPARE=$1
-  DATE_BEFORE_MONTHS_TO_COMPARE=$(date -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d')
+  DATE_BEFORE_MONTHS_TO_COMPARE=$("$DATE_CMD" -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d')
   echo $DATE_BEFORE_MONTHS_TO_COMPARE
 }
+
+gnu_get_time_in_milliseconds() {
+  "$DATE_CMD" +%s%3N
+}
+bsd_get_time_in_milliseconds() {
+  # BSD date does not support outputting milliseconds, so pad with zeros
+  "$DATE_CMD" +%s000
+}
+
+gnu_get_iso8601_timestamp() {
+  "$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ"
+}
+bsd_get_iso8601_timestamp() {
+  "$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ"
+}
 
 gnu_test_tcp_connectivity() {
   HOST=$1
   PORT=$2
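The two timing fallbacks side by side (outputs illustrative):

```sh
date +%s%3N   # GNU date: e.g. 1588291200123, real millisecond precision
date +%s000   # BSD date: e.g. 1588291200000, seconds padded with zeros
```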
@@ -114,16 +131,28 @@ if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then
   get_date_previous_than_months() {
     gnu_get_date_previous_than_months "$1"
   }
+  get_time_in_milliseconds() {
+    gnu_get_time_in_milliseconds
+  }
+  get_iso8601_timestamp() {
+    gnu_get_iso8601_timestamp
+  }
   test_tcp_connectivity() {
     gnu_test_tcp_connectivity "$1" "$2" "$3"
   }
 elif [[ "$OSTYPE" == "darwin"* ]]; then
   # BSD/OSX commands compatibility
   TEMP_REPORT_FILE=$(mktemp -t prowler.cred_report-XXXXXX)
-  # It is possible that the user has installed GNU coreutils, replacing the default Mac OS X BSD tools with
-  # GNU coreutils equivalents. Only GNU date allows --version as a valid argument, so use the validity of this argument
+  # It is possible that the user has installed GNU coreutils on OS X. By default, this will make GNU commands
+  # available with a 'g' prefix, e.g. 'gdate'. Test if this is present, and use it if so, as it supports more features.
+  # The user also may have replaced the default Mac OS X BSD tools with the GNU coreutils equivalents.
+  # Only GNU date allows --version as a valid argument, so use the validity of this argument
   # as a means to detect that coreutils is installed and is overriding the default tools
-  if date --version >/dev/null 2>&1 ; then
+  GDATE=$(which gdate)
+  if [ -n "${GDATE}" ]; then
+    DATE_CMD="gdate"
+  fi
+  if "$DATE_CMD" --version >/dev/null 2>&1 ; then
     how_older_from_today() {
       gnu_how_older_from_today "$1"
     }
@@ -139,6 +168,12 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then
     get_date_previous_than_months() {
       gnu_get_date_previous_than_months "$1"
     }
+    get_time_in_milliseconds() {
+      gnu_get_time_in_milliseconds
+    }
+    get_iso8601_timestamp() {
+      gnu_get_iso8601_timestamp
+    }
   else
     how_older_from_today() {
       bsd_how_older_from_today "$1"
@@ -155,6 +190,12 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then
     get_date_previous_than_months() {
       bsd_get_date_previous_than_months "$1"
     }
+    get_time_in_milliseconds() {
+      bsd_get_time_in_milliseconds
+    }
+    get_iso8601_timestamp() {
+      bsd_get_iso8601_timestamp
+    }
   fi
   test_tcp_connectivity() {
     bsd_test_tcp_connectivity "$1" "$2" "$3"
@@ -177,6 +218,12 @@ elif [[ "$OSTYPE" == "cygwin" ]]; then
   get_date_previous_than_months() {
     gnu_get_date_previous_than_months "$1"
   }
+  get_time_in_milliseconds() {
+    gnu_get_time_in_milliseconds
+  }
+  get_iso8601_timestamp() {
+    gnu_get_iso8601_timestamp
+  }
   test_tcp_connectivity() {
     gnu_test_tcp_connectivity "$1" "$2" "$3"
   }

include/outputs: 113 lines changed
@@ -27,26 +27,28 @@ textPass(){
   fi
 
   PASS_COUNTER=$((PASS_COUNTER+1))
-  if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then
-    if [[ $2 ]]; then
-      REPREGION=$2
-    else
-      REPREGION=$REGION
-    fi
-    if [[ "${MODES[@]}" =~ "csv" ]]; then
-      echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV
-    fi
-    if [[ "${MODES[@]}" =~ "json" ]]; then
-      generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON
-    fi
-    if [[ "${MODES[@]}" =~ "json-asff" ]]; then
-      JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL")
-      echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF
-      if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then
-        sendToSecurityHub "${JSON_ASFF_OUTPUT}"
-      fi
-    fi
-  else
+  if [[ $2 ]]; then
+    REPREGION=$2
+  else
+    REPREGION=$REGION
+  fi
+  if [[ "${MODES[@]}" =~ "csv" ]]; then
+    echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV
+  fi
+  if [[ "${MODES[@]}" =~ "json" ]]; then
+    generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON
+  fi
+  if [[ "${MODES[@]}" =~ "json-asff" ]]; then
+    JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL")
+    echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF
+    if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then
+      sendToSecurityHub "${JSON_ASFF_OUTPUT}"
+    fi
+  fi
+  if is_junit_output_enabled; then
+    output_junit_success "$1"
+  fi
+  if [[ "${MODES[@]}" =~ "text" ]]; then
     echo " $OK PASS!$NORMAL $1"
   fi
 }
@@ -56,19 +58,21 @@ textInfo(){
     return
   fi
 
-  if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then
-    if [[ $2 ]]; then
-      REPREGION=$2
-    else
-      REPREGION=$REGION
-    fi
-    if [[ "${MODES[@]}" =~ "csv" ]]; then
-      echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV
-    fi
-    if [[ "${MODES[@]}" =~ "json" ]]; then
-      generateJsonOutput "$1" "Info" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON
-    fi
-  else
+  if [[ $2 ]]; then
+    REPREGION=$2
+  else
+    REPREGION=$REGION
+  fi
+  if [[ "${MODES[@]}" =~ "csv" ]]; then
+    echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV
+  fi
+  if [[ "${MODES[@]}" =~ "json" ]]; then
+    generateJsonOutput "$1" "Info" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON
+  fi
+  if is_junit_output_enabled; then
+    output_junit_info "$1"
+  fi
+  if [[ "${MODES[@]}" =~ "text" ]]; then
     echo " $NOTICE INFO! $1 $NORMAL"
   fi
 }
@@ -76,26 +80,28 @@ textInfo(){
 textFail(){
   FAIL_COUNTER=$((FAIL_COUNTER+1))
   EXITCODE=3
-  if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then
-    if [[ $2 ]]; then
-      REPREGION=$2
-    else
-      REPREGION=$REGION
-    fi
-    if [[ "${MODES[@]}" =~ "csv" ]]; then
-      echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV
-    fi
-    if [[ "${MODES[@]}" =~ "json" ]]; then
-      generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON
-    fi
-    if [[ "${MODES[@]}" =~ "json-asff" ]]; then
-      JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH")
-      echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF
-      if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then
-        sendToSecurityHub "${JSON_ASFF_OUTPUT}"
-      fi
-    fi
-  else
+  if [[ $2 ]]; then
+    REPREGION=$2
+  else
+    REPREGION=$REGION
+  fi
+  if [[ "${MODES[@]}" =~ "csv" ]]; then
+    echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV
+  fi
+  if [[ "${MODES[@]}" =~ "json" ]]; then
+    generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON
+  fi
+  if [[ "${MODES[@]}" =~ "json-asff" ]]; then
+    JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH")
+    echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF
+    if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then
+      sendToSecurityHub "${JSON_ASFF_OUTPUT}"
+    fi
+  fi
+  if is_junit_output_enabled; then
+    output_junit_failure "$1"
+  fi
+  if [[ "${MODES[@]}" =~ "text" ]]; then
    echo " $BAD FAIL! $1 $NORMAL"
   fi
 }
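Shape of one CSV row emitted by the `echo ... | tee -a` lines above (field values illustrative, and `$SEP` is assumed to be a comma here):

```sh
# PROFILE,ACCOUNT_NUM,REPREGION,TITLE_ID,RESULT,SCORED,LEVEL,TITLE_TEXT,MESSAGE
# default,123456789012,us-east-1,1.1,FAIL,Scored,Level 1,Avoid the use of the root account,Root account used recently
```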
@@ -161,7 +167,7 @@ generateJsonOutput(){
     --arg ITEM_LEVEL "$ITEM_LEVEL" \
     --arg TITLE_ID "$TITLE_ID" \
     --arg REPREGION "$REPREGION" \
-    --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \
+    --arg TIMESTAMP "$(get_iso8601_timestamp)" \
     -n '{
       "Profile": $PROFILE,
       "Account Number": $ACCOUNT_NUM,
@@ -183,20 +189,17 @@ generateJsonAsffOutput(){
   local status=$2
   local severity=$3
   jq -M -c \
-    --arg PROFILE "$PROFILE" \
     --arg ACCOUNT_NUM "$ACCOUNT_NUM" \
     --arg TITLE_TEXT "$TITLE_TEXT" \
     --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \
     --arg UNIQUE_ID "$(LC_ALL=C echo -e "${message}" | tr -cs '[:alnum:]._~-\n' '_')" \
     --arg STATUS "$status" \
     --arg SEVERITY "$severity" \
-    --arg SCORED "$ITEM_SCORED" \
-    --arg ITEM_LEVEL "$ITEM_LEVEL" \
     --arg TITLE_ID "$TITLE_ID" \
     --arg TYPE "$ASFF_TYPE" \
     --arg RESOURCE_TYPE "$ASFF_RESOURCE_TYPE" \
     --arg REPREGION "$REPREGION" \
-    --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \
+    --arg TIMESTAMP "$(get_iso8601_timestamp)" \
     --arg PROWLER_VERSION "$PROWLER_VERSION" \
     -n '{
       "SchemaVersion": "2018-10-08",

prowler: 45 lines changed
@@ -66,7 +66,7 @@ USAGE:
 -f <filterregion> specify an AWS region to run checks against
 (i.e.: us-west-1)
 -m <maxitems> specify the maximum number of items to return for long-running requests (default: 100)
--M <mode> output mode: text (default), mono, json, json-asff, csv. They can be combined, comma separated.
+-M <mode> output mode: text (default), mono, json, json-asff, junit-xml, csv. They can be combined, comma separated.
 (separator is ","; data is on stdout; progress on stderr).
 -k keep the credential report
 -n show check numbers to sort easier
@@ -85,9 +85,9 @@ USAGE:
 (i.e.: 123456789012)
 -R role name to assume in the account, requires -A and -T
 (i.e.: ProwlerRole)
--T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T
+-T session duration given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T
 (i.e.: 43200)
--I External ID to be used when assuming roles (no mandatory), requires -A and -R.
+-I External ID to be used when assuming roles (not mandatory), requires -A and -R.
 -h this help
 "
 exit
@@ -207,6 +207,7 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT
 . $PROWLER_DIR/include/assume_role
 . $PROWLER_DIR/include/connection_tests
 . $PROWLER_DIR/include/securityhub_integration
+. $PROWLER_DIR/include/junit_integration
 
 # Get a list of all available AWS Regions
 REGIONS=$($AWSCLI ec2 describe-regions --query 'Regions[].RegionName' \
@@ -277,8 +278,8 @@ show_group_title() {
 
 # Function to execute the check
 execute_check() {
   # See if this is an alternate name for a check
   # for example, we might have been passed 1.01 which is another name for 1.1
   local alternate_name_var=CHECK_ALTERNATE_$1
   local alternate_name=${!alternate_name_var}
   # See if this check defines an ASFF Type, if so, use this, falling back to a sane default
@@ -291,16 +292,23 @@ execute_check() {
   ASFF_RESOURCE_TYPE="${!asff_resource_type_var:-AwsAccount}"
   # Generate the credential report, only if it is group1 related which checks we
   # run so that the checks can safely assume it's available
   if [ ${alternate_name} ];then
     if [[ ${alternate_name} == check1* || ${alternate_name} == extra71 ]];then
       if [ ! -s $TEMP_REPORT_FILE ];then
         genCredReport
         saveReport
       fi
     fi
     show_check_title ${alternate_name}
-    ${alternate_name}
+    if is_junit_output_enabled; then
+      prepare_junit_check_output "$1"
+    fi
+    # Execute the check
+    ${alternate_name}
+    if is_junit_output_enabled; then
+      finalise_junit_check_output "$1"
+    fi
   else
     # Check to see if this is a real check
     local check_id_var=CHECK_ID_$1
     local check_id=${!check_id_var}
@@ -311,13 +319,20 @@ execute_check() {
         saveReport
       fi
     fi
     show_check_title $1
-    $1
+    if is_junit_output_enabled; then
+      prepare_junit_check_output "$1"
+    fi
+    # Execute the check
+    $1
+    if is_junit_output_enabled; then
+      finalise_junit_check_output "$1"
+    fi
   else
     textFail "ERROR! Use a valid check name (i.e. check41 or extra71)";
     exit $EXITCODE
   fi
   fi
 }
 
 # Function to execute all checks in a group
@@ -447,7 +462,7 @@ if [[ $PRINTGROUPSONLY == "1" ]]; then
 fi
 
 # Check that jq is installed for JSON outputs
-if [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then
+if [[ ${MODES[@]} =~ "json" || ${MODES[@]} =~ "json-asff" ]]; then
   . $PROWLER_DIR/include/jq_detector
 fi
 
@@ -455,6 +470,10 @@ if [[ "$SEND_TO_SECURITY_HUB" -eq 1 ]]; then
   checkSecurityHubCompatibility
 fi
 
+if is_junit_output_enabled; then
+  prepare_junit_output
+fi
+
 # Gather account data / test aws cli connectivity
 getWhoami
 
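Typical CI wiring for the new pieces (illustrative; a Jenkins-style JUnit report collector is assumed):

```sh
./prowler -M text,junit-xml -q
EXIT=$?                                        # textFail sets the exit code to 3 on any FAIL
# point the CI's JUnit publisher at junit-reports/TEST-*.xml
[ $EXIT -eq 0 ] || [ $EXIT -eq 3 ] || exit $EXIT   # only hard errors break the build
```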