Merge branch 'master' into dlpzx-master

This commit is contained in:
Toni de la Fuente
2020-11-16 18:31:18 +01:00
committed by GitHub
16 changed files with 492 additions and 43 deletions

View File

@@ -9,7 +9,9 @@
- [Screenshots](#screenshots)
- [Advanced Usage](#advanced-usage)
- [Security Hub integration](#security-hub-integration)
- [Fix](#fix)
- [CodeBuild deployment](#codebuild-deployment)
- [Whitelist/allowlist or remove FAIL from resources](#whitelist-allowlist-or-remove-fail-from-resources)
- [Fix](#how-to-fix-every-fail)
- [Troubleshooting](#troubleshooting)
- [Extras](#extras)
- [Forensics Ready Checks](#forensics-ready-checks)
@@ -381,7 +383,11 @@ To use Prowler and Security Hub integration in China regions there is an additio
./prowler -r cn-north-1 -f cn-north-1 -q -S -M csv,json-asff
```
## Whitelist or remove FAIL from resources
## CodeBuild deployment
CodeBuild can help you running Prowler and there is a Cloud Formation template that helps you doing that [here](https://github.com/toniblyx/prowler/blob/master/util/codebuild/codebuild-auditor-account-cfn.yaml).
## Whitelist or allowlist or remove a fail from resources
Sometimes you may find resources that are intentionally configured in a way that would normally be a bad practice but is acceptable in your environment, for example an S3 bucket open to the internet hosting a web site, or a security group with an open port needed in your use case. Now you can use `-w whitelist_sample.txt` and add your resources as `checkID:resourcename` as in this command:

38
checks/check_extra7115 Normal file
View File

@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
CHECK_ID_extra7115="7.115"
CHECK_TITLE_extra7115="[extra7115] Check if Glue database connection has SSL connection enabled."
CHECK_SCORED_extra7115="NOT_SCORED"
CHECK_TYPE_extra7115="EXTRA"
CHECK_SEVERITY_extra7115="Medium"
CHECK_ASFF_RESOURCE_TYPE_extra7115="AwsGlue"
CHECK_ALTERNATE_check7115="extra7115"
# extra7115: for every region, list Glue connections and report FAIL for any
# whose JDBC_ENFORCE_SSL connection property is the string "false".
extra7115(){
for regx in $REGIONS; do
# One JSON object per connection: its Name and the JDBC_ENFORCE_SSL property
# (projected as "SSL"). Non-JDBC connections have no such property, so SSL is null.
CONNECTION_LIST=$($AWSCLI glue get-connections $PROFILE_OPT --region $regx --output json --query 'ConnectionList[*].{Name:Name,SSL:ConnectionProperties.JDBC_ENFORCE_SSL}')
if [[ $CONNECTION_LIST != '[]' ]]; then
# base64-encode each array element so values with whitespace survive the
# word-splitting of the for loop; decoded again below per field.
for connection in $(echo "${CONNECTION_LIST}" | jq -r '.[] | @base64'); do
CONNECTION_NAME=$(echo $connection | base64 --decode | jq -r '.Name' )
CONNECTION_SSL_STATE=$(echo $connection | base64 --decode | jq -r '.SSL')
# NOTE(review): when JDBC_ENFORCE_SSL is absent, jq emits the string "null",
# which does not match "false" and therefore reports PASS — confirm that
# non-JDBC connections are intended to pass this check.
if [[ "$CONNECTION_SSL_STATE" == "false" ]]; then
textFail "$regx: Glue connection $CONNECTION_NAME has SSL connection disabled" "$regx"
else
textPass "$regx: Glue connection $CONNECTION_NAME has SSL connection enabled" "$regx"
fi
done
else
# Empty ConnectionList for this region.
textInfo "$regx: There are no Glue connections" "$regx"
fi
done
}

30
checks/check_extra7116 Normal file
View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
CHECK_ID_extra7116="7.116"
CHECK_TITLE_extra7116="[extra7116] Check if Glue data-catalog settings have metadata encryption enabled."
CHECK_SCORED_extra7116="NOT_SCORED"
CHECK_TYPE_extra7116="EXTRA"
CHECK_SEVERITY_extra7116="Medium"
CHECK_ASFF_RESOURCE_TYPE_extra7116="AwsGlue"
CHECK_ALTERNATE_check7116="extra7116"

# extra7116: report, per region, whether the Glue Data Catalog encrypts
# metadata at rest. Any CatalogEncryptionMode other than DISABLED passes.
extra7116(){
  local region encryption_mode
  for region in $REGIONS; do
    # Text output yields the bare CatalogEncryptionMode value (e.g. DISABLED, SSE-KMS).
    encryption_mode=$($AWSCLI glue get-data-catalog-encryption-settings $PROFILE_OPT --region "$region" --output text --query "DataCatalogEncryptionSettings.EncryptionAtRest.CatalogEncryptionMode")
    if [[ "$encryption_mode" != "DISABLED" ]]; then
      textPass "$region: Glue data catalog settings have metadata encryption enabled" "$region"
    else
      textFail "$region: Glue data catalog settings have metadata encryption disabled" "$region"
    fi
  done
}

30
checks/check_extra7117 Normal file
View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
CHECK_ID_extra7117="7.117"
CHECK_TITLE_extra7117="[extra7117] Check if Glue data catalog settings have encrypt connection password enabled."
CHECK_SCORED_extra7117="NOT_SCORED"
CHECK_TYPE_extra7117="EXTRA"
CHECK_SEVERITY_extra7117="Medium"
CHECK_ASFF_RESOURCE_TYPE_extra7117="AwsGlue"
CHECK_ALTERNATE_check7117="extra7117"

# extra7117: report, per region, whether Glue Data Catalog connection
# passwords are stored encrypted (ReturnConnectionPasswordEncrypted flag).
extra7117(){
  local region password_encrypted
  for region in $REGIONS; do
    # AWS CLI text output renders the boolean as "True"/"False" (capitalized).
    password_encrypted=$($AWSCLI glue get-data-catalog-encryption-settings $PROFILE_OPT --region "$region" --output text --query "DataCatalogEncryptionSettings.ConnectionPasswordEncryption.ReturnConnectionPasswordEncrypted")
    if [[ "$password_encrypted" != "False" ]]; then
      textPass "$region: Glue data catalog connection password is encrypted" "$region"
    else
      textFail "$region: Glue data catalog connection password is not encrypted" "$region"
    fi
  done
}

50
checks/check_extra7118 Normal file
View File

@@ -0,0 +1,50 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
CHECK_ID_extra7118="7.118"
CHECK_TITLE_extra7118="[extra7118] Check if Glue ETL Jobs have S3 encryption enabled."
CHECK_SCORED_extra7118="NOT_SCORED"
CHECK_TYPE_extra7118="EXTRA"
CHECK_SEVERITY_extra7118="Medium"
CHECK_ASFF_RESOURCE_TYPE_extra7118="AwsGlue"
CHECK_ALTERNATE_check7118="extra7118"
# extra7118: for every region, check each Glue job for S3 encryption, which can
# come from either an attached security configuration or the job's
# "--encryption-type" default argument. A job fails only when neither is set.
extra7118(){
for regx in $REGIONS; do
# One JSON object per job: Name, optional SecurityConfiguration name, and the
# optional "--encryption-type" default argument (projected as JobEncryption).
JOB_LIST=$($AWSCLI glue get-jobs $PROFILE_OPT --region $regx --output json --query 'Jobs[*].{Name:Name,SecurityConfiguration:SecurityConfiguration,JobEncryption:DefaultArguments."--encryption-type"}')
if [[ $JOB_LIST != '[]' ]]; then
# base64-encode each element so whitespace in values survives the word-split loop.
for job in $(echo "${JOB_LIST}" | jq -r '.[] | @base64'); do
JOB_NAME=$(echo $job | base64 --decode | jq -r '.Name')
# "// empty" collapses null to an empty string so -z / ! -z tests work.
SECURITY_CONFIGURATION=$(echo $job | base64 --decode | jq -r '.SecurityConfiguration // empty')
JOB_ENCRYPTION=$(echo $job | base64 --decode | jq -r '.JobEncryption // empty')
if [[ ! -z "$SECURITY_CONFIGURATION" ]]; then
# The security configuration takes precedence: look up its S3 encryption mode.
S3_ENCRYPTION=$($AWSCLI glue get-security-configuration --name "${SECURITY_CONFIGURATION}" $PROFILE_OPT --region $regx --output text --query 'SecurityConfiguration.EncryptionConfiguration.S3Encryption[0].S3EncryptionMode')
if [[ "$S3_ENCRYPTION" == "DISABLED" ]]; then
# Security configuration leaves S3 unencrypted; the job-level
# "--encryption-type" argument can still provide encryption.
if [[ ! -z "$JOB_ENCRYPTION" ]]; then
textPass "$regx: Glue job $JOB_NAME does have $JOB_ENCRYPTION for S3 encryption enabled" "$regx"
else
textFail "$regx: Glue job $JOB_NAME does not have S3 encryption enabled" "$regx"
fi
else
textPass "$regx: Glue job $JOB_NAME does have $S3_ENCRYPTION for S3 encryption enabled" "$regx"
fi
elif [[ ! -z "$JOB_ENCRYPTION" ]]; then
# No security configuration, but the job sets "--encryption-type" itself.
textPass "$regx: Glue job $JOB_NAME does have $JOB_ENCRYPTION for S3 encryption enabled" "$regx"
else
textFail "$regx: Glue job $JOB_NAME does not have S3 encryption enabled" "$regx"
fi
done
else
textInfo "$regx: There are no Glue jobs" "$regx"
fi
done
}

43
checks/check_extra7120 Normal file
View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
CHECK_ID_extra7120="7.120"
CHECK_TITLE_extra7120="[extra7120] Check if Glue ETL Jobs have CloudWatch Logs encryption enabled."
CHECK_SCORED_extra7120="NOT_SCORED"
CHECK_TYPE_extra7120="EXTRA"
CHECK_SEVERITY_extra7120="Medium"
CHECK_ASFF_RESOURCE_TYPE_extra7120="AwsGlue"
CHECK_ALTERNATE_check7120="extra7120"

# extra7120: for every region, inspect each Glue job's security configuration
# and report FAIL when CloudWatch Logs encryption is disabled or when the job
# has no security configuration at all.
extra7120(){
  local region job_list job_record job_name sec_config cw_mode
  for region in $REGIONS; do
    # One JSON object per job: Name plus its optional SecurityConfiguration name.
    job_list=$($AWSCLI glue get-jobs $PROFILE_OPT --region "$region" --output json --query 'Jobs[*].{Name:Name,SecurityConfiguration:SecurityConfiguration}')
    if [[ "$job_list" == '[]' ]]; then
      textInfo "$region: There are no Glue jobs" "$region"
      continue
    fi
    # base64-encode each element so whitespace in values survives the word-split loop.
    for job_record in $(echo "$job_list" | jq -r '.[] | @base64'); do
      job_name=$(echo "$job_record" | base64 --decode | jq -r '.Name')
      # "// empty" collapses a null SecurityConfiguration to an empty string.
      sec_config=$(echo "$job_record" | base64 --decode | jq -r '.SecurityConfiguration // empty')
      if [[ -n "$sec_config" ]]; then
        cw_mode=$($AWSCLI glue get-security-configuration --name "$sec_config" $PROFILE_OPT --region "$region" --output text --query 'SecurityConfiguration.EncryptionConfiguration.CloudWatchEncryption.CloudWatchEncryptionMode')
        if [[ "$cw_mode" == "DISABLED" ]]; then
          textFail "$region: Glue job $job_name does not have CloudWatch Logs encryption enabled" "$region"
        else
          textPass "$region: Glue job $job_name does have $cw_mode CloudWatch Logs encryption enabled" "$region"
        fi
      else
        # No security configuration attached means no CloudWatch Logs encryption.
        textFail "$region: Glue job $job_name does not have CloudWatch Logs encryption enabled" "$region"
      fi
    done
  done
}

43
checks/check_extra7122 Normal file
View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
CHECK_ID_extra7122="7.122"
CHECK_TITLE_extra7122="[extra7122] Check if Glue ETL Jobs have Job bookmark encryption enabled."
CHECK_SCORED_extra7122="NOT_SCORED"
CHECK_TYPE_extra7122="EXTRA"
CHECK_SEVERITY_extra7122="Medium"
CHECK_ASFF_RESOURCE_TYPE_extra7122="AwsGlue"
CHECK_ALTERNATE_check7122="extra7122"
# extra7122: for every region, inspect each Glue job's security configuration
# and report FAIL when job-bookmark encryption is disabled or when the job has
# no security configuration at all.
extra7122(){
for regx in $REGIONS; do
# One JSON object per job: Name plus its optional SecurityConfiguration name.
JOB_LIST=$($AWSCLI glue get-jobs $PROFILE_OPT --region $regx --output json --query 'Jobs[*].{Name:Name,SecurityConfiguration:SecurityConfiguration}')
if [[ $JOB_LIST != '[]' ]]; then
# base64-encode each element so whitespace in values survives the word-split loop.
for job in $(echo "${JOB_LIST}" | jq -r '.[] | @base64'); do
JOB_NAME=$(echo $job | base64 --decode | jq -r '.Name')
# "// empty" collapses a null SecurityConfiguration to an empty string.
SECURITY_CONFIGURATION=$(echo $job | base64 --decode | jq -r '.SecurityConfiguration // empty')
if [[ ! -z "$SECURITY_CONFIGURATION" ]]; then
JOB_BOOKMARK_ENCRYPTION=$($AWSCLI glue get-security-configuration --name "${SECURITY_CONFIGURATION}" $PROFILE_OPT --region $regx --output text --query 'SecurityConfiguration.EncryptionConfiguration.JobBookmarksEncryption.JobBookmarksEncryptionMode')
if [[ "$JOB_BOOKMARK_ENCRYPTION" == "DISABLED" ]]; then
textFail "$regx: Glue job $JOB_NAME does not have Job bookmark encryption enabled" "$regx"
else
textPass "$regx: Glue job $JOB_NAME does have $JOB_BOOKMARK_ENCRYPTION for Job bookmark encryption enabled" "$regx"
fi
else
# No security configuration attached means no job-bookmark encryption.
textFail "$regx: Glue job $JOB_NAME does not have Job bookmark encryption enabled" "$regx"
fi
done
else
textInfo "$regx: There are no Glue jobs" "$regx"
fi
done
}

View File

@@ -47,7 +47,8 @@ extra764(){
# checking if $TEMP_STP_POLICY_FILE is a valid json before converting it to json with jq
policy_str=$(cat "$TEMP_STP_POLICY_FILE")
if jq -e . >/dev/null 2>&1 <<< "$policy_str"; then
CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}" '.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "false")')
CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}" \
'.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Effect=="Deny" and (.Action=="s3:*" or .Action=="*") and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "false")')
if [[ $CHECK_BUCKET_STP_POLICY_PRESENT ]]; then
textPass "Bucket $bucket has S3 bucket policy to deny requests over insecure transport"
else

View File

@@ -24,7 +24,7 @@ extra78(){
# "Ensure there are no Public Accessible RDS instances (Not Scored) (Not part of CIS benchmark)"
textInfo "Looking for RDS instances in all regions... "
for regx in $REGIONS; do
LIST_OF_RDS_PUBLIC_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[?PubliclyAccessible==`true` && DBInstanceStatus=="available"].[DBInstanceIdentifier,Endpoint.Address]' --output text)
LIST_OF_RDS_PUBLIC_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[?PubliclyAccessible==`true` && DBInstanceStatus==`"available"`].[DBInstanceIdentifier,Endpoint.Address]' --output text)
if [[ $LIST_OF_RDS_PUBLIC_INSTANCES ]];then
while read -r rds_instance;do
RDS_NAME=$(echo $rds_instance | awk '{ print $1; }')

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Prowler - the handy cloud security tool (copyright 2222) by Toni de la Fuente
# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
@@ -15,5 +15,4 @@ GROUP_ID[24]='glue'
GROUP_NUMBER[24]='24.0'
GROUP_TITLE[24]='Amazon Glue related security checks - [glue] ******************'
GROUP_RUN_BY_DEFAULT[24]='N' # run it when execute_all is called
GROUP_CHECKS[24]='extra7114,extra7115,extra7116,extra7117,extra7118,extra7119,extra7120,extra7121,extra7122'
GROUP_CHECKS[24]='extra7114,extra7115,extra7116,extra7117,extra7118,extra7119,extra7120,extra7121,extra7122'

View File

@@ -58,7 +58,7 @@ addHtmlHeader() {
<li class="list-group-item">
<b>Date:</b> $TIMESTAMP
</li>
<li class="list-group-item">
<li class="list-group-item text-center">
<a href="$HTML_LOGO_URL"><img src="$HTML_LOGO_IMG"
alt="prowler-logo"></a>
</li>

View File

@@ -14,6 +14,11 @@
# Generates JUnit XML reports which can be read by Jenkins or other CI tools
JUNIT_OUTPUT_DIRECTORY="junit-reports"
JUNIT_TESTS_COUNT="0"
JUNIT_SUCCESS_COUNT="0"
JUNIT_FAILURES_COUNT="0"
JUNIT_SKIPPED_COUNT="0"
JUNIT_ERRORS_COUNT="0"
is_junit_output_enabled() {
if [[ " ${MODES[@]} " =~ " junit-xml " ]]; then
@@ -44,7 +49,7 @@ prepare_junit_check_output() {
JUNIT_OUTPUT_FILE="$JUNIT_OUTPUT_DIRECTORY/TEST-$1.xml"
printf '%s\n' \
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>" \
"<testsuite name=\"$(xml_escape "$(get_junit_classname)")\" timestamp=\"$(get_iso8601_timestamp)\">" \
"<testsuite name=\"$(xml_escape "$(get_junit_classname)")\" tests=\"_TESTS_COUNT_\" failures=\"_FAILURES_COUNT_\" skipped=\"_SKIPPED_COUNT_\" errors=\"_ERRORS_COUNT_\" timestamp=\"$(get_iso8601_timestamp)\">" \
" <properties>" \
" <property name=\"prowler.version\" value=\"$(xml_escape "$PROWLER_VERSION")\"/>" \
" <property name=\"aws.profile\" value=\"$(xml_escape "$PROFILE")\"/>" \
@@ -60,10 +65,21 @@ prepare_junit_check_output() {
}
finalise_junit_check_output() {
# Calculate Total and populate summary info
JUNIT_TESTS_COUNT=$((JUNIT_SUCCESS_COUNT+$JUNIT_FAILURES_COUNT+$JUNIT_SKIPPED_COUNT+$JUNIT_ERRORS_COUNT))
sed "s/_TESTS_COUNT_/${JUNIT_TESTS_COUNT}/g;s/_FAILURES_COUNT_/${JUNIT_FAILURES_COUNT}/g;s/_SKIPPED_COUNT_/${JUNIT_SKIPPED_COUNT}/g;s/_ERRORS_COUNT_/${JUNIT_ERRORS_COUNT}/g" "$JUNIT_OUTPUT_FILE" > "$JUNIT_OUTPUT_FILE.$$"
mv "$JUNIT_OUTPUT_FILE.$$" "$JUNIT_OUTPUT_FILE"
echo '</testsuite>' >> "$JUNIT_OUTPUT_FILE"
# Reset global counters as test output closed
JUNIT_TESTS_COUNT="0"
JUNIT_SUCCESS_COUNT="0"
JUNIT_FAILURES_COUNT="0"
JUNIT_SKIPPED_COUNT="0"
JUNIT_ERRORS_COUNT="0"
}
output_junit_success() {
((JUNIT_SUCCESS_COUNT++))
output_junit_test_case "$1" "<system-out>$(xml_escape "$1")</system-out>"
}
@@ -73,10 +89,12 @@ output_junit_info() {
}
output_junit_failure() {
((JUNIT_FAILURES_COUNT++))
output_junit_test_case "$1" "<failure message=\"$(xml_escape "$1")\"/>"
}
output_junit_skipped() {
((JUNIT_SKIPPED_COUNT++))
output_junit_test_case "$1" "<skipped message=\"$(xml_escape "$1")\"/>"
}

View File

@@ -104,26 +104,10 @@ gnu_get_iso8601_timestamp() {
"$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ"
}
gsu_get_iso8601_one_minute_ago() {
"$DATE_CMD" -d "1 minute ago" -u +"%Y-%m-%dT%H:%M:%SZ"
}
gsu_get_iso8601_hundred_days_ago() {
"$DATE_CMD" -d "100 days ago" -u +"%Y-%m-%dT%H:%M:%SZ"
}
bsd_get_iso8601_timestamp() {
"$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ"
}
bsd_get_iso8601_hundred_days_ago() {
"$DATE_CMD" -v-100d -u +"%Y-%m-%dT%H:%M:%SZ"
}
bsd_get_iso8601_one_minute_ago() {
"$DATE_CMD" -v-1M -u +"%Y-%m-%dT%H:%M:%SZ"
}
gnu_test_tcp_connectivity() {
HOST=$1
PORT=$2
@@ -167,12 +151,6 @@ if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then
get_iso8601_timestamp() {
gnu_get_iso8601_timestamp
}
get_iso8601_one_minute_ago() {
gsu_get_iso8601_one_minute_ago
}
get_iso8601_hundred_days_ago() {
gsu_get_iso8601_hundred_days_ago
}
test_tcp_connectivity() {
gnu_test_tcp_connectivity "$1" "$2" "$3"
}
@@ -230,12 +208,6 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then
get_iso8601_timestamp() {
bsd_get_iso8601_timestamp
}
get_iso8601_one_minute_ago() {
bsd_get_iso8601_one_minute_ago
}
get_iso8601_hundred_days_ago() {
bsd_get_iso8601_hundred_days_ago
}
fi
if "$BASE64_CMD" --version >/dev/null 2>&1 ; then
decode_report() {

View File

@@ -14,7 +14,6 @@
# Checks that the correct mode (json-asff) has been specified if wanting to send check output to AWS Security Hub
# and that Security Hub is enabled in the chosen region
checkSecurityHubCompatibility(){
OLD_TIMESTAMP=$(get_iso8601_one_minute_ago)
local regx
if [[ "${MODE}" != "json-asff" ]]; then
@@ -48,16 +47,16 @@ resolveSecurityHubPreviousFails(){
local check="$1"
NEW_TIMESTAMP=$(get_iso8601_timestamp)
PREVIOUS_DATE=$(get_iso8601_hundred_days_ago)
FILTER="{\"GeneratorId\":[{\"Value\": \"prowler-$check\",\"Comparison\":\"EQUALS\"}],\"RecordState\":[{\"Value\": \"ACTIVE\",\"Comparison\":\"EQUALS\"}]}"
NEW_FINDING_IDS=$(echo -n "${SECURITYHUB_NEW_FINDINGS_IDS[@]}" | jq -cRs 'split(" ")')
SECURITY_HUB_PREVIOUS_FINDINGS=$($AWSCLI securityhub --region "$regx" $PROFILE_OPT get-findings --filters "${FILTER}" | jq -c --argjson ids "$NEW_FINDING_IDS" --arg updated_at $NEW_TIMESTAMP '[ .Findings[] | select( .Id| first(select($ids[] == .)) // false | not) | .RecordState = "ARCHIVED" | .UpdatedAt = $updated_at ]')
FILTER="{\"UpdatedAt\":[{\"Start\":\"$PREVIOUS_DATE\",\"End\":\"$OLD_TIMESTAMP\"}],\"GeneratorId\":[{\"Value\": \"prowler-$check\",\"Comparison\":\"PREFIX\"}],\"ComplianceStatus\":[{\"Value\": \"FAILED\",\"Comparison\":\"EQUALS\"}],\"RecordState\":[{\"Value\": \"ACTIVE\",\"Comparison\":\"EQUALS\"}]}"
SECURITY_HUB_PREVIOUS_FINDINGS=$($AWSCLI securityhub --region "$regx" $PROFILE_OPT get-findings --filters "${FILTER}" | jq -c --arg updated_at $NEW_TIMESTAMP '[ .Findings[] | .RecordState = "ARCHIVED" | .UpdatedAt = $updated_at ]')
if [[ $SECURITY_HUB_PREVIOUS_FINDINGS != "[]" ]]; then
FINDINGS_COUNT=$(echo $SECURITY_HUB_PREVIOUS_FINDINGS | jq '. | length')
for i in `seq 0 100 $FINDINGS_COUNT`;
do
# Import in batches of 100
BATCH_FINDINGS=$(echo $SECURITY_HUB_PREVIOUS_FINDINGS | jq '.['"$i:$i+100"']')
BATCH_FINDINGS=$(echo $SECURITY_HUB_PREVIOUS_FINDINGS | jq -c '.['"$i:$i+100"']')
BATCH_IMPORT_RESULT=$($AWSCLI securityhub --region "$regx" $PROFILE_OPT batch-import-findings --findings "${BATCH_FINDINGS}")
if [[ -z "${BATCH_IMPORT_RESULT}" ]] || jq -e '.FailedCount >= 1' <<< "${BATCH_IMPORT_RESULT}" > /dev/null 2>&1; then
echo -e "\n$RED ERROR!$NORMAL Failed to send check output to AWS Security Hub\n"
@@ -73,6 +72,8 @@ sendToSecurityHub(){
local findings="$1"
local region="$2"
local finding_id=$(echo ${findings} | jq -r .Id )
SECURITYHUB_NEW_FINDINGS_IDS+=( "$finding_id" )
BATCH_IMPORT_RESULT=$($AWSCLI securityhub --region "$region" $PROFILE_OPT batch-import-findings --findings "${findings}")
# Check for success if imported

View File

@@ -331,6 +331,8 @@ execute_check() {
ASFF_RESOURCE_TYPE="${!asff_resource_type_var:-AwsAccount}"
SECURITYHUB_NEW_FINDINGS_IDS=()
# Generate the credential report, only if it is group1 related which checks we
# run so that the checks can safely assume it's available
# set the custom ignores list for this check

View File

@@ -0,0 +1,216 @@
---
AWSTemplateFormatVersion: 2010-09-09
Description: Creates a CodeBuild project to audit the AWS account with Prowler and stores the html report in a S3 bucket / Original author https://github.com/stevecjones
Parameters:
ServiceName:
Description: 'Specifies the service name used within component naming'
Type: String
Default: 'prowler'
LogsRetentionInDays:
Description: 'Specifies the number of days you want to retain CodeBuild run log events in the specified log group. Junit reports are kept for 30 days'
Type: Number
Default: 3
AllowedValues: [1, 3, 5, 7, 14, 30, 60]
ProwlerOptions:
Description: 'Options to pass to Prowler command, make sure at least -M junit-xml is used. -r for the region to send API queries, -f to filter only that region, -M output formats, -c for comma separated checks, for all checks do not use -c, for more options see -h'
Type: String
Default: -r eu-west-1 -f eu-west-1 -M text,junit-xml,html -c check11,check12,check13,check14
Resources:
ArtifactBucket:
Type: AWS::S3::Bucket
Properties:
Tags:
- Key: Name
Value: !Join ['-', ['AP2', 'INF', !Ref 'ServiceName', !Ref 'AWS::AccountId', 'S3', 'Prowler']]
BucketName: !Sub '${ServiceName}-${AWS::Region}-prowler-${AWS::AccountId}'
AccessControl: LogDeliveryWrite
VersioningConfiguration:
Status: Enabled
# LoggingConfiguration:
# DestinationBucketName: !ImportValue 'ProviderLogBucket'
# LogFilePrefix: !Sub '${ServiceName}-${AWS::Region}-prowler-${AWS::AccountId}/'
BucketEncryption:
ServerSideEncryptionConfiguration:
- ServerSideEncryptionByDefault:
SSEAlgorithm: AES256
PublicAccessBlockConfiguration:
BlockPublicAcls: true
BlockPublicPolicy: true
IgnorePublicAcls: true
RestrictPublicBuckets: true
ArtifactBucketPolicy:
Type: AWS::S3::BucketPolicy
Properties:
Bucket: !Ref 'ArtifactBucket'
PolicyDocument:
Id: Content
Version: '2012-10-17'
Statement:
- Action: '*'
Condition:
Bool:
aws:SecureTransport: 'false'
Effect: Deny
Principal: '*'
Resource:
- !Join ['', ['arn:aws:s3:::', !Ref 'ArtifactBucket', '/*']]
Sid: S3ForceSSL
- Action: 's3:PutObject'
Condition:
'Null':
s3:x-amz-server-side-encryption: 'true'
Effect: Deny
Principal: '*'
Resource:
- !Join ['', ['arn:aws:s3:::', !Ref 'ArtifactBucket', '/*']]
Sid: DenyUnEncryptedObjectUploads
# Codebuild Project
CodeBuildServiceRole:
Type: AWS::IAM::Role
Metadata:
cfn_nag:
rules_to_suppress:
- id: W28
reason: "Explicit name is required for this resource to avoid circular dependencies."
Properties:
RoleName: prowler-codebuild-role
Path: '/service-role/'
ManagedPolicyArns:
- 'arn:aws:iam::aws:policy/job-function/SupportUser'
- 'arn:aws:iam::aws:policy/job-function/ViewOnlyAccess'
- 'arn:aws:iam::aws:policy/SecurityAudit'
AssumeRolePolicyDocument:
Version: '2012-10-17'
Statement:
-
Action: 'sts:AssumeRole'
Effect: Allow
Principal:
Service:
- codebuild.amazonaws.com
Policies:
- PolicyName: LogGroup
PolicyDocument:
Version: '2012-10-17'
Statement:
- Action:
- logs:CreateLogGroup
- logs:CreateLogStream
- logs:PutLogEvents
Effect: Allow
Resource: !Sub 'arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/codebuild/*'
- PolicyName: S3
PolicyDocument:
Version: '2012-10-17'
Statement:
- Action:
- s3:PutObject
- s3:GetObject
- s3:GetObjectVersion
- s3:GetBucketAcl
- s3:GetBucketLocation
Effect: Allow
Resource: !Sub 'arn:aws:s3:::${ArtifactBucket}/*'
- PolicyName: CodeBuild
PolicyDocument:
Version: '2012-10-17'
Statement:
- Action:
- codebuild:CreateReportGroup
- codebuild:CreateReport
- codebuild:UpdateReport
- codebuild:BatchPutTestCases
- codebuild:BatchPutCodeCoverages
Effect: Allow
Resource: !Sub 'arn:aws:codebuild:${AWS::Region}:${AWS::AccountId}:report-group/*'
- PolicyName: AssumeRole
PolicyDocument:
Version: '2012-10-17'
Statement:
- Action:
- sts:AssumeRole
Effect: Allow
Resource: !Sub 'arn:aws:iam::${AWS::AccountId}:role/service-role/prowler-codebuild-role'
ProwlerCodeBuild:
Type: AWS::CodeBuild::Project
Properties:
Artifacts:
Type: NO_ARTIFACTS
Source:
Type: NO_SOURCE
# Prowler command below runs a set of checks, configure it based on your needs; with no options it will run all checks in all regions.
# option -M junit-xml is required in order to get the report in CodeBuild.
BuildSpec: |
version: 0.2
phases:
install:
runtime-versions:
python: 3.8
commands:
- echo "Installing Prowler and dependencies..."
- pip3 install detect-secrets
- yum -y install jq
- curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
- unzip awscliv2.zip
- ./aws/install
- git clone https://github.com/toniblyx/prowler
build:
commands:
- echo "Running Prowler..."
- cd prowler
- ./prowler $PROWLER_OPTIONS
post_build:
commands:
- echo "Uploading reports to S3..."
- aws s3 cp --sse AES256 output/*.html s3://$BUCKET_REPORT/
- echo "Done!"
reports:
prowler:
files:
- '**/*'
base-directory: 'prowler/junit-reports'
file-format: JunitXml
Environment:
# BUILD_GENERAL1_SMALL: Use up to 3 GB memory and 2 vCPUs for builds.
# BUILD_GENERAL1_MEDIUM: Use up to 7 GB memory and 4 vCPUs for builds.
# BUILD_GENERAL1_LARGE: Use up to 15 GB memory and 8 vCPUs for builds.
ComputeType: "BUILD_GENERAL1_SMALL"
Image: "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
Type: "LINUX_CONTAINER"
EnvironmentVariables:
- Name: BUCKET_REPORT
Value: !Ref 'ArtifactBucket'
Type: PLAINTEXT
- Name: PROWLER_OPTIONS
Value: !Ref 'ProwlerOptions'
Type: PLAINTEXT
Description: Run Prowler assessment
ServiceRole: !GetAtt CodeBuildServiceRole.Arn
TimeoutInMinutes: 300
ProwlerCodeBuildReportGroup:
Type: AWS::CodeBuild::ReportGroup
Properties:
Name: prowler
Type: TEST
ExportConfig:
ExportConfigType: NO_EXPORT
ProwlerLogGroup:
Type: 'AWS::Logs::LogGroup'
Properties:
LogGroupName: !Sub '/aws/codebuild/${ProwlerCodeBuild}'
RetentionInDays: !Ref LogsRetentionInDays
Outputs:
ArtifactBucketName:
Description: Artifact Bucket Name
Value: !Ref 'ArtifactBucket'
Export:
Name: !Sub 'ArtifactBucketName-${ServiceName}'