From 6ea863ac3b650124b485f72ddeabf5278dfde7d7 Mon Sep 17 00:00:00 2001 From: Philipp Zeuner Date: Sun, 1 Mar 2020 20:26:51 +0100 Subject: [PATCH 001/104] Initial commit --- checks/check_extra777 | 64 +++++++++++++++++++++++++++++++++++++++++++ groups/group7_extras | 2 +- 2 files changed, 65 insertions(+), 1 deletion(-) create mode 100755 checks/check_extra777 diff --git a/checks/check_extra777 b/checks/check_extra777 new file mode 100755 index 00000000..92c9ff94 --- /dev/null +++ b/checks/check_extra777 @@ -0,0 +1,64 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +CHECK_ID_extra777="7.77" +CHECK_TITLE_extra777="[extra777] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra777="NOT_SCORED" +CHECK_TYPE_extra777="EXTRA" +CHECK_ALTERNATE_check776="extra777" + +extra777(){ + CIDR_THRESHOLD=24 + RFC1918_REGEX="(^127\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)" + textInfo "Looking for VPC security groups with wide-open ( Date: Mon, 2 Mar 2020 22:53:32 +0100 Subject: [PATCH 002/104] Updated check_extra777 to fix CHECK_ALTERNATE variable --- checks/check_extra777 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra777 b/checks/check_extra777 index 92c9ff94..2f1a4351 100755 --- a/checks/check_extra777 +++ b/checks/check_extra777 @@ -15,7 +15,7 @@ CHECK_ID_extra777="7.77" CHECK_TITLE_extra777="[extra777] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra777="NOT_SCORED" CHECK_TYPE_extra777="EXTRA" -CHECK_ALTERNATE_check776="extra777" +CHECK_ALTERNATE_check777="extra777" extra777(){ CIDR_THRESHOLD=24 From f149fb7535b0f9e9eed4c38a174fdb0f09177561 Mon Sep 17 00:00:00 2001 From: Philipp Zeuner Date: Sun, 8 Mar 2020 08:15:20 +0100 Subject: [PATCH 003/104] Refactored check name to check_extra778 --- checks/{check_extra777 => check_extra778} | 12 ++++++------ groups/group7_extras | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) rename checks/{check_extra777 => check_extra778} (92%) diff --git a/checks/check_extra777 b/checks/check_extra778 similarity index 92% rename from checks/check_extra777 rename to checks/check_extra778 index 2f1a4351..1bae8215 100755 --- a/checks/check_extra777 +++ b/checks/check_extra778 @@ -11,13 +11,13 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra777="7.77" -CHECK_TITLE_extra777="[extra777] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" -CHECK_SCORED_extra777="NOT_SCORED" -CHECK_TYPE_extra777="EXTRA" -CHECK_ALTERNATE_check777="extra777" +CHECK_ID_extra778="7.77" +CHECK_TITLE_extra778="[extra778] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra778="NOT_SCORED" +CHECK_TYPE_extra778="EXTRA" +CHECK_ALTERNATE_check778="extra778" -extra777(){ +extra778(){ CIDR_THRESHOLD=24 RFC1918_REGEX="(^127\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)" textInfo "Looking for VPC security groups with wide-open ( Date: Sun, 8 Mar 2020 09:20:05 +0100 Subject: [PATCH 004/104] Fixed check_extra788 logic bug related to SECURITY_GROUP and improved check_cidr() isolation --- checks/check_extra778 | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/checks/check_extra778 b/checks/check_extra778 index 1bae8215..4de99cf8 100755 --- a/checks/check_extra778 +++ b/checks/check_extra778 @@ -23,32 +23,31 @@ extra778(){ textInfo "Looking for VPC security groups with wide-open ( Date: Sun, 8 Mar 2020 09:21:17 +0100 Subject: [PATCH 005/104] Updated check_extra778 to exclude 0.0.0.0/0 edge case --- checks/check_extra778 | 3 +++ 1 file changed, 3 insertions(+) diff --git a/checks/check_extra778 b/checks/check_extra778 index 4de99cf8..d6d362ca 100755 --- a/checks/check_extra778 +++ b/checks/check_extra778 @@ -47,6 +47,9 @@ extra778(){ for CIDR_IP in ${CIDR_IP_LIST}; do if [[ ! 
${CIDR_IP} =~ ${RFC1918_REGEX} ]]; then CIDR=$(echo ${CIDR_IP} | cut -d"/" -f2 | xargs) + + # Edge case "0.0.0.0/0" for RDP and SSH are checked already by check41 and check42 + if [[ ${CIDR} < ${CIDR_THRESHOLD} && 0 < ${CIDR} ]]; then textFail "${REGION}: ${SECURITY_GROUP} has potential wide-open non-RFC1918 address ${CIDR_IP} in ${DIRECTION} rule." "${REGION}" fi fi From 1b2b52e6a7eecd691db67b94816f7d30f9735c89 Mon Sep 17 00:00:00 2001 From: Philipp Zeuner Date: Sun, 8 Mar 2020 09:22:11 +0100 Subject: [PATCH 006/104] Fixed check_extra778 reference CHECK_ID --- checks/check_extra778 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra778 b/checks/check_extra778 index d6d362ca..d64a343c 100755 --- a/checks/check_extra778 +++ b/checks/check_extra778 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra778="7.77" +CHECK_ID_extra778="7.78" CHECK_TITLE_extra778="[extra778] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra778="NOT_SCORED" CHECK_TYPE_extra778="EXTRA" From cb5858d08a072342a3264f327f211d123abb24d5 Mon Sep 17 00:00:00 2001 From: Philipp Zeuner Date: Sun, 8 Mar 2020 09:56:52 +0100 Subject: [PATCH 007/104] Updated check_extra778 to use PROFILE_OPT and AWSCLI --- checks/check_extra778 | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/checks/check_extra778 b/checks/check_extra778 index d64a343c..42672348 100755 --- a/checks/check_extra778 +++ b/checks/check_extra778 @@ -37,7 +37,8 @@ extra778(){ ;; esac - CIDR_IP_LIST=$(aws ec2 describe-security-groups \ + CIDR_IP_LIST=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ --filter "Name=group-id,Values=${SECURITY_GROUP}" \ --query "SecurityGroups[*].${DIRECTION_FILTER}[*].IpRanges[*].CidrIp" \ --region ${REGION} \ @@ -57,7 
+58,12 @@ extra778(){ } for regx in ${REGIONS}; do - SECURITY_GROUP_IDS=$(${AWSCLI} ec2 describe-security-groups --region ${regx} --query 'SecurityGroups[*].GroupId' --output text | xargs) + SECURITY_GROUP_IDS=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --region ${regx} \ + --query 'SecurityGroups[*].GroupId' \ + --output text | xargs + ) for SECURITY_GROUP in ${SECURITY_GROUP_IDS}; do check_cidr "${SECURITY_GROUP}" "inbound" "${regx}" check_cidr "${SECURITY_GROUP}" "outbound" "${regx}" From 263926a53b5698e53d69321c1d2d6939801840cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ng=E1=BB=8D=20Anh=20=C4=90=E1=BB=A9c?= Date: Mon, 9 Mar 2020 12:44:23 +0700 Subject: [PATCH 008/104] Improve check21 - Add ISLOGGING_STATUS, INCLUDEMANAGEMENTEVENTS_STATUS, READWRITETYPE_STATUS to check - Remove ` --no-include-shadow-trails ` from CLI 2.1 Ensure CloudTrail is enabled in all regions (Scored): Via CLI 1. ` aws cloudtrail describe-trails ` Ensure `IsMultiRegionTrail` is set to true 2. `aws cloudtrail get-trail-status --name ` Ensure `IsLogging` is set to true 3. `aws cloudtrail get-event-selectors --trail-name ` Ensure there is at least one Event Selector for a Trail with `IncludeManagementEvents` set to `true` and `ReadWriteType` set to `All` --- checks/check21 | 33 +++++++++++++++++++++------------ 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/checks/check21 b/checks/check21 index 383578e3..2d8bc952 100644 --- a/checks/check21 +++ b/checks/check21 @@ -9,7 +9,7 @@ # work. If not, see . 
CHECK_ID_check21="2.1,2.01" -CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" +CHECK_TITLE_check21=" Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" CHECK_ALTERNATE_check201="check21" @@ -17,23 +17,32 @@ CHECK_ALTERNATE_check201="check21" check21(){ trail_count=0 # "Ensure CloudTrail is enabled in all regions (Scored)" + REGIONS=$($AWSCLI cloudtrail describe-trails --query 'trailList[*].HomeRegion' --output text) + result='False' for regx in $REGIONS; do - LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text --no-include-shadow-trails) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text) if [[ $LIST_OF_TRAILS ]];then for trail in $LIST_OF_TRAILS;do - trail_count=$((trail_count + 1)) - MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) - if [[ "$MULTIREGION_TRAIL_STATUS" == 'False' ]];then - textFail "$trail trail in $regx is not enabled in multi region mode" - else - textPass "$trail trail in $regx is enabled for all regions" - fi + trail_count=$((trail_count + 1)) + MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) + ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status --region $regx --name $trail --query ['IsLogging'] --output text) + INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) + READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) + + echo 
$MULTIREGION_TRAIL_STATUS $ISLOGGING_STATUS $INCLUDEMANAGEMENTEVENTS_STATUS $READWRITETYPE_STATUS [ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ] + if [[ "$MULTIREGION_TRAIL_STATUS" == "True" ]] && [[ "$ISLOGGING_STATUS" == "True" ]] && [[ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ]] && [[ "$READWRITETYPE_STATUS" == *"All"* ]];then + textPass "$trail trail in $regx is enabled for all regions" + result='True' + break + fi done fi done - + if [[ $result == 'False' ]]; then + textFail "trail exist but it is not enabled in multi region mode" + fi if [[ $trail_count == 0 ]]; then - textFail "No CloudTrail trails were found in the account" - fi + textFail "No CloudTrail trails were found in the account" + fi } From 3116adf86e10c902879d9d310ba93318e8455bb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ng=E1=BB=8D=20Anh=20=C4=90=E1=BB=A9c?= Date: Mon, 9 Mar 2020 12:46:16 +0700 Subject: [PATCH 009/104] Update check21 --- checks/check21 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check21 b/checks/check21 index 2d8bc952..762d927f 100644 --- a/checks/check21 +++ b/checks/check21 @@ -9,7 +9,7 @@ # work. If not, see . CHECK_ID_check21="2.1,2.01" -CHECK_TITLE_check21=" Ensure CloudTrail is enabled in all regions (Scored)" +CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" CHECK_ALTERNATE_check201="check21" From 53ee538e0fbf3c470bdb7b5963e8da4dec4ce680 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ng=E1=BB=8D=20Anh=20=C4=90=E1=BB=A9c?= Date: Mon, 9 Mar 2020 12:57:00 +0700 Subject: [PATCH 010/104] add $PROFILE_OPT to the CLI --- checks/check21 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/checks/check21 b/checks/check21 index 762d927f..02da72b2 100644 --- a/checks/check21 +++ b/checks/check21 @@ -9,7 +9,7 @@ # work. If not, see . 
CHECK_ID_check21="2.1,2.01" -CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" +CHECK_TITLE_check21=" Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" CHECK_ALTERNATE_check201="check21" @@ -17,7 +17,7 @@ CHECK_ALTERNATE_check201="check21" check21(){ trail_count=0 # "Ensure CloudTrail is enabled in all regions (Scored)" - REGIONS=$($AWSCLI cloudtrail describe-trails --query 'trailList[*].HomeRegion' --output text) + REGIONS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --query 'trailList[*].HomeRegion' --output text) result='False' for regx in $REGIONS; do LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text) @@ -25,9 +25,9 @@ check21(){ for trail in $LIST_OF_TRAILS;do trail_count=$((trail_count + 1)) MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) - ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status --region $regx --name $trail --query ['IsLogging'] --output text) - INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) - READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) + ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status $PROFILE_OPT --region $regx --name $trail --query ['IsLogging'] --output text) + INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) + READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) echo 
$MULTIREGION_TRAIL_STATUS $ISLOGGING_STATUS $INCLUDEMANAGEMENTEVENTS_STATUS $READWRITETYPE_STATUS [ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ] if [[ "$MULTIREGION_TRAIL_STATUS" == "True" ]] && [[ "$ISLOGGING_STATUS" == "True" ]] && [[ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ]] && [[ "$READWRITETYPE_STATUS" == *"All"* ]];then From ba13f25c9e903710a90f2c57be51fc64eb53dd48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ng=E1=BB=8D=20Anh=20=C4=90=E1=BB=A9c?= Date: Mon, 9 Mar 2020 12:57:49 +0700 Subject: [PATCH 011/104] Update check21 --- checks/check21 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check21 b/checks/check21 index 02da72b2..5ec4eff5 100644 --- a/checks/check21 +++ b/checks/check21 @@ -9,7 +9,7 @@ # work. If not, see . CHECK_ID_check21="2.1,2.01" -CHECK_TITLE_check21=" Ensure CloudTrail is enabled in all regions (Scored)" +CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" CHECK_ALTERNATE_check201="check21" From 89514a1fa88f4213f47c0f78954d21be057a84ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ng=E1=BB=8D=20Anh=20=C4=90=E1=BB=A9c?= Date: Mon, 9 Mar 2020 12:59:47 +0700 Subject: [PATCH 012/104] Update check21 --- checks/check21 | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/checks/check21 b/checks/check21 index 5ec4eff5..42b43dbd 100644 --- a/checks/check21 +++ b/checks/check21 @@ -22,20 +22,18 @@ check21(){ for regx in $REGIONS; do LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text) if [[ $LIST_OF_TRAILS ]];then - for trail in $LIST_OF_TRAILS;do - trail_count=$((trail_count + 1)) - MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) - ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status $PROFILE_OPT 
--region $regx --name $trail --query ['IsLogging'] --output text) - INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) - READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) - - echo $MULTIREGION_TRAIL_STATUS $ISLOGGING_STATUS $INCLUDEMANAGEMENTEVENTS_STATUS $READWRITETYPE_STATUS [ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ] - if [[ "$MULTIREGION_TRAIL_STATUS" == "True" ]] && [[ "$ISLOGGING_STATUS" == "True" ]] && [[ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ]] && [[ "$READWRITETYPE_STATUS" == *"All"* ]];then - textPass "$trail trail in $regx is enabled for all regions" - result='True' - break - fi - done + for trail in $LIST_OF_TRAILS;do + trail_count=$((trail_count + 1)) + MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) + ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status $PROFILE_OPT --region $regx --name $trail --query ['IsLogging'] --output text) + INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) + READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) + if [[ "$MULTIREGION_TRAIL_STATUS" == "True" ]] && [[ "$ISLOGGING_STATUS" == "True" ]] && [[ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ]] && [[ "$READWRITETYPE_STATUS" == *"All"* ]];then + textPass "$trail trail in $regx is enabled for all regions" + result='True' + break + fi + done fi done if [[ $result == 'False' ]]; then From 0979f421c3048e0fd5779e2e496d4143ea8a3ef6 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?Ng=E1=BB=8D=20Anh=20=C4=90=E1=BB=A9c?= Date: Mon, 9 Mar 2020 13:00:43 +0700 Subject: [PATCH 013/104] Update check21 --- checks/check21 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check21 b/checks/check21 index 42b43dbd..591b911c 100644 --- a/checks/check21 +++ b/checks/check21 @@ -39,7 +39,7 @@ check21(){ if [[ $result == 'False' ]]; then textFail "trail exist but it is not enabled in multi region mode" fi - if [[ $trail_count == 0 ]]; then + if [[ $trail_count == 0 ]]; then textFail "No CloudTrail trails were found in the account" fi } From 259f24ee06a15707674d748c0f4fa153ac7bdbb1 Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Sun, 22 Mar 2020 10:53:33 +0200 Subject: [PATCH 014/104] check23 - on failure, output info and not failure (cherry picked from commit 168c71cd5f062e67b21c4cd2013992052b9258d2) --- checks/check23 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check23 b/checks/check23 index 53d1b6f6..e337ad4d 100644 --- a/checks/check23 +++ b/checks/check23 @@ -21,7 +21,7 @@ check23(){ for bucket in $CLOUDTRAILBUCKET;do CLOUDTRAILBUCKET_HASALLPERMISIONS=$($AWSCLI s3api get-bucket-acl --bucket $bucket --query 'Grants[?Grantee.URI==`http://acs.amazonaws.com/groups/global/AllUsers`]' $PROFILE_OPT --region $REGION --output text 2>&1) if [[ $(echo "$CLOUDTRAILBUCKET_HASALLPERMISIONS" | grep AccessDenied) ]]; then - textFail "Access Denied Trying to Get Bucket Acl for $bucket" + textInfo "Access Denied Trying to Get Bucket Acl for $bucket" continue fi if [[ $CLOUDTRAILBUCKET_HASALLPERMISIONS ]]; then From b704568b23adaec78da1344109e78d9c85f2e4e9 Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Sun, 22 Mar 2020 10:53:47 +0200 Subject: [PATCH 015/104] check26 - on failure, output info and not failure (cherry picked from commit f80c2e28b72495ca7f60fd0ab58a51d40640a86a) --- checks/check26 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check26 b/checks/check26 index 
83395527..2e828908 100644 --- a/checks/check26 +++ b/checks/check26 @@ -29,7 +29,7 @@ check26(){ if [ "$CLOUDTRAIL_ACCOUNT_ID" == "$ACCOUNT_NUM" ]; then CLOUDTRAILBUCKET_LOGENABLED=$($AWSCLI s3api get-bucket-logging --bucket $bucket $PROFILE_OPT --region $REGION --query 'LoggingEnabled.TargetBucket' --output text 2>&1) if [[ $(echo "$CLOUDTRAILBUCKET_LOGENABLED" | grep AccessDenied) ]]; then - textFail "Access Denied Trying to Get Bucket Logging for $bucket" + textInfo "Access Denied Trying to Get Bucket Logging for $bucket" continue fi if [[ $CLOUDTRAILBUCKET_LOGENABLED != "null" ]]; then From d62027440d0d5cb24e6430ea7b8f5dcb9715490f Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Sun, 22 Mar 2020 11:00:02 +0200 Subject: [PATCH 016/104] extra774 - check correct date, consolidate files and fix report generation (cherry picked from commit 75d66df94061d5cbc738384e74f0a3f42d0d6b37) --- checks/check_extra774 | 2 +- prowler | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/checks/check_extra774 b/checks/check_extra774 index 83e2aa6f..d8dae7a4 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -21,7 +21,7 @@ extra774(){ LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$5 }' |grep true | awk '{ print $1 }') for i in $LIST_USERS_WITH_PASSWORD_ENABLED; do - user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $1 }') + user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$3 }' |grep "^$i " |awk '{ print $1 }') last_login_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $2 }') days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) diff --git a/prowler b/prowler index a3a8db27..0310a197 100755 --- a/prowler +++ b/prowler @@ -260,7 +260,7 @@ execute_check() { local check_id_var=CHECK_ID_$1 local check_id=${!check_id_var} if [ ${check_id} ]; then - if [[ ${check_id} == 1* || ${check_id} == 7.1,7.01 ]];then + if [[ ${check_id} == 1* || ${check_id} 
== 7.1,7.01 || ${check_id} == 7.74 ]];then if [ ! -s $TEMP_REPORT_FILE ];then genCredReport saveReport From 25bc8699b362df5e65c85252862301af53cb7454 Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Sun, 22 Mar 2020 11:14:03 +0200 Subject: [PATCH 017/104] check_extra774 - revert changes (cherry picked from commit 87fd299cdb46f23dd92f4bd2dc99dd0b0db103c2) --- checks/check_extra774 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra774 b/checks/check_extra774 index d8dae7a4..83e2aa6f 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -21,7 +21,7 @@ extra774(){ LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$5 }' |grep true | awk '{ print $1 }') for i in $LIST_USERS_WITH_PASSWORD_ENABLED; do - user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$3 }' |grep "^$i " |awk '{ print $1 }') + user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $1 }') last_login_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $2 }') days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) From 30941c355c6efbcc3b980a7aa4186d7b2af7afbb Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Mon, 23 Mar 2020 14:39:23 +0100 Subject: [PATCH 018/104] Added extra777 - Security Groups with too many rules @renuez --- checks/check_extra777 | 58 +++++++++++++++++++++++++++++++++++++++++++ groups/group7_extras | 2 +- 2 files changed, 59 insertions(+), 1 deletion(-) create mode 100755 checks/check_extra777 diff --git a/checks/check_extra777 b/checks/check_extra777 new file mode 100755 index 00000000..1ccf541f --- /dev/null +++ b/checks/check_extra777 @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +# Current VPC Limit is 120 rules (60 inbound and 60 outbound) +# Reference: https://docs.aws.amazon.com/vpc/latest/userguide/amazon-vpc-limits.html + +CHECK_ID_extra777="7.77" +CHECK_TITLE_extra777="[extra777] Find VPC security groups with many ingress or egress rules (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra777="NOT_SCORED" +CHECK_TYPE_extra777="EXTRA" +CHECK_ALTERNATE_check777="extra777" + +extra777(){ + THRESHOLD=50 + textInfo "Looking for VPC security groups with more than ${THRESHOLD} rules across all regions... " + + for regx in ${REGIONS}; do + SECURITY_GROUP_IDS=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --region ${regx} \ + --query 'SecurityGroups[*].GroupId' \ + --output text | xargs + ) + + for SECURITY_GROUP in ${SECURITY_GROUP_IDS}; do + + INGRESS_TOTAL=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --filter "Name=group-id,Values=${SECURITY_GROUP}" \ + --query "SecurityGroups[*].IpPermissions[*].IpRanges" \ + --region ${regx} \ + --output text | wc -l | xargs + ) + + EGRESS_TOTAL=$(${AWSCLI} ec2 describe-security-groups \ + ${PROFILE_OPT} \ + --filter "Name=group-id,Values=${SECURITY_GROUP}" \ + --query "SecurityGroups[*].IpPermissionsEgress[*].IpRanges" \ + --region ${regx} \ + --output text | wc -l | xargs + ) + + if [[ (${INGRESS_TOTAL} -ge ${THRESHOLD}) || (${EGRESS_TOTAL} -ge ${THRESHOLD}) ]]; then + textFail "${regx}: ${SECURITY_GROUP} has ${INGRESS_TOTAL} inbound rules and ${EGRESS_TOTAL} outbound rules." 
"${regx}" + fi + done + done +} \ No newline at end of file diff --git a/groups/group7_extras b/groups/group7_extras index a1f88799..5af1d58b 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -15,7 +15,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' GROUP_TITLE[7]='Extras - [extras] **********************************************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called -GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776' +GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` From 568bba4c384bdc8d2d9555ff4593f9fbee2710df Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 24 Mar 
2020 23:46:11 +0100 Subject: [PATCH 019/104] Add Elasticsearch checks issue #521 --- checks/check_extra716 | 21 ++++------ checks/check_extra777 | 0 checks/check_extra778 | 0 checks/check_extra779 | 76 ++++++++++++++++++++++++++++++++++++ checks/check_extra780 | 35 +++++++++++++++++ checks/check_extra781 | 35 +++++++++++++++++ checks/check_extra782 | 35 +++++++++++++++++ checks/check_extra783 | 35 +++++++++++++++++ checks/check_extra784 | 35 +++++++++++++++++ checks/check_extra785 | 35 +++++++++++++++++ groups/group14_elasticsearch | 18 +++++++++ groups/group7_extras | 2 +- 12 files changed, 313 insertions(+), 14 deletions(-) mode change 100755 => 100644 checks/check_extra777 mode change 100755 => 100644 checks/check_extra778 create mode 100644 checks/check_extra779 create mode 100644 checks/check_extra780 create mode 100644 checks/check_extra781 create mode 100644 checks/check_extra782 create mode 100644 checks/check_extra783 create mode 100644 checks/check_extra784 create mode 100644 checks/check_extra785 create mode 100644 groups/group14_elasticsearch diff --git a/checks/check_extra716 b/checks/check_extra716 index 549791ce..4726aef3 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra716="7.16" -CHECK_TITLE_extra716="[extra716] Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra716="[extra716] Check if Elasticsearch Service domains are set as Public and have cross account access (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra716="NOT_SCORED" CHECK_TYPE_extra716="EXTRA" CHECK_ALTERNATE_check716="extra716" @@ -22,19 +22,14 @@ extra716(){ LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) if [[ $LIST_OF_DOMAINS ]]; then for domain in $LIST_OF_DOMAINS;do - CHECK_IF_MEMBER_OF_VPC=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.VPCOptions.Options.VPCId --output text|grep -v ^None) - if [[ ! $CHECK_IF_MEMBER_OF_VPC ]];then - TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) - $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null - # check if the policy has Principal as * - CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') - if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then - textFail "$regx: $domain policy \"may\" allow Anonymous users to perform actions (Principal: \"*\")" "$regx" - else - textPass "$regx: $domain is not open" "$regx" - fi + TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) + $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null + # check if the policy has Principal as * + CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') + if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then + textFail "$regx: $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" else - textPass "$regx: $domain is in a VPC" "$regx" + textPass "$regx: $domain does not allow Anonymous cross account access" "$regx" fi rm -f $TEMP_POLICY_FILE done diff --git a/checks/check_extra777 b/checks/check_extra777 old mode 100755 new mode 100644 diff --git a/checks/check_extra778 b/checks/check_extra778 old mode 100755 new mode 100644 diff --git a/checks/check_extra779 b/checks/check_extra779 new file mode 100644 index 00000000..9163cd73 --- /dev/null +++ b/checks/check_extra779 @@ -0,0 +1,76 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra779="7.79" +CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports 9200/9300/5601 (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra779="NOT_SCORED" +CHECK_TYPE_extra779="EXTRA" +CHECK_ALTERNATE_check779="extra779" + +extra779(){ + # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to ports 9200/9300/5601 + # from the host where Prowler is running and will try to read indices or get kibana status + TEST_ES_AUTHENTICATION= + for regx in $REGIONS; do + # crate a list of SG open to the world with port 9200 or 9300 or 5601 + SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ + --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=`9200` && ToPort>=`9200`) || (FromPort<=`9300` && ToPort>=`9300`)) || (FromPort<=`5601` && ToPort>=`5601 `) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}') + # in case of open security groups goes through each one + if [[ $SG_LIST ]];then + for sg in $SG_LIST;do + # temp file store the list of instances IDs and public IP address if found + TEMP_EXTRA779_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA779.XXXXXXXXXX) + # finds instances with that open security group attached and get its public ip address (if it has one) + $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 
'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA779_FILE + # in case of exposed instances it does access checks + if [[ -s "$TEMP_EXTRA779_FILE" ]];then + while read instance eip ; do + if [[ $TEST_ES_AUTHENTICATION ]];then + if [[ "$eip" != "None" ]];then + CHECH_AUTH_9200=$(curl -m 2 -s -X GET "http://$eip:9200/_cat/indices" | grep -v "not authorized" >/dev/null 2>&1 && echo "open" || echo "closed") + # timeout 1 bash -c '(echo > /dev/tcp/'$eip'/9300) >/dev/null 2>&1 && echo "open" || echo "closed"' + CHECH_AUTH_5601=$(curl -m 2 -s "http://$eip:5601/api/status" | jq .version.number | grep -v null >/dev/null 2>&1 && echo "open" || echo "closed") + if [[ $CHECH_AUTH_9200 -eq "closed" ]];then + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch $CHECH_AUTH_9200" "$regx" + else + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch $CHECH_AUTH_9200" "$regx" + fi + if [[ $CHECH_AUTH_5601 -eq "closed" ]];then + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana $CHECH_AUTH_5601" "$regx" + else + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana $CHECH_AUTH_5601" "$regx" + fi + fi + else + if [[ "$eip" == "None" ]];then + textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" + else + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports" "$regx" + fi + fi + if [[ "$eip" == "None" ]];then + textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" + fi + # done < <(cat $TEMP_EXTRA779_FILE | grep -v None$) + done < <(cat $TEMP_EXTRA779_FILE) + # while read instance eip ; do + # textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" + # done < <(cat 
$TEMP_EXTRA779_FILE | grep None$) + fi + rm -rf $TEMP_EXTRA779_FILE + #textFail "$regx: Found Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch ports" "$regx" + done + else + textPass "$regx: No Security Groups found open to 0.0.0.0/0 for Elasticsearch/Kibana ports" "$regx" + fi + done +} diff --git a/checks/check_extra780 b/checks/check_extra780 new file mode 100644 index 00000000..dbee7b0d --- /dev/null +++ b/checks/check_extra780 @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra780="7.80" +CHECK_TITLE_extra780="[extra780] Check if Elasticsearch Service domains has Cognito authentication for Kibana enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra780="NOT_SCORED" +CHECK_TYPE_extra780="EXTRA" +CHECK_ALTERNATE_check780="extra780" + +extra780(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_COGNITO_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.CognitoOptions.Enabled' --output text|grep -i true) + if [[ $CHECK_IF_COGNITO_ENABLED ]];then + textPass "$regx: $domain has Cognito authentication for Kibana enabled" "$regx" + else + textFail "$regx: $domain does not have Cognito authentication for Kibana enabled" "$regx" + fi + done + else + textInfo "$regx: No Elasticsearch Service domains found" "$regx" + fi + done +} diff --git a/checks/check_extra781 b/checks/check_extra781 new file mode 100644 index 00000000..f47c022b --- /dev/null +++ b/checks/check_extra781 @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra781="7.81" +CHECK_TITLE_extra781="[extra781] Check if Elasticsearch Service domains has encryption at-rest enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra781="NOT_SCORED" +CHECK_TYPE_extra781="EXTRA" +CHECK_ALTERNATE_check781="extra781" + +extra781(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_ENCREST_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.EncryptionAtRestOptions.Enabled' --output text|grep -i true) + if [[ $CHECK_IF_ENCREST_ENABLED ]];then + textPass "$regx: $domain has encryption at-rest enabled" "$regx" + else + textFail "$regx: $domain does not have encryption at-rest enabled" "$regx" + fi + done + else + textInfo "$regx: No Elasticsearch Service domains found" "$regx" + fi + done +} diff --git a/checks/check_extra782 b/checks/check_extra782 new file mode 100644 index 00000000..8a60ea5d --- /dev/null +++ b/checks/check_extra782 @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra782="7.82" +CHECK_TITLE_extra782="[extra782] Check if Elasticsearch Service domains has node-to-node encryption enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra782="NOT_SCORED" +CHECK_TYPE_extra782="EXTRA" +CHECK_ALTERNATE_check782="extra782" + +extra782(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_NODETOENCR_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.NodeToNodeEncryptionOptions.Enabled' --output text|grep -i true) + if [[ $CHECK_IF_NODETOENCR_ENABLED ]];then + textPass "$regx: $domain has node-to-node encryption enabled" "$regx" + else + textFail "$regx: $domain does not have node-to-node encryption enabled" "$regx" + fi + done + else + textInfo "$regx: No Elasticsearch Service domains found" "$regx" + fi + done +} diff --git a/checks/check_extra783 b/checks/check_extra783 new file mode 100644 index 00000000..60c2e362 --- /dev/null +++ b/checks/check_extra783 @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra783="7.83" +CHECK_TITLE_extra783="[extra783] Check if Elasticsearch Service domains has enforce HTTPS enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra783="NOT_SCORED" +CHECK_TYPE_extra783="EXTRA" +CHECK_ALTERNATE_check783="extra783" + +extra783(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_ENFORCEHTTPS_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.DomainEndpointOptions.EnforceHTTPS' --output text|grep -i true) + if [[ $CHECK_IF_ENFORCEHTTPS_ENABLED ]];then + textPass "$regx: $domain has enforce HTTPS enabled" "$regx" + else + textFail "$regx: $domain does not have enforce HTTPS enabled" "$regx" + fi + done + else + textInfo "$regx: No Elasticsearch Service domains found" "$regx" + fi + done +} diff --git a/checks/check_extra784 b/checks/check_extra784 new file mode 100644 index 00000000..533b1d96 --- /dev/null +++ b/checks/check_extra784 @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra784="7.84" +CHECK_TITLE_extra784="[extra784] Check if Elasticsearch Service domains internal user database enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra784="NOT_SCORED" +CHECK_TYPE_extra784="EXTRA" +CHECK_ALTERNATE_check784="extra784" + +extra784(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_INTERNALDB_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.AdvancedSecurityOptions.InternalUserDatabaseEnabled' --output text|grep -i true) + if [[ $CHECK_IF_INTERNALDB_ENABLED ]];then + textPass "$regx: $domain has internal user database enabled" "$regx" + else + textFail "$regx: $domain does not have internal user database enabled" "$regx" + fi + done + else + textInfo "$regx: No Elasticsearch Service domains found" "$regx" + fi + done +} diff --git a/checks/check_extra785 b/checks/check_extra785 new file mode 100644 index 00000000..fe4dbd8f --- /dev/null +++ b/checks/check_extra785 @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra785="7.85" +CHECK_TITLE_extra785="[extra785] Check if Elasticsearch Service domains have updates available (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra785="NOT_SCORED" +CHECK_TYPE_extra785="EXTRA" +CHECK_ALTERNATE_check785="extra785" + +extra785(){ + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + CHECK_IF_UPDATE_AVAILABLE=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.ServiceSoftwareOptions.UpdateAvailable' --output text|grep -i true) + if [[ $CHECK_IF_UPDATE_AVAILABLE ]];then + textInfo "$regx: $domain has updates available" "$regx" + else + textPass "$regx: $domain does not have have updates available" "$regx" + fi + done + else + textInfo "$regx: No Elasticsearch Service domains found" "$regx" + fi + done +} diff --git a/groups/group14_elasticsearch b/groups/group14_elasticsearch new file mode 100644 index 00000000..089c85bc --- /dev/null +++ b/groups/group14_elasticsearch @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +GROUP_ID[14]='elasticsearch' +GROUP_NUMBER[14]='14.0' +GROUP_TITLE[14]='Elasticsearch security checks - [elasticsearch] ***************' +GROUP_RUN_BY_DEFAULT[14]='N' # run it when execute_all is called +GROUP_CHECKS[14]='extra715,extra716,extra779,extra780,extra781,extra782,extra783,extra784,extra785' diff --git a/groups/group7_extras b/groups/group7_extras index a2f45ec6..061ee16b 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -15,7 +15,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' GROUP_TITLE[7]='Extras - [extras] **********************************************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called -GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778' 
+GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` From 16154784446e52dc2e49f796b32f81d24e233ae0 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 09:40:03 +0100 Subject: [PATCH 020/104] Fixed query on extra779 --- checks/check_extra779 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra779 b/checks/check_extra779 index 9163cd73..4b8de72d 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -23,7 +23,7 @@ extra779(){ for regx in $REGIONS; do # crate a list of SG open to the world with port 9200 or 9300 or 5601 SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ - --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=`9200` && ToPort>=`9200`) || (FromPort<=`9300` && ToPort>=`9300`)) || (FromPort<=`5601` && ToPort>=`5601 `) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}') + --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=`9200` && ToPort>=`9200`) || (FromPort<=`9300` && ToPort>=`9300`) || 
(FromPort<=`5601` && ToPort>=`5601 `)) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}') # in case of open security groups goes through each one if [[ $SG_LIST ]];then for sg in $SG_LIST;do From f809f2fa1dab70fa34212ea893f77265a5dbef28 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 10:53:05 +0100 Subject: [PATCH 021/104] Modify group names header to clarify what is CIS only --- groups/group11_secrets | 2 +- groups/group1_iam | 2 +- groups/group2_logging | 2 +- groups/group3_monitoring | 2 +- groups/group4_networking | 2 +- groups/group5_cislevel1 | 2 +- groups/group6_cislevel2 | 2 +- groups/group7_extras | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/groups/group11_secrets b/groups/group11_secrets index 24ceda0c..52a2df02 100644 --- a/groups/group11_secrets +++ b/groups/group11_secrets @@ -13,7 +13,7 @@ GROUP_ID[11]='secrets' GROUP_NUMBER[11]='11.0' -GROUP_TITLE[11]='Look for keys secrets or passwords around resources - [secrets] **' +GROUP_TITLE[11]='Look for keys secrets or passwords around resources - [secrets]' GROUP_RUN_BY_DEFAULT[11]='N' # but it runs when execute_all is called (default) GROUP_CHECKS[11]='extra741,extra742,extra759,extra760,extra768,extra775' diff --git a/groups/group1_iam b/groups/group1_iam index 19026030..910897ea 100644 --- a/groups/group1_iam +++ b/groups/group1_iam @@ -10,6 +10,6 @@ GROUP_ID[1]='group1' GROUP_NUMBER[1]='1.0' -GROUP_TITLE[1]='Identity and Access Management - [group1] **********************' +GROUP_TITLE[1]='Identity and Access Management - CIS only - [group1] ***********' GROUP_RUN_BY_DEFAULT[1]='Y' # run it when execute_all is called GROUP_CHECKS[1]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check114,check115,check116,check117,check118,check119,check120,check121,check122,extra774' diff --git a/groups/group2_logging b/groups/group2_logging 
index 426d8ee9..a9d4db0b 100644 --- a/groups/group2_logging +++ b/groups/group2_logging @@ -10,6 +10,6 @@ GROUP_ID[2]='group2' GROUP_NUMBER[2]='2.0' -GROUP_TITLE[2]='Logging - [group2] *********************************************' +GROUP_TITLE[2]='Logging - CIS only - [group2] **********************************' GROUP_RUN_BY_DEFAULT[2]='Y' # run it when execute_all is called GROUP_CHECKS[2]='check21,check22,check23,check24,check25,check26,check27,check28,check29' diff --git a/groups/group3_monitoring b/groups/group3_monitoring index e6fd1e4a..058939a1 100644 --- a/groups/group3_monitoring +++ b/groups/group3_monitoring @@ -10,6 +10,6 @@ GROUP_ID[3]='group3' GROUP_NUMBER[3]='3.0' -GROUP_TITLE[3]='Monitoring - [group3] ******************************************' +GROUP_TITLE[3]='Monitoring - CIS only - [group3] *******************************' GROUP_RUN_BY_DEFAULT[3]='Y' # run it when execute_all is called GROUP_CHECKS[3]='check31,check32,check33,check34,check35,check36,check37,check38,check39,check310,check311,check312,check313,check314' diff --git a/groups/group4_networking b/groups/group4_networking index 0f552890..05b307b4 100644 --- a/groups/group4_networking +++ b/groups/group4_networking @@ -10,6 +10,6 @@ GROUP_ID[4]='group4' GROUP_NUMBER[4]='4.0' -GROUP_TITLE[4]='Networking - [group4] ******************************************' +GROUP_TITLE[4]='Networking - CIS only - [group4] *******************************' GROUP_RUN_BY_DEFAULT[4]='Y' # run it when execute_all is called GROUP_CHECKS[4]='check41,check42,check43,check44' diff --git a/groups/group5_cislevel1 b/groups/group5_cislevel1 index cbf29e74..3fe3a084 100644 --- a/groups/group5_cislevel1 +++ b/groups/group5_cislevel1 @@ -10,6 +10,6 @@ GROUP_ID[5]='cislevel1' GROUP_NUMBER[5]='5.0' -GROUP_TITLE[5]='CIS Level 1 - [cislevel1] **************************************' +GROUP_TITLE[5]='CIS Level 1 - CIS only - [cislevel1] ***************************' GROUP_RUN_BY_DEFAULT[5]='N' # run it when execute_all is 
called GROUP_CHECKS[5]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check115,check116,check117,check118,check119,check120,check122,check21,check23,check24,check25,check26,check31,check32,check33,check34,check35,check38,check312,check313,check314,check41,check42' diff --git a/groups/group6_cislevel2 b/groups/group6_cislevel2 index 23b81f51..67fdc1e1 100644 --- a/groups/group6_cislevel2 +++ b/groups/group6_cislevel2 @@ -10,6 +10,6 @@ GROUP_ID[6]='cislevel2' GROUP_NUMBER[6]='6.0' -GROUP_TITLE[6]='CIS Level 2 - [cislevel2] **************************************' +GROUP_TITLE[6]='CIS Level 2 - CIS only - [cislevel2] ***************************' GROUP_RUN_BY_DEFAULT[6]='N' # run it when execute_all is called GROUP_CHECKS[6]='check11,check12,check13,check14,check15,check16,check17,check18,check19,check110,check111,check112,check113,check114,check115,check116,check117,check118,check119,check120,check121,check122,check21,check22,check23,check24,check25,check26,check27,check28,check29,check31,check32,check33,check34,check35,check36,check37,check38,check39,check310,check311,check312,check313,check314,check41,check42,check43,check44' diff --git a/groups/group7_extras b/groups/group7_extras index 061ee16b..bbb27d31 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -13,7 +13,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' -GROUP_TITLE[7]='Extras - [extras] **********************************************' +GROUP_TITLE[7]='Extras - all non CIS specific checks - [extras] ****************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called 
GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785' From b4aaf0b81e2c99f6da595d5b4991c05737243807 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 10:53:55 +0100 Subject: [PATCH 022/104] Added initial PCI group without checks yet, issue #296 --- groups/group15_pci | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 groups/group15_pci diff --git a/groups/group15_pci b/groups/group15_pci new file mode 100644 index 00000000..89b59656 --- /dev/null +++ b/groups/group15_pci @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +GROUP_ID[15]='pci' +GROUP_NUMBER[15]='15.0' +GROUP_TITLE[15]='PCI-DSS v3.2.1 Readiness - ONLY AS REFERENCE - [pci] **********' +GROUP_RUN_BY_DEFAULT[15]='N' # run it when execute_all is called +GROUP_CHECKS[15]='' + +# Resources: +# https://github.com/toniblyx/prowler/issues/296 From ee82424869844b9a8e6cfe6851f49cd1d2457807 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 12:44:10 +0100 Subject: [PATCH 023/104] Enhanced extra779 with better authentication test and TEST_ES_AUTHENTICATION disabled --- checks/check_extra779 | 43 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 9 deletions(-) diff --git a/checks/check_extra779 b/checks/check_extra779 index 4b8de72d..0532fcd7 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -20,6 +20,25 @@ extra779(){ # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to ports 9200/9300/5601 # from the host where Prowler is running and will try to read indices or get kibana status TEST_ES_AUTHENTICATION= + httpStatus(){ + case $1 in + 000) SERVER_RESPONSE="000 Not responding within 2 seconds" ;; + 200) SERVER_RESPONSE="200 Successful" ;; + 400) SERVER_RESPONSE="400 Error: Bad Request" ;; + 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; + 403) SERVER_RESPONSE="403 Error: Forbidden" ;; + 404) SERVER_RESPONSE="404 Error: Not Found" ;; + 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; + 408) SERVER_RESPONSE="408 Error: Request Timeout within 2 seconds" ;; + 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; + 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; + 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; + 504) SERVER_RESPONSE="504 Error: Gateway Timeout within 2 seconds" ;; + 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; + *) SERVER_RESPONSE="HTTP: status not defined." 
;; + esac + } + for regx in $REGIONS; do # crate a list of SG open to the world with port 9200 or 9300 or 5601 SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ @@ -36,19 +55,25 @@ extra779(){ while read instance eip ; do if [[ $TEST_ES_AUTHENTICATION ]];then if [[ "$eip" != "None" ]];then - CHECH_AUTH_9200=$(curl -m 2 -s -X GET "http://$eip:9200/_cat/indices" | grep -v "not authorized" >/dev/null 2>&1 && echo "open" || echo "closed") - # timeout 1 bash -c '(echo > /dev/tcp/'$eip'/9300) >/dev/null 2>&1 && echo "open" || echo "closed"' - CHECH_AUTH_5601=$(curl -m 2 -s "http://$eip:5601/api/status" | jq .version.number | grep -v null >/dev/null 2>&1 && echo "open" || echo "closed") - if [[ $CHECH_AUTH_9200 -eq "closed" ]];then - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch $CHECH_AUTH_9200" "$regx" + # check for Elasticsearch on port 9200 + CHECH_HTTP_9200=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:9200/_cat/indices") + httpStatus $CHECH_HTTP_9200 + if [[ $CHECH_HTTP_9200 -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch response $SERVER_RESPONSE" "$regx" else - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch $CHECH_AUTH_9200" "$regx" + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch response $SERVER_RESPONSE" "$regx" fi - if [[ $CHECH_AUTH_5601 -eq "closed" ]];then - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana $CHECH_AUTH_5601" "$regx" + + # check for Kibana on port 5601 + CHECH_HTTP_5601=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:5601/api/status") + httpStatus $CHECH_HTTP_5601 + if [[ $CHECH_AUTH_5601 -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on 
Security Group: $sg with Kibana response $SERVER_RESPONSE" "$regx" else - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana $CHECH_AUTH_5601" "$regx" + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana response $SERVER_RESPONSE" "$regx" fi + # port 9300 not added yet, a command to check that could be: + # timeout 1 bash -c '(echo > /dev/tcp/'$eip'/9300) >/dev/null 2>&1 && echo "open" || echo "closed"' fi else if [[ "$eip" == "None" ]];then From 8c18533752257e436769806546f0490506ff287e Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 17:18:43 +0100 Subject: [PATCH 024/104] Updated check titles --- checks/check_extra715 | 12 ++++++------ checks/check_extra716 | 8 ++++---- checks/check_extra781 | 8 ++++---- checks/check_extra782 | 8 ++++---- checks/check_extra783 | 8 ++++---- checks/check_extra784 | 8 ++++---- checks/check_extra785 | 8 ++++---- groups/group14_elasticsearch | 2 +- 8 files changed, 31 insertions(+), 31 deletions(-) diff --git a/checks/check_extra715 b/checks/check_extra715 index 34eb9a3d..8de075a3 100644 --- a/checks/check_extra715 +++ b/checks/check_extra715 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra715="7.15" -CHECK_TITLE_extra715="[extra715] Check if Elasticsearch Service domains have logging enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra715="[extra715] Check if Amazon Elasticsearch Service (ES) domains have logging enabled" CHECK_SCORED_extra715="NOT_SCORED" CHECK_TYPE_extra715="EXTRA" CHECK_ALTERNATE_check715="extra715" @@ -23,19 +23,19 @@ extra715(){ for domain in $LIST_OF_DOMAINS;do SEARCH_SLOWLOG_ENABLED=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.LogPublishingOptions.Options.SEARCH_SLOW_LOGS.Enabled --output text |grep -v ^None|grep -v ^False) if [[ $SEARCH_SLOWLOG_ENABLED ]];then - textPass "$regx: ElasticSearch Service domain $domain SEARCH_SLOW_LOGS enabled" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) domain $domain SEARCH_SLOW_LOGS enabled" "$regx" else - textFail "$regx: ElasticSearch Service domain $domain SEARCH_SLOW_LOGS disabled!" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) domain $domain SEARCH_SLOW_LOGS disabled!" "$regx" fi INDEX_SLOWLOG_ENABLED=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.LogPublishingOptions.Options.INDEX_SLOW_LOGS.Enabled --output text |grep -v ^None|grep -v ^False) if [[ $INDEX_SLOWLOG_ENABLED ]];then - textPass "$regx: ElasticSearch Service domain $domain INDEX_SLOW_LOGS enabled" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) domain $domain INDEX_SLOW_LOGS enabled" "$regx" else - textFail "$regx: ElasticSearch Service domain $domain INDEX_SLOW_LOGS disabled!" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) domain $domain INDEX_SLOW_LOGS disabled!" 
"$regx" fi done else - textInfo "$regx: No Elasticsearch Service domain found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domain found" "$regx" fi done } diff --git a/checks/check_extra716 b/checks/check_extra716 index 4726aef3..abe5c546 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra716="7.16" -CHECK_TITLE_extra716="[extra716] Check if Elasticsearch Service domains are set as Public and have cross account access (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public and have cross account access" CHECK_SCORED_extra716="NOT_SCORED" CHECK_TYPE_extra716="EXTRA" CHECK_ALTERNATE_check716="extra716" @@ -27,14 +27,14 @@ extra716(){ # check if the policy has Principal as * CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then - textFail "$regx: $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" else - textPass "$regx: $domain does not allow Anonymous cross account access" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) $domain does not allow Anonymous cross account access" "$regx" fi rm -f $TEMP_POLICY_FILE done else - textInfo "$regx: No Elasticsearch Service domain found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domain found" "$regx" fi done } diff --git a/checks/check_extra781 b/checks/check_extra781 index f47c022b..c19289ca 100644 --- a/checks/check_extra781 +++ b/checks/check_extra781 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra781="7.81" -CHECK_TITLE_extra781="[extra781] Check if Elasticsearch Service domains has encryption at-rest enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra781="[extra781] Check if Amazon Elasticsearch Service (ES) domains has encryption at-rest enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra781="NOT_SCORED" CHECK_TYPE_extra781="EXTRA" CHECK_ALTERNATE_check781="extra781" @@ -23,13 +23,13 @@ extra781(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_ENCREST_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.EncryptionAtRestOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_ENCREST_ENABLED ]];then - textPass "$regx: $domain has encryption at-rest enabled" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) $domain has encryption at-rest enabled" "$regx" else - textFail "$regx: $domain does not have encryption at-rest enabled" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have encryption at-rest enabled" "$regx" fi done else - textInfo "$regx: No Elasticsearch Service domains found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" fi done } diff --git a/checks/check_extra782 b/checks/check_extra782 index 8a60ea5d..1c92beec 100644 --- a/checks/check_extra782 +++ b/checks/check_extra782 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra782="7.82" -CHECK_TITLE_extra782="[extra782] Check if Elasticsearch Service domains has node-to-node encryption enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra782="[extra782] Check if Amazon Elasticsearch Service (ES) domains has node-to-node encryption enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra782="NOT_SCORED" CHECK_TYPE_extra782="EXTRA" CHECK_ALTERNATE_check782="extra782" @@ -23,13 +23,13 @@ extra782(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_NODETOENCR_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.NodeToNodeEncryptionOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_NODETOENCR_ENABLED ]];then - textPass "$regx: $domain has node-to-node encryption enabled" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) $domain has node-to-node encryption enabled" "$regx" else - textFail "$regx: $domain does not have node-to-node encryption enabled" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have node-to-node encryption enabled" "$regx" fi done else - textInfo "$regx: No Elasticsearch Service domains found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" fi done } diff --git a/checks/check_extra783 b/checks/check_extra783 index 60c2e362..8b71b478 100644 --- a/checks/check_extra783 +++ b/checks/check_extra783 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra783="7.83" -CHECK_TITLE_extra783="[extra783] Check if Elasticsearch Service domains has enforce HTTPS enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra783="[extra783] Check if Amazon Elasticsearch Service (ES) domains has enforce HTTPS enabled" CHECK_SCORED_extra783="NOT_SCORED" CHECK_TYPE_extra783="EXTRA" CHECK_ALTERNATE_check783="extra783" @@ -23,13 +23,13 @@ extra783(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_ENFORCEHTTPS_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.DomainEndpointOptions.EnforceHTTPS' --output text|grep -i true) if [[ $CHECK_IF_ENFORCEHTTPS_ENABLED ]];then - textPass "$regx: $domain has enforce HTTPS enabled" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) $domain has enforce HTTPS enabled" "$regx" else - textFail "$regx: $domain does not have enforce HTTPS enabled" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have enforce HTTPS enabled" "$regx" fi done else - textInfo "$regx: No Elasticsearch Service domains found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" fi done } diff --git a/checks/check_extra784 b/checks/check_extra784 index 533b1d96..0a62207e 100644 --- a/checks/check_extra784 +++ b/checks/check_extra784 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra784="7.84" -CHECK_TITLE_extra784="[extra784] Check if Elasticsearch Service domains internal user database enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra784="[extra784] Check if Amazon Elasticsearch Service (ES) domains internal user database enabled" CHECK_SCORED_extra784="NOT_SCORED" CHECK_TYPE_extra784="EXTRA" CHECK_ALTERNATE_check784="extra784" @@ -23,13 +23,13 @@ extra784(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_INTERNALDB_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.AdvancedSecurityOptions.InternalUserDatabaseEnabled' --output text|grep -i true) if [[ $CHECK_IF_INTERNALDB_ENABLED ]];then - textPass "$regx: $domain has internal user database enabled" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) $domain has internal user database enabled" "$regx" else - textFail "$regx: $domain does not have internal user database enabled" "$regx" + textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have internal user database enabled" "$regx" fi done else - textInfo "$regx: No Elasticsearch Service domains found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" fi done } diff --git a/checks/check_extra785 b/checks/check_extra785 index fe4dbd8f..a630eb28 100644 --- a/checks/check_extra785 +++ b/checks/check_extra785 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra785="7.85" -CHECK_TITLE_extra785="[extra785] Check if Elasticsearch Service domains have updates available (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra785="[extra785] Check if Amazon Elasticsearch Service (ES) domains have updates available" CHECK_SCORED_extra785="NOT_SCORED" CHECK_TYPE_extra785="EXTRA" CHECK_ALTERNATE_check785="extra785" @@ -23,13 +23,13 @@ extra785(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_UPDATE_AVAILABLE=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.ServiceSoftwareOptions.UpdateAvailable' --output text|grep -i true) if [[ $CHECK_IF_UPDATE_AVAILABLE ]];then - textInfo "$regx: $domain has updates available" "$regx" + textInfo "$regx: Amazon Elasticsearch Service (ES) $domain has updates available" "$regx" else - textPass "$regx: $domain does not have have updates available" "$regx" + textPass "$regx: Amazon Elasticsearch Service (ES) $domain does not have have updates available" "$regx" fi done else - textInfo "$regx: No Elasticsearch Service domains found" "$regx" + textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" fi done } diff --git a/groups/group14_elasticsearch b/groups/group14_elasticsearch index 089c85bc..e046981f 100644 --- a/groups/group14_elasticsearch +++ b/groups/group14_elasticsearch @@ -13,6 +13,6 @@ GROUP_ID[14]='elasticsearch' GROUP_NUMBER[14]='14.0' -GROUP_TITLE[14]='Elasticsearch security checks - [elasticsearch] ***************' +GROUP_TITLE[14]='Elasticsearch related security checks - [elasticsearch] ***************' GROUP_RUN_BY_DEFAULT[14]='N' # run it when execute_all is called GROUP_CHECKS[14]='extra715,extra716,extra779,extra780,extra781,extra782,extra783,extra784,extra785' From eae4722499a017ab9b684319d02fc88daa819809 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 17:25:38 +0100 Subject: [PATCH 025/104] Updated ES check titles and results --- checks/check_extra715 | 
10 +++++----- checks/check_extra716 | 6 +++--- checks/check_extra779 | 2 +- checks/check_extra780 | 8 ++++---- checks/check_extra781 | 8 ++++---- checks/check_extra782 | 8 ++++---- checks/check_extra783 | 6 +++--- checks/check_extra784 | 6 +++--- checks/check_extra785 | 6 +++--- 9 files changed, 30 insertions(+), 30 deletions(-) diff --git a/checks/check_extra715 b/checks/check_extra715 index 8de075a3..0acde205 100644 --- a/checks/check_extra715 +++ b/checks/check_extra715 @@ -23,19 +23,19 @@ extra715(){ for domain in $LIST_OF_DOMAINS;do SEARCH_SLOWLOG_ENABLED=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.LogPublishingOptions.Options.SEARCH_SLOW_LOGS.Enabled --output text |grep -v ^None|grep -v ^False) if [[ $SEARCH_SLOWLOG_ENABLED ]];then - textPass "$regx: Amazon Elasticsearch Service (ES) domain $domain SEARCH_SLOW_LOGS enabled" "$regx" + textPass "$regx: Amazon ES domain $domain SEARCH_SLOW_LOGS enabled" "$regx" else - textFail "$regx: Amazon Elasticsearch Service (ES) domain $domain SEARCH_SLOW_LOGS disabled!" "$regx" + textFail "$regx: Amazon ES domain $domain SEARCH_SLOW_LOGS disabled!" "$regx" fi INDEX_SLOWLOG_ENABLED=$($AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.LogPublishingOptions.Options.INDEX_SLOW_LOGS.Enabled --output text |grep -v ^None|grep -v ^False) if [[ $INDEX_SLOWLOG_ENABLED ]];then - textPass "$regx: Amazon Elasticsearch Service (ES) domain $domain INDEX_SLOW_LOGS enabled" "$regx" + textPass "$regx: Amazon ES domain $domain INDEX_SLOW_LOGS enabled" "$regx" else - textFail "$regx: Amazon Elasticsearch Service (ES) domain $domain INDEX_SLOW_LOGS disabled!" "$regx" + textFail "$regx: Amazon ES domain $domain INDEX_SLOW_LOGS disabled!" 
"$regx" fi done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domain found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra716 b/checks/check_extra716 index abe5c546..2bd4cc16 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -27,14 +27,14 @@ extra716(){ # check if the policy has Principal as * CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then - textFail "$regx: Amazon Elasticsearch Service (ES) $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" + textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" else - textPass "$regx: Amazon Elasticsearch Service (ES) $domain does not allow Anonymous cross account access" "$regx" + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" fi rm -f $TEMP_POLICY_FILE done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domain found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra779 b/checks/check_extra779 index 0532fcd7..2375375d 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra779="7.79" -CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports 9200/9300/5601 (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports 9200/9300/5601" CHECK_SCORED_extra779="NOT_SCORED" CHECK_TYPE_extra779="EXTRA" CHECK_ALTERNATE_check779="extra779" diff --git a/checks/check_extra780 b/checks/check_extra780 index dbee7b0d..eadb584a 100644 --- a/checks/check_extra780 +++ b/checks/check_extra780 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra780="7.80" -CHECK_TITLE_extra780="[extra780] Check if Elasticsearch Service domains has Cognito authentication for Kibana enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra780="[extra780] Check if Amazon Elasticsearch Service (ES) domains has Amazon Cognito authentication for Kibana enabled" CHECK_SCORED_extra780="NOT_SCORED" CHECK_TYPE_extra780="EXTRA" CHECK_ALTERNATE_check780="extra780" @@ -23,13 +23,13 @@ extra780(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_COGNITO_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.CognitoOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_COGNITO_ENABLED ]];then - textPass "$regx: $domain has Cognito authentication for Kibana enabled" "$regx" + textPass "$regx: Amazon ES domain $domain has Amazon Cognito authentication for Kibana enabled" "$regx" else - textFail "$regx: $domain does not have Cognito authentication for Kibana enabled" "$regx" + textFail "$regx: Amazon ES domain $domain does not have Amazon Cognito authentication for Kibana enabled" "$regx" fi done else - textInfo "$regx: No Elasticsearch Service domains found" "$regx" + textInfo "$regx: No 
Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra781 b/checks/check_extra781 index c19289ca..2b19cc7e 100644 --- a/checks/check_extra781 +++ b/checks/check_extra781 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra781="7.81" -CHECK_TITLE_extra781="[extra781] Check if Amazon Elasticsearch Service (ES) domains has encryption at-rest enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra781="[extra781] Check if Amazon Elasticsearch Service (ES) domains has encryption at-rest enabled" CHECK_SCORED_extra781="NOT_SCORED" CHECK_TYPE_extra781="EXTRA" CHECK_ALTERNATE_check781="extra781" @@ -23,13 +23,13 @@ extra781(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_ENCREST_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.EncryptionAtRestOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_ENCREST_ENABLED ]];then - textPass "$regx: Amazon Elasticsearch Service (ES) $domain has encryption at-rest enabled" "$regx" + textPass "$regx: Amazon ES domain $domain has encryption at-rest enabled" "$regx" else - textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have encryption at-rest enabled" "$regx" + textFail "$regx: Amazon ES domain $domain does not have encryption at-rest enabled" "$regx" fi done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra782 b/checks/check_extra782 index 1c92beec..1c38c7f3 100644 --- a/checks/check_extra782 +++ b/checks/check_extra782 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra782="7.82" -CHECK_TITLE_extra782="[extra782] Check if Amazon Elasticsearch Service (ES) domains has node-to-node encryption enabled (Not Scored) (Not part of CIS benchmark)" +CHECK_TITLE_extra782="[extra782] Check if Amazon Elasticsearch Service (ES) domains has node-to-node encryption enabled" CHECK_SCORED_extra782="NOT_SCORED" CHECK_TYPE_extra782="EXTRA" CHECK_ALTERNATE_check782="extra782" @@ -23,13 +23,13 @@ extra782(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_NODETOENCR_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.NodeToNodeEncryptionOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_NODETOENCR_ENABLED ]];then - textPass "$regx: Amazon Elasticsearch Service (ES) $domain has node-to-node encryption enabled" "$regx" + textPass "$regx: Amazon ES domain $domain has node-to-node encryption enabled" "$regx" else - textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have node-to-node encryption enabled" "$regx" + textFail "$regx: Amazon ES domain $domain does not have node-to-node encryption enabled" "$regx" fi done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra783 b/checks/check_extra783 index 8b71b478..4c33b1ac 100644 --- a/checks/check_extra783 +++ b/checks/check_extra783 @@ -23,13 +23,13 @@ extra783(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_ENFORCEHTTPS_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.DomainEndpointOptions.EnforceHTTPS' --output text|grep -i true) if [[ $CHECK_IF_ENFORCEHTTPS_ENABLED ]];then - textPass "$regx: Amazon Elasticsearch Service (ES) $domain has enforce HTTPS enabled" "$regx" + textPass "$regx: Amazon ES domain $domain has enforce HTTPS enabled" "$regx" else - textFail "$regx: Amazon Elasticsearch Service (ES) 
$domain does not have enforce HTTPS enabled" "$regx" + textFail "$regx: Amazon ES domain $domain does not have enforce HTTPS enabled" "$regx" fi done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra784 b/checks/check_extra784 index 0a62207e..cb407538 100644 --- a/checks/check_extra784 +++ b/checks/check_extra784 @@ -23,13 +23,13 @@ extra784(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_INTERNALDB_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.AdvancedSecurityOptions.InternalUserDatabaseEnabled' --output text|grep -i true) if [[ $CHECK_IF_INTERNALDB_ENABLED ]];then - textPass "$regx: Amazon Elasticsearch Service (ES) $domain has internal user database enabled" "$regx" + textPass "$regx: Amazon ES domain $domain has internal user database enabled" "$regx" else - textFail "$regx: Amazon Elasticsearch Service (ES) $domain does not have internal user database enabled" "$regx" + textFail "$regx: Amazon ES domain $domain does not have internal user database enabled" "$regx" fi done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } diff --git a/checks/check_extra785 b/checks/check_extra785 index a630eb28..243693b7 100644 --- a/checks/check_extra785 +++ b/checks/check_extra785 @@ -23,13 +23,13 @@ extra785(){ for domain in $LIST_OF_DOMAINS;do CHECK_IF_UPDATE_AVAILABLE=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.ServiceSoftwareOptions.UpdateAvailable' --output text|grep -i true) if [[ $CHECK_IF_UPDATE_AVAILABLE ]];then - textInfo "$regx: Amazon Elasticsearch Service (ES) $domain has updates available" "$regx" + textInfo "$regx: Amazon ES domain $domain has updates available" "$regx" else - textPass "$regx: 
Amazon Elasticsearch Service (ES) $domain does not have have updates available" "$regx" + textPass "$regx: Amazon ES domain $domain does not have have updates available" "$regx" fi done else - textInfo "$regx: No Amazon Elasticsearch Service (ES) domains found" "$regx" + textInfo "$regx: No Amazon ES domain found" "$regx" fi done } From 8faf1f45c40463ac78b0a305d7f72a689e5c0936 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 18:19:41 +0100 Subject: [PATCH 026/104] Added connection test for port 9300 in both linux and macosx on extra779 --- checks/check_extra779 | 34 +++++++++++++++++++++++----------- include/os_detector | 18 ++++++++++++++++++ 2 files changed, 41 insertions(+), 11 deletions(-) diff --git a/checks/check_extra779 b/checks/check_extra779 index 2375375d..d6107851 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -17,23 +17,24 @@ CHECK_TYPE_extra779="EXTRA" CHECK_ALTERNATE_check779="extra779" extra779(){ - # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to ports 9200/9300/5601 - # from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION= + # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to port: + # 9200 API, 9300 Communcation and 5601 Kibana to figure out if authentication is enabled. 
+ # That is from the host where Prowler is running and will try to read indices or get kibana status + TEST_ES_AUTHENTICATION=1 httpStatus(){ case $1 in - 000) SERVER_RESPONSE="000 Not responding within 2 seconds" ;; + 000) SERVER_RESPONSE="000 Not responding" ;; 200) SERVER_RESPONSE="200 Successful" ;; 400) SERVER_RESPONSE="400 Error: Bad Request" ;; 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; 403) SERVER_RESPONSE="403 Error: Forbidden" ;; 404) SERVER_RESPONSE="404 Error: Not Found" ;; 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; - 408) SERVER_RESPONSE="408 Error: Request Timeout within 2 seconds" ;; + 408) SERVER_RESPONSE="408 Error: Request Timeout" ;; 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; - 504) SERVER_RESPONSE="504 Error: Gateway Timeout within 2 seconds" ;; + 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; *) SERVER_RESPONSE="HTTP: status not defined." ;; esac @@ -55,13 +56,26 @@ extra779(){ while read instance eip ; do if [[ $TEST_ES_AUTHENTICATION ]];then if [[ "$eip" != "None" ]];then - # check for Elasticsearch on port 9200 + # check for Elasticsearch on port 9200, rest API HTTP. 
CHECH_HTTP_9200=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:9200/_cat/indices") httpStatus $CHECH_HTTP_9200 if [[ $CHECH_HTTP_9200 -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch response $SERVER_RESPONSE" "$regx" + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9200 response $SERVER_RESPONSE" "$regx" else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch response $SERVER_RESPONSE" "$regx" + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9200 response $SERVER_RESPONSE" "$regx" + fi + + # check for port 9300 TCP, this is the communication port, not: + # test_tcp_connectivity is in include/os_detector + # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) + CHECK_TCP_9300=$(test_tcp_connectivity $eip 9300 2) + # Using HTTP error codes here as well to reuse httpStatus function + # codes for better handling, so 200 is open and 000 is not responding + httpStatus $CHECK_TCP_9300 + if [[ $CHECK_TCP_9300 -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9300 response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9300 response $SERVER_RESPONSE" "$regx" fi # check for Kibana on port 5601 @@ -72,8 +86,6 @@ extra779(){ else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana response $SERVER_RESPONSE" "$regx" fi - # port 9300 not added yet, a command to check that could be: - # timeout 1 bash -c '(echo > /dev/tcp/'$eip'/9300) >/dev/null 2>&1 && echo "open" || echo "closed"' fi else if [[ "$eip" == "None" ]];then diff --git a/include/os_detector b/include/os_detector 
index 2394c521..1e24df71 100644 --- a/include/os_detector +++ b/include/os_detector @@ -55,6 +55,15 @@ if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d') echo $DATE_BEFORE_MONTHS_TO_COMPARE } + test_tcp_connectivity() + { + HOST=$1 + PORT=$2 + TIMEOUT=$3 + # This is initially for ES port 9300, not not HTTP but I add HTTP error + # codes for better handling, so 200 is open and 000 is not responding + timeout $TIMEOUT bash -c '(echo > /dev/tcp/'$HOST'/'$PORT') >/dev/null 2>&1 && echo "200" || echo "000"' + } elif [[ "$OSTYPE" == "darwin"* ]]; then # BSD/OSX commands compatibility TEMP_REPORT_FILE=$(mktemp -t prowler.cred_report-XXXXXX) @@ -91,6 +100,15 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then DATE_BEFORE_MONTHS_TO_COMPARE=$(date -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') echo $DATE_BEFORE_MONTHS_TO_COMPARE } + test_tcp_connectivity() + { + HOST=$1 + PORT=$2 + TIMEOUT=$3 + # This is initially for ES port 9300, not not HTTP but I add HTTP error + # codes for better handling, so 200 is open and 000 is not responding + nc -z -G $TIMEOUT $HOST $PORT >/dev/null 2>&1 && echo "200" || echo "000" + } elif [[ "$OSTYPE" == "cygwin" ]]; then # POSIX compatibility layer and Linux environment emulation for Windows TEMP_REPORT_FILE=$(mktemp -t -p /tmp prowler.cred_report-XXXXXX) From ba75d899117851a5e9bddd3f0258a43edc44c06c Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 25 Mar 2020 18:20:20 +0100 Subject: [PATCH 027/104] Added connection test for port 9300 in both linux and macosx on extra779 --- checks/check_extra779 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra779 b/checks/check_extra779 index d6107851..ce892684 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -20,7 +20,7 @@ extra779(){ # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to port: # 9200 API, 9300 Communcation and 
5601 Kibana to figure out if authentication is enabled. # That is from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION=1 + TEST_ES_AUTHENTICATION= httpStatus(){ case $1 in 000) SERVER_RESPONSE="000 Not responding" ;; From 1419d4887aab3a8f7beeddeff7ec3ba7a22b70c0 Mon Sep 17 00:00:00 2001 From: Huang Yaming Date: Fri, 27 Mar 2020 14:49:52 +0800 Subject: [PATCH 028/104] Ignore imported ACM Certificate in check_extra724 --- checks/check_extra724 | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/checks/check_extra724 b/checks/check_extra724 index 068a07d2..bb750855 100644 --- a/checks/check_extra724 +++ b/checks/check_extra724 @@ -24,10 +24,16 @@ extra724(){ for cert_arn in $LIST_OF_CERTS;do CT_ENABLED=$($AWSCLI acm describe-certificate $PROFILE_OPT --region $regx --certificate-arn $cert_arn --query Certificate.Options.CertificateTransparencyLoggingPreference --output text) CERT_DOMAIN_NAME=$(aws acm describe-certificate $PROFILE_OPT --region $regx --certificate-arn $cert_arn --query Certificate.DomainName --output text) - if [[ $CT_ENABLED == "ENABLED" ]];then - textPass "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging enabled!" "$regx" + CERT_TYPE=$(aws acm describe-certificate $PROFILE_OPT --region $regx --certificate-arn $cert_arn --query Certificate.Type --output text) + if [[ $CERT_TYPE == "IMPORTED" ]];then + # Ignore imported certificate + textInfo "$regx: ACM Certificate $CERT_DOMAIN_NAME is imported." "$regx" else - textFail "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging disabled!" "$regx" + if [[ $CT_ENABLED == "ENABLED" ]];then + textPass "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging enabled!" "$regx" + else + textFail "$regx: ACM Certificate $CERT_DOMAIN_NAME has Certificate Transparency logging disabled!" 
"$regx" + fi fi done else From 059c701923553a43a552c22b53712a12e3de9117 Mon Sep 17 00:00:00 2001 From: dhirajdatar Date: Tue, 31 Mar 2020 16:46:38 +0530 Subject: [PATCH 029/104] Update README.md --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index bc1a3fa1..e0c5d0e5 100644 --- a/README.md +++ b/README.md @@ -369,6 +369,13 @@ or to run just one of the checks: ./prowler -c extraNUMBER ``` +or to run multiple extras in one go: + +```sh +./prowler -c extraNumber,extraNumber +``` + + ## Forensics Ready Checks With this group of checks, Prowler looks if each service with logging or audit capabilities has them enabled to ensure all needed evidences are recorded and collected for an eventual digital forensic investigation in case of incident. List of checks part of this group (you can also see all groups with `./prowler -L`). The list of checks can be seen in the group file at: From 1ae5d5d725d7799569ef7c49b351213e52aabc6b Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 1 Apr 2020 16:52:52 +0200 Subject: [PATCH 030/104] Added custom ports variable to extra779 --- checks/check_extra716 | 48 +++++++++++++++++++++++++++++++++++++++++-- checks/check_extra779 | 48 +++++++++++++++++++++++-------------------- 2 files changed, 72 insertions(+), 24 deletions(-) diff --git a/checks/check_extra716 b/checks/check_extra716 index 2bd4cc16..48a78df7 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -17,6 +17,28 @@ CHECK_TYPE_extra716="EXTRA" CHECK_ALTERNATE_check716="extra716" extra716(){ + # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to the public URI endpoint. 
+ # That is from the host where Prowler is running and will try to read indices or get kibana status + TEST_ES_AUTHENTICATION=1 + httpStatus(){ + case $1 in + 000) SERVER_RESPONSE="000 Not responding" ;; + 200) SERVER_RESPONSE="200 Successful" ;; + 400) SERVER_RESPONSE="400 Error: Bad Request" ;; + 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; + 403) SERVER_RESPONSE="403 Error: Forbidden" ;; + 404) SERVER_RESPONSE="404 Error: Not Found" ;; + 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; + 408) SERVER_RESPONSE="408 Error: Request Timeout" ;; + 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; + 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; + 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; + 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; + 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; + *) SERVER_RESPONSE="HTTP: status not defined." ;; + esac + } + # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) @@ -27,7 +49,29 @@ extra716(){ # check if the policy has Principal as * CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then - textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" + if [[ $TEST_ES_AUTHENTICATION ]];then + # get endpoint or vpc endpoints + ES_DOMAIN_ENDPOINT=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[Endpoint || Endpoints]' --output text) + + # check for Elasticsearch on port 443, rest API HTTP. 
+ CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") + httpStatus $CHECH_ES_HTTPS + if [[ $CHECH_ES_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + # check for Kibana on port 443 + CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") + httpStatus $CHECH_KIBANA_HTTPS + if [[ $CHECH_KIBANA_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + else + textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" + fi else textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" fi @@ -37,4 +81,4 @@ extra716(){ textInfo "$regx: No Amazon ES domain found" "$regx" fi done -} +} diff --git a/checks/check_extra779 b/checks/check_extra779 index ce892684..4dde28ad 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -11,8 +11,8 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra779="7.79" -CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports 9200/9300/5601" -CHECK_SCORED_extra779="NOT_SCORED" +CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports" +CHECK_SCORED_extra779="NOT_SCORED" CHECK_TYPE_extra779="EXTRA" CHECK_ALTERNATE_check779="extra779" @@ -21,6 +21,10 @@ extra779(){ # 9200 API, 9300 Communcation and 5601 Kibana to figure out if authentication is enabled. # That is from the host where Prowler is running and will try to read indices or get kibana status TEST_ES_AUTHENTICATION= + ES_API_PORT="9200" + ES_DATA_PORT="9300" + ES_KIBANA_PORT="5601" + httpStatus(){ case $1 in 000) SERVER_RESPONSE="000 Not responding" ;; @@ -41,9 +45,9 @@ extra779(){ } for regx in $REGIONS; do - # crate a list of SG open to the world with port 9200 or 9300 or 5601 + # crate a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ - --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=`9200` && ToPort>=`9200`) || (FromPort<=`9300` && ToPort>=`9300`) || (FromPort<=`5601` && ToPort>=`5601 `)) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}') + --query "SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=\`$ES_API_PORT\` && ToPort>=\`$ES_API_PORT\`) || (FromPort<=\`$ES_DATA_PORT\` && ToPort>=\`$ES_DATA_PORT\`) || (FromPort<=\`$ES_KIBANA_PORT\` && ToPort>=\`$ES_KIBANA_PORT\`)) && (contains(IpRanges[].CidrIp, \`0.0.0.0/0\`) || contains(Ipv6Ranges[].CidrIpv6, \`::/0\`))]) > \`0\`].{GroupId:GroupId}") # in case of open security groups goes through each one if [[ $SG_LIST ]];then for sg in $SG_LIST;do @@ -56,42 +60,42 @@ extra779(){ while read 
instance eip ; do if [[ $TEST_ES_AUTHENTICATION ]];then if [[ "$eip" != "None" ]];then - # check for Elasticsearch on port 9200, rest API HTTP. - CHECH_HTTP_9200=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:9200/_cat/indices") - httpStatus $CHECH_HTTP_9200 - if [[ $CHECH_HTTP_9200 -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9200 response $SERVER_RESPONSE" "$regx" + # check for Elasticsearch on port $ES_API_PORT, rest API HTTP. + CHECH_HTTP_ES_API=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_API_PORT/_cat/indices") + httpStatus $CHECH_HTTP_ES_API + if [[ $CHECH_HTTP_ES_API -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9200 response $SERVER_RESPONSE" "$regx" + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" fi - # check for port 9300 TCP, this is the communication port, not: + # check for port $ES_DATA_PORT TCP, this is the communication port, not: # test_tcp_connectivity is in include/os_detector # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) - CHECK_TCP_9300=$(test_tcp_connectivity $eip 9300 2) + CHECH_HTTP_ES_DATA=$(test_tcp_connectivity $eip $ES_DATA_PORT 2) # Using HTTP error codes here as well to reuse httpStatus function # codes for better handling, so 200 is open and 000 is not responding - httpStatus $CHECK_TCP_9300 - if [[ $CHECK_TCP_9300 -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9300 response $SERVER_RESPONSE" "$regx" + httpStatus $CHECH_HTTP_ES_DATA + if [[ 
$CHECH_HTTP_ES_DATA -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 9300 response $SERVER_RESPONSE" "$regx" + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" fi - # check for Kibana on port 5601 - CHECH_HTTP_5601=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:5601/api/status") - httpStatus $CHECH_HTTP_5601 + # check for Kibana on port $ES_KIBANA_PORT + CHECH_HTTP_ES_KIBANA=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_KIBANA_PORT/api/status") + httpStatus $CHECH_HTTP_ES_KIBANA if [[ $CHECH_AUTH_5601 -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana response $SERVER_RESPONSE" "$regx" + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana response $SERVER_RESPONSE" "$regx" + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" fi fi else if [[ "$eip" == "None" ]];then textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" else - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports" "$regx" + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports $ES_API_PORT/$ES_DATA_PORT/$ES_KIBANA_PORT" "$regx" fi fi if [[ 
"$eip" == "None" ]];then From 2e2fe96ff53ea1ec09225e64bd6d64f4def67363 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 1 Apr 2020 21:57:20 +0200 Subject: [PATCH 031/104] Improved extra716 filters and auth check --- checks/check_extra716 | 82 ++++++++++++++++++++++++++++--------------- 1 file changed, 53 insertions(+), 29 deletions(-) diff --git a/checks/check_extra716 b/checks/check_extra716 index 48a78df7..332baece 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra716="7.16" -CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public and have cross account access" +CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public and have cross account access " CHECK_SCORED_extra716="NOT_SCORED" CHECK_TYPE_extra716="EXTRA" CHECK_ALTERNATE_check716="extra716" @@ -19,7 +19,7 @@ CHECK_ALTERNATE_check716="extra716" extra716(){ # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to the public URI endpoint. # That is from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION=1 + TEST_ES_AUTHENTICATION= httpStatus(){ case $1 in 000) SERVER_RESPONSE="000 Not responding" ;; @@ -45,35 +45,59 @@ extra716(){ if [[ $LIST_OF_DOMAINS ]]; then for domain in $LIST_OF_DOMAINS;do TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) - $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null - # check if the policy has Principal as * - CHECK_ES_DOMAIN_ALLUSERS_POLICY=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Condition == null)') - if [[ $CHECK_ES_DOMAIN_ALLUSERS_POLICY ]];then - if [[ $TEST_ES_AUTHENTICATION ]];then - # get endpoint or vpc endpoints - ES_DOMAIN_ENDPOINT=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[Endpoint || Endpoints]' --output text) - - # check for Elasticsearch on port 443, rest API HTTP. - CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") - httpStatus $CHECH_ES_HTTPS - if [[ $CHECH_ES_HTTPS -eq "200" ]];then - textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + # get endpoint or vpc endpoints + ES_DOMAIN_ENDPOINT=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[Endpoint || Endpoints]' --output text) + # If the endpoint starts with "vpc-" it is in a VPC then it is fine. 
+ if [[ "$ES_DOMAIN_ENDPOINT" =~ ^vpc-* ]];then + ES_DOMAIN_VPC=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.VPCOptions.VPCId' --output text) + textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" + else + $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null + CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') + CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) + CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")))' ) + CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^0.0.0.0/0|^0.0.0.0/8")))' ) + CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" == "*")') + CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")| not))' ) + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + if [[ 
$TEST_ES_AUTHENTICATION ]];then + # check for REST API on port 443 + CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") + httpStatus $CHECH_ES_HTTPS + if [[ $CHECH_ES_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + # check for Kibana on port 443 + CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") + httpStatus $CHECH_KIBANA_HTTPS + if [[ $CHECH_KIBANA_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + else + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\") AUTH NOT TESTED" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network 0.0.0.0) AUTH NOT TESTED" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network \"*\") AUTH NOT TESTED" "$regx" + fi fi - # check for Kibana on port 443 - CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") - httpStatus $CHECH_KIBANA_HTTPS - if [[ $CHECH_KIBANA_HTTPS -eq "200" ]];then - textFail "$regx: Amazon ES domain $domain policy allow 
Anonymous cross account access (Principal: \"*\") and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\") but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + elif [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]];then + if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access from a RFC1918 PRIVATE IP or CIDR" "$regx" fi - else - textFail "$regx: Amazon ES domain $domain policy allow Anonymous cross account access (Principal: \"*\")" "$regx" - fi - else - textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access from a PUBLIC IP or CIDR" "$regx" + fi + else + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + fi fi rm -f $TEMP_POLICY_FILE done From 9dec4e6eb3e72acf1e9da8f717a1527b5721be01 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Thu, 2 Apr 2020 00:02:42 +0200 Subject: [PATCH 032/104] Fix issue #488 only works if IsMultiRegionTrail --- include/check3x | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/include/check3x b/include/check3x index 5b75d315..8e805e90 100644 --- a/include/check3x +++ b/include/check3x @@ -14,17 +14,19 @@ check3x(){ local CHECK_WARN local CHECK_CROSS_ACCOUNT_WARN - DESCRIBE_TRAILS_CACHE=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION") - TRAIL_LIST=$(echo $DESCRIBE_TRAILS_CACHE | jq -r '.trailList[]|@base64') + # In order to make all these checks work properly logs and alarms have to + # be based only on CloudTrail tail set as "IsMultiRegionTrail" = True. 
+ DESCRIBE_TRAILS_CACHE=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[?IsMultiRegionTrail == `true` ]') + TRAIL_LIST=$(echo $DESCRIBE_TRAILS_CACHE | jq -r '. |@base64') CURRENT_ACCOUNT_ID=$($AWSCLI sts $PROFILE_OPT get-caller-identity --region "$REGION" --query Account --output text) CLOUDWATCH_LOGGROUP=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[*].CloudWatchLogsLogGroupArn' --output text| tr '\011' '\012' | awk -F: '{print $7}') if [[ $CLOUDWATCH_LOGGROUP != "" ]]; then for group_obj_enc in $TRAIL_LIST; do group_obj_raw=$(echo $group_obj_enc | decode_report) - CLOUDWATCH_LOGGROUP_NAME=$(echo $group_obj_raw | jq -r '.CloudWatchLogsLogGroupArn|split(":")[6]') - CLOUDWATCH_LOGGROUP_REGION=$(echo $group_obj_raw | jq -r '.CloudWatchLogsLogGroupArn|split(":")[3]') - CLOUDWATCH_LOGGROUP_ACCOUNT=$(echo $group_obj_raw | jq -r '.CloudWatchLogsLogGroupArn|split(":")[4]') + CLOUDWATCH_LOGGROUP_NAME=$(echo $group_obj_raw | jq -r '.[] | .CloudWatchLogsLogGroupArn|split(":")[6]') + CLOUDWATCH_LOGGROUP_REGION=$(echo $group_obj_raw | jq -r '.[] | .CloudWatchLogsLogGroupArn|split(":")[3]') + CLOUDWATCH_LOGGROUP_ACCOUNT=$(echo $group_obj_raw | jq -r '.[] | .CloudWatchLogsLogGroupArn|split(":")[4]') if [ "$CLOUDWATCH_LOGGROUP_ACCOUNT" == "$CURRENT_ACCOUNT_ID" ];then # Filter control and whitespace from .metricFilters[*].filterPattern for easier matching later METRICFILTER_CACHE=$($AWSCLI logs describe-metric-filters --log-group-name "$CLOUDWATCH_LOGGROUP_NAME" $PROFILE_OPT --region "$CLOUDWATCH_LOGGROUP_REGION"|jq '.metricFilters|=map(.filterPattern|=gsub("[[:space:]]+"; " "))') From 2c580dd750be26b166c30c2f4ff9346d22fb7047 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Thu, 2 Apr 2020 00:19:43 +0200 Subject: [PATCH 033/104] Fix issue #488 only works if CloudWatchLog configuration --- include/check3x | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/include/check3x 
b/include/check3x index 8e805e90..cabe626a 100644 --- a/include/check3x +++ b/include/check3x @@ -15,8 +15,8 @@ check3x(){ local CHECK_CROSS_ACCOUNT_WARN # In order to make all these checks work properly logs and alarms have to - # be based only on CloudTrail tail set as "IsMultiRegionTrail" = True. - DESCRIBE_TRAILS_CACHE=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[?IsMultiRegionTrail == `true` ]') + # be based only on CloudTrail tail with CloudWatchLog configuration. + DESCRIBE_TRAILS_CACHE=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[?CloudWatchLogsLogGroupArn != `null`]') TRAIL_LIST=$(echo $DESCRIBE_TRAILS_CACHE | jq -r '. |@base64') CURRENT_ACCOUNT_ID=$($AWSCLI sts $PROFILE_OPT get-caller-identity --region "$REGION" --query Account --output text) CLOUDWATCH_LOGGROUP=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region "$REGION" --query 'trailList[*].CloudWatchLogsLogGroupArn' --output text| tr '\011' '\012' | awk -F: '{print $7}') From e567ccb828e2625a0cf4601ad8f9c28f49fb4eb5 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Thu, 2 Apr 2020 15:31:43 +0200 Subject: [PATCH 034/104] v2.2.1 with new function and Improved extra779 and extra716 --- checks/check_extra716 | 28 ++++++---------------------- checks/check_extra779 | 19 ------------------- include/connection_tests | 34 ++++++++++++++++++++++++++++++++++ prowler | 3 ++- 4 files changed, 42 insertions(+), 42 deletions(-) create mode 100644 include/connection_tests diff --git a/checks/check_extra716 b/checks/check_extra716 index 332baece..eb040432 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -20,24 +20,6 @@ extra716(){ # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to the public URI endpoint. 
# That is from the host where Prowler is running and will try to read indices or get kibana status TEST_ES_AUTHENTICATION= - httpStatus(){ - case $1 in - 000) SERVER_RESPONSE="000 Not responding" ;; - 200) SERVER_RESPONSE="200 Successful" ;; - 400) SERVER_RESPONSE="400 Error: Bad Request" ;; - 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; - 403) SERVER_RESPONSE="403 Error: Forbidden" ;; - 404) SERVER_RESPONSE="404 Error: Not Found" ;; - 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; - 408) SERVER_RESPONSE="408 Error: Request Timeout" ;; - 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; - 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; - 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; - 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; - 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; - *) SERVER_RESPONSE="HTTP: status not defined." ;; - esac - } # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do @@ -55,10 +37,12 @@ extra716(){ $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) - CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")))' ) - CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^0.0.0.0/0|^0.0.0.0/8")))' ) - CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" == "*")') - CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")| not))' ) + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then + CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")))' ) + CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^0.0.0.0/0|^0.0.0.0/8")))' ) + CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" == "*")') + CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")| not))' ) + fi if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then if [[ $TEST_ES_AUTHENTICATION ]];then # check for REST API on port 443 diff --git 
a/checks/check_extra779 b/checks/check_extra779 index 4dde28ad..627bc51a 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -25,25 +25,6 @@ extra779(){ ES_DATA_PORT="9300" ES_KIBANA_PORT="5601" - httpStatus(){ - case $1 in - 000) SERVER_RESPONSE="000 Not responding" ;; - 200) SERVER_RESPONSE="200 Successful" ;; - 400) SERVER_RESPONSE="400 Error: Bad Request" ;; - 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; - 403) SERVER_RESPONSE="403 Error: Forbidden" ;; - 404) SERVER_RESPONSE="404 Error: Not Found" ;; - 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; - 408) SERVER_RESPONSE="408 Error: Request Timeout" ;; - 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; - 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; - 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; - 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; - 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; - *) SERVER_RESPONSE="HTTP: status not defined." ;; - esac - } - for regx in $REGIONS; do # crate a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ diff --git a/include/connection_tests b/include/connection_tests new file mode 100644 index 00000000..632be16f --- /dev/null +++ b/include/connection_tests @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations under the License. + + +# Functions to connection responses initially used for Elasticsearch related checks + +httpStatus(){ + case $1 in + 000) SERVER_RESPONSE="000 Not responding" ;; + 200) SERVER_RESPONSE="200 Successful" ;; + 400) SERVER_RESPONSE="400 Error: Bad Request" ;; + 401) SERVER_RESPONSE="401 Error: Unauthorized" ;; + 403) SERVER_RESPONSE="403 Error: Forbidden" ;; + 404) SERVER_RESPONSE="404 Error: Not Found" ;; + 407) SERVER_RESPONSE="407 Error: Proxy Authentication Required" ;; + 408) SERVER_RESPONSE="408 Error: Request Timeout" ;; + 500) SERVER_RESPONSE="500 Error: Internal Server Error" ;; + 502) SERVER_RESPONSE="502 Error: Bad Gateway" ;; + 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; + 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; + 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; + *) SERVER_RESPONSE="HTTP: status not defined." ;; + esac + } \ No newline at end of file diff --git a/prowler b/prowler index 0310a197..2486808c 100755 --- a/prowler +++ b/prowler @@ -32,7 +32,7 @@ OPTRED="" OPTNORMAL="" # Set the defaults variables -PROWLER_VERSION=2.2.0 +PROWLER_VERSION=2.2.1 PROWLER_DIR=$(dirname "$0") REGION="" @@ -194,6 +194,7 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT . $PROWLER_DIR/include/secrets_detector . $PROWLER_DIR/include/check3x . $PROWLER_DIR/include/assume_role +. 
$PROWLER_DIR/include/connection_tests # Get a list of all available AWS Regions REGIONS=$($AWSCLI ec2 describe-regions --query 'Regions[].RegionName' \ From afb908f19055b6c9bad5418bdb3b0773a090d7a0 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Fri, 3 Apr 2020 17:54:25 +0200 Subject: [PATCH 035/104] Improved policy handling on extra716 --- checks/check_extra716 | 49 ++++++++++++++++++++++++++++--------------- checks/check_extra785 | 19 +++++++++++------ 2 files changed, 45 insertions(+), 23 deletions(-) diff --git a/checks/check_extra716 b/checks/check_extra716 index eb040432..9badab22 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -19,7 +19,7 @@ CHECK_ALTERNATE_check716="extra716" extra716(){ # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to the public URI endpoint. # That is from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION= + TEST_ES_AUTHENTICATION=1 # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do @@ -38,12 +38,27 @@ extra716(){ CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then - CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")))' ) - CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^0.0.0.0/0|^0.0.0.0/8")))' ) - CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" == "*")') - CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=$(cat $TEMP_POLICY_FILE | jq 'select (.Statement[0] .Condition.IpAddress."aws:SourceIp" | select( test("^192.168.[0-9]|^10.0.[0-9]|^172.(1[6-9]|2[0-9]|3[01])|^127.0.0.1")| not))' ) + # get content of IpAddress."aws:SourceIp" and get a clean list + LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') + for condition_ip in $LIST_CONDITION_IPS;do + CONDITION_HAS_PRIVATE_IP=$(echo $condition_ip | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') + if [[ $CONDITION_HAS_PRIVATE_IP ]];then + CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_PUBLIC_IP=$(echo $condition_ip | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') + if [[ $CONDITION_HAS_PUBLIC_IP ]];then + CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_ZERO_NET=$(echo $condition_ip | grep -E '^(0\.0\.0\.0|\*)') + CONDITION_HAS_STAR=$(echo $condition_ip | grep -E '^\*') + done + CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} + 
CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$CONDITION_HAS_ZERO_NET + CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR fi - if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + #Prowler will check to read indices or kibaba status if no conditions, condition IP is *, 0.0.0.0/0, 0.0.0.0/8 or any public IP. if [[ $TEST_ES_AUTHENTICATION ]];then # check for REST API on port 443 CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") @@ -65,25 +80,25 @@ extra716(){ if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\") AUTH NOT TESTED" "$regx" fi - if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network 0.0.0.0) AUTH NOT TESTED" "$regx" fi - if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network \"*\") AUTH NOT TESTED" "$regx" fi - fi - elif [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]];then - if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP ]];then - textInfo "$regx: Amazon ES domain $domain policy allows access from a RFC1918 PRIVATE IP or CIDR" "$regx" - fi - if [[ $CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP ]];then - textInfo "$regx: Amazon ES domain $domain policy allows access from a PUBLIC IP or CIDR" "$regx" + if [[ 
$CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) AUTH NOT TESTED" "$regx" + fi fi else - textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" + else + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + fi fi + rm -f $TEMP_POLICY_FILE fi - rm -f $TEMP_POLICY_FILE done else textInfo "$regx: No Amazon ES domain found" "$regx" diff --git a/checks/check_extra785 b/checks/check_extra785 index 243693b7..7e22a689 100644 --- a/checks/check_extra785 +++ b/checks/check_extra785 @@ -16,17 +16,24 @@ CHECK_SCORED_extra785="NOT_SCORED" CHECK_TYPE_extra785="EXTRA" CHECK_ALTERNATE_check785="extra785" +# NOTE! 
+# API does not properly shows if an update is available while it is a new version available +# that can be done using the Console but not the API, not sure if it is a bug +# I have to investigate further + extra785(){ for regx in $REGIONS; do LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) if [[ $LIST_OF_DOMAINS ]]; then for domain in $LIST_OF_DOMAINS;do - CHECK_IF_UPDATE_AVAILABLE=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.ServiceSoftwareOptions.UpdateAvailable' --output text|grep -i true) - if [[ $CHECK_IF_UPDATE_AVAILABLE ]];then - textInfo "$regx: Amazon ES domain $domain has updates available" "$regx" - else - textPass "$regx: Amazon ES domain $domain does not have have updates available" "$regx" - fi + CHECK_IF_UPDATE_AVAILABLE_AND_VERSION=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[ServiceSoftwareOptions.UpdateAvailable,ElasticsearchVersion]' --output text) + while read update_status es_version;do + if [[ $update_status != "False" ]];then + textInfo "$regx: Amazon ES domain $domain v$es_version has updates available " "$regx" + else + textPass "$regx: Amazon ES domain $domain v$es_version does not have have updates available" "$regx" + fi + done < <(echo $CHECK_IF_UPDATE_AVAILABLE_AND_VERSION) done else textInfo "$regx: No Amazon ES domain found" "$regx" From b5e1c9002a933c51502116b6fc6a84d475b7b74c Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Fri, 3 Apr 2020 17:54:55 +0200 Subject: [PATCH 036/104] Improved policy handling on extra716 --- checks/check_extra716 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra716 b/checks/check_extra716 index 9badab22..afdb8dd4 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -19,7 +19,7 @@ CHECK_ALTERNATE_check716="extra716" extra716(){ # if TEST_AUTHENTICATION 
has a value Prowler will try to access each ElasticSearch server to the public URI endpoint. # That is from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION=1 + TEST_ES_AUTHENTICATION= # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do From bd432fed920d0587c1ef9a7227a7157db86990fa Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 7 Apr 2020 16:46:46 +0200 Subject: [PATCH 037/104] New check for Metadata Service Version 2 #413 --- checks/check_extra786 | 52 +++++++++++++++++++++++++++++++++++++++++++ groups/group7_extras | 2 +- 2 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 checks/check_extra786 diff --git a/checks/check_extra786 b/checks/check_extra786 new file mode 100644 index 00000000..f7093bcf --- /dev/null +++ b/checks/check_extra786 @@ -0,0 +1,52 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+CHECK_ID_extra786="7.86" +CHECK_TITLE_extra786="[extra786] Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra786="NOT_SCORED" +CHECK_TYPE_extra786="EXTRA" +CHECK_ALTERNATE_check770="extra786" + +extra786(){ + for regx in $REGIONS; do + TEMP_EXTRA786_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA779.XXXXXXXXXX) + $AWSCLI ec2 describe-instances $PROFILE_OPT --region $regx \ + --query 'Reservations[*].Instances[*].{HttpTokens:MetadataOptions.HttpTokens,HttpEndpoint:MetadataOptions.HttpEndpoint,InstanceId:InstanceId}' \ + --output text --max-items $MAXITEMS > $TEMP_EXTRA786_FILE + # if the file contains data, there are instances in that region + if [[ -s "$TEMP_EXTRA786_FILE" ]];then + # here we read content from the file fields instanceid httptokens_status httpendpoint + while read httpendpoint httptokens_status instanceid ; do + #echo i:$instanceid tok:$httptokens_status end:$httpendpoint + if [[ "$httpendpoint" == "enabled" && "$httptokens_status" == "required" ]];then + textPass "$regx: EC2 Instance $instanceid has IMDSv2 enabled and required" "$regx" + elif [[ "$httpendpoint" == "disabled" ]];then + textInfo "$regx: EC2 Instance $instanceid has HTTP endpoint access to metadata service disabled" "$regx" + else + textFail "$regx: EC2 Instance $instanceid has IMDSv2 disabled or not required" "$regx" + fi + done < <(cat $TEMP_EXTRA786_FILE) + else + textInfo "$regx: no EC2 Instances found" "$regx" + fi + rm -fr $TEMP_EXTRA786_FILE + done +} + +# Remediation: + +# aws ec2 modify-instance-metadata-options \ +# --instance-id i-1234567898abcdef0 \ +# --http-tokens required \ +# --http-endpoint enabled + +# More information here https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html diff --git a/groups/group7_extras b/groups/group7_extras index bbb27d31..57062cb7 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -15,7 
+15,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' GROUP_TITLE[7]='Extras - all non CIS specific checks - [extras] ****************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called -GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785' +GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` From 92e1f17a8049f7d21b8f0d8ff8542c7d2ebf3ab3 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Tue, 7 Apr 2020 16:08:07 +0100 Subject: [PATCH 038/104] Adds 'json-asff' 
and 'securityhub' output modes json-asff mode outputs JSON, similar to the standard 'json' mode with one check per line, but in AWS Security Finding Format - used by AWS Security Hub Currently uses a generic Type, Resources and ProductArn value, but sets the Id to a unique value that includes the details of the message, in order to separate out checks that run against multiple resources and output one result per resource per check. This ensures that findings can be updated, should the resource move in or out of compliance securityhub mode generates the ASFF JSON and then passes it to an 'aws securityhub batch-import-findings' call, once per resource per check. Output to the screen is similar to the standard mode, but prints whether or not the finding was submitted successfully Fixes #524 --- checks/check13 | 6 +- include/colors | 6 +- include/outputs | 202 +++++++++++++++++++++++++++--------------------- include/whoami | 2 +- prowler | 12 +-- 5 files changed, 127 insertions(+), 101 deletions(-) diff --git a/checks/check13 b/checks/check13 index aeab1944..10289768 100644 --- a/checks/check13 +++ b/checks/check13 @@ -17,17 +17,17 @@ CHECK_ALTERNATE_check103="check13" check13(){ # "Ensure credentials unused for 90 days or greater are disabled (Scored)" COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep true | awk '{ print $1 }') - # Only check Password last used for users with password enabled + # Only check Password last used for users with password enabled if [[ $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED ]]; then for i in $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED; do DATEUSED=$($AWSCLI iam list-users --query "Users[?UserName=='$i'].PasswordLastUsed" --output text $PROFILE_OPT --region $REGION | cut -d'T' -f1) if [ "$DATEUSED" == "" ] then - textFail "User \"$i\" has not logged in during the last 90 days " + textFail "User \"$i\" has not logged in during the last 90 days" else HOWOLDER=$(how_older_from_today 
$DATEUSED) if [ $HOWOLDER -gt "90" ];then - textFail "User \"$i\" has not logged in during the last 90 days " + textFail "User \"$i\" has not logged in during the last 90 days" else textPass "User \"$i\" found with credentials used in the last 90 days" fi diff --git a/include/colors b/include/colors index 2b7175ce..68ac32a4 100644 --- a/include/colors +++ b/include/colors @@ -11,15 +11,15 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -if [[ $MODE != "mono" && $MODE != "text" && $MODE != "csv" && $MODE != "json" ]]; then +if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" && "$MODE" != "securityhub" ]]; then echo "" - echo "$OPTRED ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, or csv." + echo "$OPTRED ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json, json-asff or securityhub." usage EXITCODE=1 exit $EXITCODE fi -if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" ]]; then +if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then MONOCHROME=1 fi diff --git a/include/outputs b/include/outputs index b18ac683..6af14778 100644 --- a/include/outputs +++ b/include/outputs @@ -18,41 +18,22 @@ textPass(){ fi PASS_COUNTER=$((PASS_COUNTER+1)) - if [[ "$MODE" == "csv" ]]; then + if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" || "$MODE" == "securityhub" ]]; then if [[ $2 ]]; then REPREGION=$2 else REPREGION=$REGION fi - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION + if [[ "$MODE" == "csv" ]]; then + echo 
"$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" + elif [[ "$MODE" == "json" ]]; then + generateJsonOutput "$1" "Pass" + elif [[ "$MODE" == "json-asff" ]]; then + generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL" + elif [[ "$MODE" == "securityhub" ]]; then + printf " $OK PASS!$NORMAL %s... " "$1" + aws securityhub batch-import-findings --findings "$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL")" | jq -M -r 'if .SuccessCount == 1 then "Successfully submitted finding" else "Failed to upload finding" end' fi - jq -M -c \ - --arg PROFILE "$PROFILE" \ - --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ - --arg TITLE_TEXT "$TITLE_TEXT" \ - --arg MESSAGE "$(echo -e "${1}" | sed -e 's/^[[:space:]]*//')" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ - --arg TITLE_ID "$TITLE_ID" \ - --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -n '{ - "Profile": $PROFILE, - "Account Number": $ACCOUNT_NUM, - "Control": $TITLE_TEXT, - "Message": $MESSAGE, - "Status": "Pass", - "Scored": $SCORED, - "Level": $ITEM_LEVEL, - "Control ID": $TITLE_ID, - "Region": $REPREGION, - "Timestamp": $TIMESTAMP, - }' else echo " $OK PASS!$NORMAL $1" fi @@ -63,41 +44,22 @@ textInfo(){ return fi - if [[ "$MODE" == "csv" ]]; then + if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 else REPREGION=$REGION fi - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION + if [[ "$MODE" == "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" + elif [[ "$MODE" == "json" ]]; then + generateJsonOutput "$1" "Info" + elif [[ "$MODE" == "json-asff" ]]; then 
+ generateJsonAsffOutput "$1" "NOT_AVAILABLE" "LOW" + elif [[ "$MODE" == "securityhub" ]]; then + printf " $NOTICE INFO! %s... $NORMAL" "$1" + aws securityhub batch-import-findings --findings "$(generateJsonAsffOutput "$1" "NOT_AVAILABLE" "LOW")" | jq -M -r 'if .SuccessCount == 1 then "Successfully submitted finding" else "Failed to upload finding" end' fi - jq -M -c \ - --arg PROFILE "$PROFILE" \ - --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ - --arg TITLE_TEXT "$TITLE_TEXT" \ - --arg MESSAGE "$(echo -e "${1}" | sed -e 's/^[[:space:]]*//')" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ - --arg TITLE_ID "$TITLE_ID" \ - --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -n '{ - "Profile": $PROFILE, - "Account Number": $ACCOUNT_NUM, - "Control": $TITLE_TEXT, - "Message": $MESSAGE, - "Status": "Info", - "Scored": $SCORED, - "Level": $ITEM_LEVEL, - "Control ID": $TITLE_ID, - "Region": $REPREGION, - "Timestamp": $TIMESTAMP, - }' else echo " $NOTICE INFO! $1 $NORMAL" fi @@ -106,41 +68,22 @@ textInfo(){ textFail(){ FAIL_COUNTER=$((FAIL_COUNTER+1)) EXITCODE=3 - if [[ "$MODE" == "csv" ]]; then + if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" || "$MODE" == "securityhub" ]]; then if [[ $2 ]]; then REPREGION=$2 else REPREGION=$REGION fi - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION + if [[ "$MODE" == "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" + elif [[ "$MODE" == "json" ]]; then + generateJsonOutput "$1" "Fail" + elif [[ "$MODE" == "json-asff" ]]; then + generateJsonAsffOutput "$1" "FAILED" "HIGH" + elif [[ "$MODE" == "securityhub" ]]; then + printf " $BAD FAIL! %s... 
$NORMAL" "$1" + aws securityhub batch-import-findings --findings "$(generateJsonAsffOutput "$1" "FAILED" "HIGH")" | jq -M -r 'if .SuccessCount == 1 then "Successfully submitted finding" else "Failed to upload finding" end' fi - jq -M -c \ - --arg PROFILE "$PROFILE" \ - --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ - --arg TITLE_TEXT "$TITLE_TEXT" \ - --arg MESSAGE "$(echo -e "${1}" | sed -e 's/^[[:space:]]*//')" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ - --arg TITLE_ID "$TITLE_ID" \ - --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -n '{ - "Profile": $PROFILE, - "Account Number": $ACCOUNT_NUM, - "Control": $TITLE_TEXT, - "Message": $MESSAGE, - "Status": "Fail", - "Scored": $SCORED, - "Level": $ITEM_LEVEL, - "Control ID": $TITLE_ID, - "Region": $REPREGION, - "Timestamp": $TIMESTAMP, - }' else echo " $BAD FAIL! $1 $NORMAL" fi @@ -179,7 +122,7 @@ textTitle(){ if [[ "$MODE" == "csv" ]]; then >&2 echo "$TITLE_ID $TITLE_TEXT" - elif [[ "$MODE" == "json" ]]; then + elif [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then : else if [[ "$ITEM_SCORED" == "Scored" ]]; then @@ -189,3 +132,86 @@ textTitle(){ fi fi } + +generateJsonOutput(){ + local message=$1 + local status=$2 + jq -M -c \ + --arg PROFILE "$PROFILE" \ + --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ + --arg TITLE_TEXT "$TITLE_TEXT" \ + --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \ + --arg STATUS "$status" \ + --arg SCORED "$ITEM_SCORED" \ + --arg ITEM_LEVEL "$ITEM_LEVEL" \ + --arg TITLE_ID "$TITLE_ID" \ + --arg REPREGION "$REPREGION" \ + --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ + -n '{ + "Profile": $PROFILE, + "Account Number": $ACCOUNT_NUM, + "Control": $TITLE_TEXT, + "Message": $MESSAGE, + "Status": $STATUS, + "Scored": $SCORED, + "Level": $ITEM_LEVEL, + "Control ID": $TITLE_ID, + "Region": $REPREGION, + "Timestamp": $TIMESTAMP, + }' +} + +generateJsonAsffOutput(){ + # UNIQUE_ID must only contain characters from the unreserved 
characters set defined in section 2.3 of RFC-3986 + # Replace any successive non-conforming characters with a single underscore + local message=$1 + local status=$2 + local severity=$3 + jq -M -c \ + --arg PROFILE "$PROFILE" \ + --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ + --arg TITLE_TEXT "$TITLE_TEXT" \ + --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \ + --arg UNIQUE_ID "$(LC_ALL=C echo -e "${message}" | tr -cs '[:alnum:]._~-\n' '_')" \ + --arg STATUS "$status" \ + --arg SEVERITY "$severity" \ + --arg SCORED "$ITEM_SCORED" \ + --arg ITEM_LEVEL "$ITEM_LEVEL" \ + --arg TITLE_ID "$TITLE_ID" \ + --arg REPREGION "$REPREGION" \ + --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ + --arg PROWLER_VERSION "$PROWLER_VERSION" \ +-n '{ + "SchemaVersion": "2018-10-08", + "Id": "prowler-\($TITLE_ID)-\($ACCOUNT_NUM)-\($REPREGION)-\($UNIQUE_ID)", + "ProductArn": "arn:aws:securityhub:\($REPREGION):\($ACCOUNT_NUM):product/\($ACCOUNT_NUM)/default", + "ProductFields": { + "ProviderName": "Prowler", + "ProviderVersion": $PROWLER_VERSION + }, + "GeneratorId": "prowler-\($PROWLER_VERSION)", + "AwsAccountId": $ACCOUNT_NUM, + "Types": [ + "Software and Configuration Checks" + ], + "FirstObservedAt": $TIMESTAMP, + "UpdatedAt": $TIMESTAMP, + "CreatedAt": $TIMESTAMP, + "Severity": { + "Label": $SEVERITY + }, + "Title": $TITLE_TEXT, + "Description": $MESSAGE, + "Resources": [ + { + "Type": "AwsAccount", + "Id": "AWS: : : :Account:\($ACCOUNT_NUM)", + "Partition": "aws", + "Region": $REPREGION + } + ], + "Compliance": { + "Status": $STATUS + } + }' +} diff --git a/include/whoami b/include/whoami index 0fa8479a..abe59cc5 100644 --- a/include/whoami +++ b/include/whoami @@ -28,7 +28,7 @@ getWhoami(){ printCsvHeader textTitle "0.0" "Show report generation info" "NOT_SCORED" "SUPPORT" textInfo "ARN: $CALLER_ARN TIMESTAMP: $SCRIPT_START_TIME" - elif [[ "$MODE" == "json" ]]; then + elif [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then : else echo "" diff --git a/prowler 
b/prowler index 2486808c..01eb9a35 100755 --- a/prowler +++ b/prowler @@ -64,7 +64,7 @@ USAGE: -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, csv (separator is ","; data is on stdout; progress on stderr) + -M output mode: text (default), mono, json, json-asff, securityhub, csv (separator is ","; data is on stdout; progress on stderr) -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) @@ -77,12 +77,12 @@ USAGE: -s show scoring report -x specify external directory with custom checks (i.e. /my/own/checks, files must start by "check") -q suppress info messages and passing test output - -A account id for the account where to assume a role, requires -R and -T + -A account id for the account where to assume a role, requires -R and -T (i.e.: 123456789012) - -R role name to assume in the account, requires -A and -T + -R role name to assume in the account, requires -A and -T (i.e.: ProwlerRole) -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T - (i.e.: 43200) + (i.e.: 43200) -h this help " exit @@ -395,8 +395,8 @@ if [[ $PRINTGROUPSONLY == "1" ]]; then exit $EXITCODE fi -# Check that jq is installed for JSON output -if [[ $MODE == "json" ]]; then +# Check that jq is installed for JSON outputs +if [[ "$MODE" == "json" || "$MODE" == "json-asff" || "$MODE" == "securityhub" ]]; then . 
$PROWLER_DIR/include/jq_detector fi From 9c4e62964714029ca70d75eb8ce36e529869a941 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 7 Apr 2020 20:28:38 +0200 Subject: [PATCH 039/104] Fixed typo in extra786 --- checks/check_extra786 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra786 b/checks/check_extra786 index f7093bcf..95b709a7 100644 --- a/checks/check_extra786 +++ b/checks/check_extra786 @@ -14,7 +14,7 @@ CHECK_ID_extra786="7.86" CHECK_TITLE_extra786="[extra786] Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra786="NOT_SCORED" CHECK_TYPE_extra786="EXTRA" -CHECK_ALTERNATE_check770="extra786" +CHECK_ALTERNATE_check786="extra786" extra786(){ for regx in $REGIONS; do From 4ff685635e8c429810b935a6693ab9064d11dfe9 Mon Sep 17 00:00:00 2001 From: Patrick Downey Date: Wed, 8 Apr 2020 11:54:15 +0100 Subject: [PATCH 040/104] Use TrailARN property to query get-event-selectors This will work to query cloudtrail's that are in different accounts. e.g. in the case of organisation managed cloudtrails. 
--- checks/check_extra720 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra720 b/checks/check_extra720 index 1bf63cc5..a3a97c9a 100644 --- a/checks/check_extra720 +++ b/checks/check_extra720 @@ -22,7 +22,7 @@ extra720(){ LIST_OF_FUNCTIONS=$($AWSCLI lambda list-functions $PROFILE_OPT --region $regx --query Functions[*].FunctionName --output text) if [[ $LIST_OF_FUNCTIONS ]]; then for lambdafunction in $LIST_OF_FUNCTIONS;do - LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].Name --output text) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].TrailARN --output text) if [[ $LIST_OF_TRAILS ]]; then for trail in $LIST_OF_TRAILS; do FUNCTION_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --region $regx --query "EventSelectors[*].DataResources[?Type == \`AWS::Lambda::Function\`].Values" --output text |xargs -n1| grep -E "^arn:aws:lambda.*function:$lambdafunction$|^arn:aws:lambda$") From 84711d1ef511e344459d65352ff679572802ae3c Mon Sep 17 00:00:00 2001 From: Patrick Downey Date: Wed, 8 Apr 2020 12:38:20 +0100 Subject: [PATCH 041/104] Remove HomeRegion predicate from describe-trails to look for cross-region trails too This will hopefully address #455 --- checks/check_extra720 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra720 b/checks/check_extra720 index a3a97c9a..dacee06d 100644 --- a/checks/check_extra720 +++ b/checks/check_extra720 @@ -22,7 +22,7 @@ extra720(){ LIST_OF_FUNCTIONS=$($AWSCLI lambda list-functions $PROFILE_OPT --region $regx --query Functions[*].FunctionName --output text) if [[ $LIST_OF_FUNCTIONS ]]; then for lambdafunction in $LIST_OF_FUNCTIONS;do - LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].TrailARN --output text) + 
LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[].TrailARN --output text) if [[ $LIST_OF_TRAILS ]]; then for trail in $LIST_OF_TRAILS; do FUNCTION_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --region $regx --query "EventSelectors[*].DataResources[?Type == \`AWS::Lambda::Function\`].Values" --output text |xargs -n1| grep -E "^arn:aws:lambda.*function:$lambdafunction$|^arn:aws:lambda$") From 6ea37b05cafe5789c750018a9dfd853b4801bd6a Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 8 Apr 2020 14:00:12 +0200 Subject: [PATCH 042/104] Improvements and new checks for elasticsearch --- checks/check_extra716 | 56 +++++++--------------- checks/check_extra779 | 55 ++-------------------- checks/check_extra786 | 2 +- checks/check_extra787 | 81 ++++++++++++++++++++++++++++++++ checks/check_extra788 | 91 ++++++++++++++++++++++++++++++++++++ groups/group14_elasticsearch | 4 +- include/connection_tests | 5 +- 7 files changed, 198 insertions(+), 96 deletions(-) create mode 100644 checks/check_extra787 create mode 100644 checks/check_extra788 diff --git a/checks/check_extra716 b/checks/check_extra716 index afdb8dd4..d1615857 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -17,10 +17,6 @@ CHECK_TYPE_extra716="EXTRA" CHECK_ALTERNATE_check716="extra716" extra716(){ - # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to the public URI endpoint. 
- # That is from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION= - # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) @@ -40,17 +36,17 @@ extra716(){ if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then # get content of IpAddress."aws:SourceIp" and get a clean list LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') - for condition_ip in $LIST_CONDITION_IPS;do - CONDITION_HAS_PRIVATE_IP=$(echo $condition_ip | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') + for condition_ip in "${LIST_CONDITION_IPS}";do + CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') if [[ $CONDITION_HAS_PRIVATE_IP ]];then CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) fi - CONDITION_HAS_PUBLIC_IP=$(echo $condition_ip | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') + CONDITION_HAS_PUBLIC_IP=$(echo "${condition_ip}" | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') if [[ $CONDITION_HAS_PUBLIC_IP ]];then CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) fi - CONDITION_HAS_ZERO_NET=$(echo $condition_ip | grep -E '^(0\.0\.0\.0|\*)') - CONDITION_HAS_STAR=$(echo $condition_ip | grep -E '^\*') + CONDITION_HAS_ZERO_NET=$(echo "${condition_ip}" | grep -E '^(0\.0\.0\.0)') + CONDITION_HAS_STAR=$(echo "${condition_ip}" | grep -E '^\*') done CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} @@ -58,38 +54,18 @@ extra716(){ CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR fi if [[ 
$CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then - #Prowler will check to read indices or kibaba status if no conditions, condition IP is *, 0.0.0.0/0, 0.0.0.0/8 or any public IP. - if [[ $TEST_ES_AUTHENTICATION ]];then - # check for REST API on port 443 - CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") - httpStatus $CHECH_ES_HTTPS - if [[ $CHECH_ES_HTTPS -eq "200" ]];then - textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - fi - # check for Kibana on port 443 - CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") - httpStatus $CHECH_KIBANA_HTTPS - if [[ $CHECH_KIBANA_HTTPS -eq "200" ]];then - textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - fi - else - if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then - textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\") AUTH NOT TESTED" "$regx" - fi - if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then - textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network 0.0.0.0) AUTH NOT TESTED" "$regx" - fi - if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then - textFail "$regx: Amazon ES domain $domain 
policy allows access (Principal: \"*\" and network \"*\") AUTH NOT TESTED" "$regx" - fi - if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then - textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) AUTH NOT TESTED" "$regx" - fi + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\") - use extra788 to test AUTH" "$regx" fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network 0.0.0.0) - use extra788 to test AUTH" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network \"*\") - use extra788 to test AUTH" "$regx" + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) - use extra788 to test AUTH" "$regx" + fi else if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" diff --git a/checks/check_extra779 b/checks/check_extra779 index 627bc51a..468b127d 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -17,14 +17,10 @@ CHECK_TYPE_extra779="EXTRA" CHECK_ALTERNATE_check779="extra779" extra779(){ - # if TEST_AUTHENTICATION has a value Prowler will try to access each ElasticSearch server to port: - # 9200 API, 9300 Communcation and 5601 Kibana to figure out if 
authentication is enabled. - # That is from the host where Prowler is running and will try to read indices or get kibana status - TEST_ES_AUTHENTICATION= ES_API_PORT="9200" ES_DATA_PORT="9300" ES_KIBANA_PORT="5601" - + # Test connectivity and authentication is performed by check extra787 for regx in $REGIONS; do # crate a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ @@ -38,58 +34,15 @@ extra779(){ $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA779_FILE # in case of exposed instances it does access checks if [[ -s "$TEMP_EXTRA779_FILE" ]];then - while read instance eip ; do - if [[ $TEST_ES_AUTHENTICATION ]];then - if [[ "$eip" != "None" ]];then - # check for Elasticsearch on port $ES_API_PORT, rest API HTTP. 
- CHECH_HTTP_ES_API=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_API_PORT/_cat/indices") - httpStatus $CHECH_HTTP_ES_API - if [[ $CHECH_HTTP_ES_API -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" - fi - - # check for port $ES_DATA_PORT TCP, this is the communication port, not: - # test_tcp_connectivity is in include/os_detector - # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) - CHECH_HTTP_ES_DATA=$(test_tcp_connectivity $eip $ES_DATA_PORT 2) - # Using HTTP error codes here as well to reuse httpStatus function - # codes for better handling, so 200 is open and 000 is not responding - httpStatus $CHECH_HTTP_ES_DATA - if [[ $CHECH_HTTP_ES_DATA -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" - fi - - # check for Kibana on port $ES_KIBANA_PORT - CHECH_HTTP_ES_KIBANA=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_KIBANA_PORT/api/status") - httpStatus $CHECH_HTTP_ES_KIBANA - if [[ $CHECH_AUTH_5601 -eq "200" ]];then - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" - else - textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" - fi - fi - else - if [[ "$eip" == "None" ]];then - textInfo "$regx: Found instance 
$instance with private IP on Security Group: $sg" "$regx" - else - textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports $ES_API_PORT/$ES_DATA_PORT/$ES_KIBANA_PORT" "$regx" - fi - fi + while read instance eip ; do if [[ "$eip" == "None" ]];then textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" + else + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports - use extra787 to test AUTH" "$regx" fi - # done < <(cat $TEMP_EXTRA779_FILE | grep -v None$) done < <(cat $TEMP_EXTRA779_FILE) - # while read instance eip ; do - # textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" - # done < <(cat $TEMP_EXTRA779_FILE | grep None$) fi rm -rf $TEMP_EXTRA779_FILE - #textFail "$regx: Found Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch ports" "$regx" done else textPass "$regx: No Security Groups found open to 0.0.0.0/0 for Elasticsearch/Kibana ports" "$regx" diff --git a/checks/check_extra786 b/checks/check_extra786 index 95b709a7..dd9f378e 100644 --- a/checks/check_extra786 +++ b/checks/check_extra786 @@ -18,7 +18,7 @@ CHECK_ALTERNATE_check786="extra786" extra786(){ for regx in $REGIONS; do - TEMP_EXTRA786_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA779.XXXXXXXXXX) + TEMP_EXTRA786_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA786.XXXXXXXXXX) $AWSCLI ec2 describe-instances $PROFILE_OPT --region $regx \ --query 'Reservations[*].Instances[*].{HttpTokens:MetadataOptions.HttpTokens,HttpEndpoint:MetadataOptions.HttpEndpoint,InstanceId:InstanceId}' \ --output text --max-items $MAXITEMS > $TEMP_EXTRA786_FILE diff --git a/checks/check_extra787 b/checks/check_extra787 new file mode 100644 index 00000000..085d424e --- /dev/null +++ b/checks/check_extra787 @@ -0,0 +1,81 @@ +#!/usr/bin/env bash + +# Prowler - the handy 
cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra787="7.87" +CHECK_TITLE_extra787="[extra787] Check connection and authentication for public exposed Elasticsearch/Kibana ports" +CHECK_SCORED_extra787="NOT_SCORED" +CHECK_TYPE_extra787="EXTRA" +CHECK_ALTERNATE_check787="extra787" + +extra787(){ + # Prowler will try to access each ElasticSearch server to port: + # 9200 API, 9300 Communcation and 5601 Kibana to figure out if authentication is enabled. 
+ # That is from the host where Prowler is running and will try to read indices or get kibana status + ES_API_PORT="9200" + ES_DATA_PORT="9300" + ES_KIBANA_PORT="5601" + + for regx in $REGIONS; do + # crate a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT + SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ + --query "SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=\`$ES_API_PORT\` && ToPort>=\`$ES_API_PORT\`) || (FromPort<=\`$ES_DATA_PORT\` && ToPort>=\`$ES_DATA_PORT\`) || (FromPort<=\`$ES_KIBANA_PORT\` && ToPort>=\`$ES_KIBANA_PORT\`)) && (contains(IpRanges[].CidrIp, \`0.0.0.0/0\`) || contains(Ipv6Ranges[].CidrIpv6, \`::/0\`))]) > \`0\`].{GroupId:GroupId}") + # in case of open security groups goes through each one + if [[ $SG_LIST ]];then + for sg in $SG_LIST;do + # temp file store the list of instances IDs and public IP address if found + TEMP_EXTRA787_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA787.XXXXXXXXXX) + # finds instances with that open security group attached and get its public ip address (if it has one) + $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA787_FILE + # in case of exposed instances it does access checks + if [[ -s "$TEMP_EXTRA787_FILE" ]];then + while read instance eip ; do + if [[ "$eip" != "None" ]];then + # check for Elasticsearch on port $ES_API_PORT, rest API HTTP. 
+ CHECH_HTTP_ES_API=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_API_PORT/_cat/indices") + httpStatus $CHECH_HTTP_ES_API + if [[ $CHECH_HTTP_ES_API -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" + fi + # check for port $ES_DATA_PORT TCP, this is the communication port, not: + # test_tcp_connectivity is in include/os_detector + # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) + CHECH_HTTP_ES_DATA=$(test_tcp_connectivity $eip $ES_DATA_PORT 2) + # Using HTTP error codes here as well to reuse httpStatus function + # codes for better handling, so 200 is open and 000 is not responding + httpStatus $CHECH_HTTP_ES_DATA + if [[ $CHECH_HTTP_ES_DATA -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" + fi + # check for Kibana on port $ES_KIBANA_PORT + CHECH_HTTP_ES_KIBANA=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_KIBANA_PORT/api/status") + httpStatus $CHECH_HTTP_ES_KIBANA + if [[ $CHECH_AUTH_5601 -eq "200" ]];then + textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" + fi + else + textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" 
"$regx" + fi + done < <(cat $TEMP_EXTRA787_FILE) + fi + rm -rf $TEMP_EXTRA787_FILE + done + else + textPass "$regx: No Security Groups found open to 0.0.0.0/0 for Elasticsearch/Kibana ports" "$regx" + fi + done +} diff --git a/checks/check_extra788 b/checks/check_extra788 new file mode 100644 index 00000000..c3f8c046 --- /dev/null +++ b/checks/check_extra788 @@ -0,0 +1,91 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra788="7.88" +CHECK_TITLE_extra788="[extra788] Check connection and authentication for publicly exposed Amazon Elasticsearch Service (ES) domains" +CHECK_SCORED_extra788="NOT_SCORED" +CHECK_TYPE_extra788="EXTRA" +CHECK_ALTERNATE_check788="extra788" + +extra788(){ + # Prowler will try to access each ElasticSearch server to the public URI endpoint. 
+ # That is from the host where Prowler is running and will try to read indices or get kibana status + + # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" + for regx in $REGIONS; do + LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) + if [[ $LIST_OF_DOMAINS ]]; then + for domain in $LIST_OF_DOMAINS;do + TEMP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.policy.XXXXXXXXXX) + # get endpoint or vpc endpoints + ES_DOMAIN_ENDPOINT=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[Endpoint || Endpoints]' --output text) + # If the endpoint starts with "vpc-" it is in a VPC then it is fine. + if [[ "$ES_DOMAIN_ENDPOINT" =~ ^vpc-* ]];then + ES_DOMAIN_VPC=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.VPCOptions.VPCId' --output text) + textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" + else + $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null + CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') + CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then + # get content of IpAddress."aws:SourceIp" and get a clean list + LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') + for condition_ip in "${LIST_CONDITION_IPS}";do + CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') + if [[ $CONDITION_HAS_PRIVATE_IP ]];then + CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_PUBLIC_IP=$(echo "${condition_ip}" | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') + if [[ $CONDITION_HAS_PUBLIC_IP ]];then + CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) + fi + CONDITION_HAS_ZERO_NET=$(echo "${condition_ip}" | grep -E '^(0\.0\.0\.0)') + CONDITION_HAS_STAR=$(echo "${condition_ip}" | grep -E '^\*') + done + CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} + CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$CONDITION_HAS_ZERO_NET + CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR + fi + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + #Prowler will check to read indices or kibaba status if no conditions, condition IP is *, 0.0.0.0/0, 0.0.0.0/8 or any public IP. 
+ # check for REST API on port 443 + CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") + httpStatus $CHECH_ES_HTTPS + if [[ $CHECH_ES_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + # check for Kibana on port 443 + CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") + httpStatus $CHECH_KIBANA_HTTPS + if [[ $CHECH_KIBANA_HTTPS -eq "200" ]];then + textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + else + textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" + fi + else + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then + textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" + else + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + fi + fi + rm -f $TEMP_POLICY_FILE + fi + done + else + textInfo "$regx: No Amazon ES domain found" "$regx" + fi + done +} diff --git a/groups/group14_elasticsearch b/groups/group14_elasticsearch index e046981f..22ffbb4c 100644 --- a/groups/group14_elasticsearch +++ b/groups/group14_elasticsearch @@ -13,6 +13,6 @@ GROUP_ID[14]='elasticsearch' GROUP_NUMBER[14]='14.0' -GROUP_TITLE[14]='Elasticsearch related security checks - [elasticsearch] ***************' +GROUP_TITLE[14]='Elasticsearch related security 
checks - [elasticsearch] *******' GROUP_RUN_BY_DEFAULT[14]='N' # run it when execute_all is called -GROUP_CHECKS[14]='extra715,extra716,extra779,extra780,extra781,extra782,extra783,extra784,extra785' +GROUP_CHECKS[14]='extra715,extra716,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra787,extra788' \ No newline at end of file diff --git a/include/connection_tests b/include/connection_tests index 632be16f..a34fb46c 100644 --- a/include/connection_tests +++ b/include/connection_tests @@ -12,9 +12,10 @@ # specific language governing permissions and limitations under the License. -# Functions to connection responses initially used for Elasticsearch related checks +# Function test_tcp_connectivity is in include/os_detector -httpStatus(){ +# Functions to connection responses initially used for Elasticsearch related checks +httpStatus(){ case $1 in 000) SERVER_RESPONSE="000 Not responding" ;; 200) SERVER_RESPONSE="200 Successful" ;; From effc3eb14d75e4c6fd24e8921380742d61dfe5c2 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 8 Apr 2020 14:06:11 +0200 Subject: [PATCH 043/104] Added new checks to group extras --- checks/check_extra787 | 2 +- checks/check_extra788 | 2 +- groups/group7_extras | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/checks/check_extra787 b/checks/check_extra787 index 085d424e..6f867902 100644 --- a/checks/check_extra787 +++ b/checks/check_extra787 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra787="7.87" -CHECK_TITLE_extra787="[extra787] Check connection and authentication for public exposed Elasticsearch/Kibana ports" +CHECK_TITLE_extra787="[extra787] Check connection and authentication for Internet exposed Elasticsearch/Kibana ports" CHECK_SCORED_extra787="NOT_SCORED" CHECK_TYPE_extra787="EXTRA" CHECK_ALTERNATE_check787="extra787" diff --git a/checks/check_extra788 b/checks/check_extra788 index c3f8c046..f2258843 100644 --- a/checks/check_extra788 +++ b/checks/check_extra788 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra788="7.88" -CHECK_TITLE_extra788="[extra788] Check connection and authentication for publicly exposed Amazon Elasticsearch Service (ES) domains" +CHECK_TITLE_extra788="[extra788] Check connection and authentication for Internet exposed Amazon Elasticsearch Service (ES) domains" CHECK_SCORED_extra788="NOT_SCORED" CHECK_TYPE_extra788="EXTRA" CHECK_ALTERNATE_check788="extra788" diff --git a/groups/group7_extras b/groups/group7_extras index 57062cb7..526df553 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -15,7 +15,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' GROUP_TITLE[7]='Extras - all non CIS specific checks - [extras] ****************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called 
-GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786' +GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786,extra787,extra788' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` From fc83a9896c7c8005b83400a4cb3a344d1fcb1e5d Mon Sep 17 00:00:00 2001 From: Patrick Downey Date: Wed, 8 Apr 2020 13:27:09 +0100 Subject: [PATCH 044/104] Use TrailARN property to query get-event-selectors in checks_extra725 This will work to query cloudtrail's that are in different accounts. e.g. 
in the case of organisation managed cloudtrails. --- checks/check_extra725 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/checks/check_extra725 b/checks/check_extra725 index 257a3d8e..259782a9 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -33,15 +33,15 @@ extra725(){ # now create a list with all trails available and their region TEMP_TRAILS_LIST_FILE=$(mktemp -t prowler.trails-list-XXXXXX) for regx in $REGIONS; do - $AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].[Name,HomeRegion] --output text >> $TEMP_TRAILS_LIST_FILE + $AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].[TrailARN,HomeRegion] --output text >> $TEMP_TRAILS_LIST_FILE done # look for buckets being logged per trail and create a list with them TEMP_BUCKETS_LOGGING_LIST_FILE=$(mktemp -t prowler.buckets-logging-list-XXXXXX) while IFS='' read -r LINE || [[ -n "${LINE}" ]]; do TRAIL_REGION=$(echo "${LINE}" | awk '{ print $2 }') - TRAIL_NAME=$(echo "${LINE}" | awk '{ print $1 }') - BUCKETS_OBJECT_LOGGING_ENABLED=$($AWSCLI cloudtrail get-event-selectors --trail-name "${TRAIL_NAME}" $PROFILE_OPT --region $TRAIL_REGION --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1 |cut -d: -f 6|sed 's/\///g') + TRAIL_ARN=$(echo "${LINE}" | awk '{ print $1 }') + BUCKETS_OBJECT_LOGGING_ENABLED=$($AWSCLI cloudtrail get-event-selectors --trail-name "${TRAIL_ARN}" $PROFILE_OPT --region $TRAIL_REGION --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1 |cut -d: -f 6|sed 's/\///g') echo $BUCKETS_OBJECT_LOGGING_ENABLED |tr " " "\n"|sort >> $TEMP_BUCKETS_LOGGING_LIST_FILE if [[ $BUCKETS_OBJECT_LOGGING_ENABLED ]]; then for bucket in $BUCKETS_OBJECT_LOGGING_ENABLED; do From 78ccc7d953e636c9f157776cc1926e2e2a951814 Mon Sep 17 00:00:00 2001 From: Patrick Downey Date: Wed, 8 Apr 
2020 13:28:18 +0100 Subject: [PATCH 045/104] Remove HomeRegion predicate from describe-trails in extras725 So we can look at cross-region trails too --- checks/check_extra725 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra725 b/checks/check_extra725 index 259782a9..3ba2af59 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -33,7 +33,7 @@ extra725(){ # now create a list with all trails available and their region TEMP_TRAILS_LIST_FILE=$(mktemp -t prowler.trails-list-XXXXXX) for regx in $REGIONS; do - $AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[?HomeRegion==\`$regx\`].[TrailARN,HomeRegion] --output text >> $TEMP_TRAILS_LIST_FILE + $AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[].[TrailARN,HomeRegion] --output text >> $TEMP_TRAILS_LIST_FILE done # look for buckets being logged per trail and create a list with them From b6adfd58ec37e54bbc9a44186f554d30c572d8ef Mon Sep 17 00:00:00 2001 From: Patrick Downey Date: Wed, 8 Apr 2020 15:38:43 +0100 Subject: [PATCH 046/104] Support cross-region and cross-account object-level cloudtrail logs for S3 Buckets that log to one or more trails are logged as `PASS!` for each trail they are associated with. Buckets that aren't associated with any trails are logged as `FAIL!` once. ``` ... PASS! : S3 bucket bucket-one has Object-level logging enabled in trails: arn:aws:cloudtrail:eu-west-2:123456789012:trail/central-trail PASS! : S3 bucket bucket-two has Object-level logging enabled in trails: arn:aws:cloudtrail:eu-west-2:9876543210989:trail/trail-two PASS! : S3 bucket bucket-two has Object-level logging enabled in trails: arn:aws:cloudtrail:eu-west-2:123456789012:trail/central-trail PASS! : S3 bucket bucket-three has Object-level logging enabled in trails: arn:aws:cloudtrail:eu-west-2:123456789012:trail/central-trail ... 
``` This change should also address #387 --- checks/check_extra725 | 65 +++++++++++++++++++------------------------ 1 file changed, 29 insertions(+), 36 deletions(-) diff --git a/checks/check_extra725 b/checks/check_extra725 index 3ba2af59..73d576ff 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -22,42 +22,35 @@ extra725(){ # "Check if S3 buckets have Object-level logging enabled in CloudTrail (Not Scored) (Not part of CIS benchmark)" textInfo "Looking for S3 Buckets Object-level logging information in all trails... " - # create a file with a list of all buckets - TEMP_BUCKET_LIST_FILE=$(mktemp -t prowler.bucket-list-XXXXXX) - $AWSCLI s3api list-buckets --query 'Buckets[*].{Name:Name}' $PROFILE_OPT --region $REGION --output text > $TEMP_BUCKET_LIST_FILE - if [ ! -s $TEMP_BUCKET_LIST_FILE ]; then + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query 'Buckets[*].{Name:Name}' --output text) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --query 'trailList[].TrailARN' --output text) + if [[ $LIST_OF_BUCKETS ]]; then + for bucketName in $LIST_OF_BUCKETS;do + if [[ $LIST_OF_TRAILS ]]; then + BUCKET_ENABLED_TRAILS=() + for trail in $LIST_OF_TRAILS; do + BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:aws:s3:::$bucketName/\S*$|^arn:aws:s3$") + if [[ $BUCKET_ENABLED_IN_TRAIL ]]; then + BUCKET_ENABLED_TRAILS+=($trail) + # textPass "$regx: S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx" + #else + # textFail "$regx: S3 bucket $bucketName has Object-level logging disabled" "$regx" + fi + done + + if [[ ${#BUCKET_ENABLED_TRAILS[@]} -gt 0 ]]; then + for trail in "${BUCKET_ENABLED_TRAILS[@]}"; do + textPass "$regx: S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx" + done + else + textFail "$regx: S3 
bucket $bucketName has Object-level logging disabled" "$regx" + fi + + else + textFail "$regx: S3 bucket $bucketName is not being recorded no CloudTrail found!" "$regx" + fi + done + else textInfo "$regx: No S3 buckets found" "$regx" - exit fi - - # now create a list with all trails available and their region - TEMP_TRAILS_LIST_FILE=$(mktemp -t prowler.trails-list-XXXXXX) - for regx in $REGIONS; do - $AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[].[TrailARN,HomeRegion] --output text >> $TEMP_TRAILS_LIST_FILE - done - - # look for buckets being logged per trail and create a list with them - TEMP_BUCKETS_LOGGING_LIST_FILE=$(mktemp -t prowler.buckets-logging-list-XXXXXX) - while IFS='' read -r LINE || [[ -n "${LINE}" ]]; do - TRAIL_REGION=$(echo "${LINE}" | awk '{ print $2 }') - TRAIL_ARN=$(echo "${LINE}" | awk '{ print $1 }') - BUCKETS_OBJECT_LOGGING_ENABLED=$($AWSCLI cloudtrail get-event-selectors --trail-name "${TRAIL_ARN}" $PROFILE_OPT --region $TRAIL_REGION --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1 |cut -d: -f 6|sed 's/\///g') - echo $BUCKETS_OBJECT_LOGGING_ENABLED |tr " " "\n"|sort >> $TEMP_BUCKETS_LOGGING_LIST_FILE - if [[ $BUCKETS_OBJECT_LOGGING_ENABLED ]]; then - for bucket in $BUCKETS_OBJECT_LOGGING_ENABLED; do - textPass "$regx: S3 bucket $bucket has Object-level logging enabled in trail $trail" "$regx" - done - fi - done < $TEMP_TRAILS_LIST_FILE - - # diff to get the ones that are not in any trail then they are not logging - BUCKETS_NOT_LOGGING=$(diff $TEMP_BUCKETS_LOGGING_LIST_FILE $TEMP_BUCKET_LIST_FILE | sed -n 's/^> //p') - if [[ $BUCKETS_NOT_LOGGING ]]; then - for bucket in $BUCKETS_NOT_LOGGING; do - textFail "$regx: S3 bucket $bucket has Object-level logging disabled" "$regx" - done - fi - # delete all temp files - rm -fr $TEMP_BUCKET_LIST_FILE $TEMP_TRAILS_LIST_FILE $TEMP_BUCKETS_LOGGING_LIST_FILE - } From 8f83da985a7ba129317fa2716583f1181bfc4686 Mon Sep 
17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 8 Apr 2020 18:00:54 +0200 Subject: [PATCH 047/104] PR #511 --- README.md | 43 +++++++++++++ checks/check_extra780 | 35 ---------- checks/check_extra789 | 57 +++++++++++++++++ checks/check_extra790 | 60 ++++++++++++++++++ .../prowler-multi-account-environment.png | Bin 0 -> 78154 bytes .../prowler-single-account-environment.png | Bin 0 -> 53454 bytes groups/group16_trustboundaries | 23 +++++++ 7 files changed, 183 insertions(+), 35 deletions(-) delete mode 100644 checks/check_extra780 create mode 100644 checks/check_extra789 create mode 100644 checks/check_extra790 create mode 100644 docs/images/prowler-multi-account-environment.png create mode 100644 docs/images/prowler-single-account-environment.png create mode 100644 groups/group16_trustboundaries diff --git a/README.md b/README.md index e0c5d0e5..087ba9ec 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ - [Forensics Ready Checks](#forensics-ready-checks) - [GDPR Checks](#gdpr-checks) - [HIPAA Checks](#hipaa-checks) +- [Trust Boundaries Checks](#trust-boundaries-checks) - [Add Custom Checks](#add-custom-checks) - [Third Party Integrations](#third-party-integrations) - [Full list of checks and groups](/LIST_OF_CHECKS_AND_GROUPS.md) @@ -41,6 +42,7 @@ It covers hardening and security best practices for all AWS regions related to t - Forensics related group of checks [forensics-ready] - GDPR [gdpr] Read more [here](#gdpr-checks) - HIPAA [hipaa] Read more [here](#hipaa-checks) +- Trust Boundaries [trustboundaries] Read more [here](#trustboundaries-checks) For a comprehensive list and resolution look at the guide on the link above. @@ -421,6 +423,47 @@ The `hipaa` group of checks uses existing and extra checks. 
To get a HIPAA repor ./prowler -g hipaa ``` +## Trust Boundaries Checks +### Definition and Terms +The term "trust boundary" is originating from the threat modelling process and the most popular contributor Adam Shostack and author of "Threat Modeling: Designing for Security" defines it as following ([reference](https://adam.shostack.org/uncover.html)): + +> Trust boundaries are perhaps the most subjective of all: these represent the border between trusted and untrusted elements. Trust is complex. You might trust your mechanic with your car, your dentist with your teeth, and your banker with your money, but you probably don't trust your dentist to change your spark plugs. + +AWS is made to be flexible for service links within and between different AWS accounts, we all know that. + +This group of checks helps to analyse a particular AWS account (subject) on existing links to other AWS accounts across various AWS services, in order to identify untrusted links. + +### Run +To give it a quick shot just call: +```sh +./prowler -g trustboundaries +``` +### Scenarios +Currently this check group supports two different scenarios: + 1. Single account environment: no action required, the configuration is happening automatically for you. + 2. Multi account environment: in case you environment has multiple trusted and known AWS accounts you maybe want to append them manually to [groups/group16_trustboundaries](groups/group16_trustboundaries) as a space separated list into `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable, then just run prowler. 
+ +### Coverage +Current coverage of Amazon Web Service (AWS) taken from [here](https://docs.aws.amazon.com/whitepapers/latest/aws-overview/introduction.html): +| Topic | Service | Trust Boundary | +|---------------------------------|------------|---------------------------------------------------------------------------| +| Networking and Content Delivery | Amazon VPC | VPC endpoints connections ([extra786](checks/check_extra786)) | +| | | VPC endpoints whitelisted principals ([extra787](checks/check_extra787)) | + +All ideas or recommendations to extend this group are very welcome [here](https://github.com/toniblyx/prowler/issues/new/choose). + +### Detailed Explanation of the Concept +The diagrams depict two common scenarios, single account and multi account environments. +Every circle represents one AWS account. +The dashed line represents the trust boundary, that separates trust and untrusted AWS accounts. +The arrow simply describes the direction of the trust, however the data can potentially flow in both directions. + +Single Account environment assumes that only the AWS account subject to this analysis is trusted. However there is a chance that two VPCs are existing within that one AWS account which are still trusted as a self reference. +![single-account-environment](/docs/images/prowler-single-account-environment.png) + +Multi Account environments assumes a minimum of two trusted or known accounts. For this particular example all trusted and known accounts will be tested. Therefore `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable in [groups/group16_trustboundaries](groups/group16_trustboundaries) should include all trusted accounts Account #A, Account #B, Account #C, and Account #D in order to finally raise Account #E and Account #F for being untrusted or unknown. 
+![multi-account-environment](/docs/images/prowler-multi-account-environment.png) + ## Add Custom Checks In order to add any new check feel free to create a new extra check in the extras group or other group. To do so, you will need to follow these steps: diff --git a/checks/check_extra780 b/checks/check_extra780 deleted file mode 100644 index eadb584a..00000000 --- a/checks/check_extra780 +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy -# of the License at http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software distributed -# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -# CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. 
-CHECK_ID_extra780="7.80" -CHECK_TITLE_extra780="[extra780] Check if Amazon Elasticsearch Service (ES) domains has Amazon Cognito authentication for Kibana enabled" -CHECK_SCORED_extra780="NOT_SCORED" -CHECK_TYPE_extra780="EXTRA" -CHECK_ALTERNATE_check780="extra780" - -extra780(){ - for regx in $REGIONS; do - LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) - if [[ $LIST_OF_DOMAINS ]]; then - for domain in $LIST_OF_DOMAINS;do - CHECK_IF_COGNITO_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.CognitoOptions.Enabled' --output text|grep -i true) - if [[ $CHECK_IF_COGNITO_ENABLED ]];then - textPass "$regx: Amazon ES domain $domain has Amazon Cognito authentication for Kibana enabled" "$regx" - else - textFail "$regx: Amazon ES domain $domain does not have Amazon Cognito authentication for Kibana enabled" "$regx" - fi - done - else - textInfo "$regx: No Amazon ES domain found" "$regx" - fi - done -} diff --git a/checks/check_extra789 b/checks/check_extra789 new file mode 100644 index 00000000..964067cd --- /dev/null +++ b/checks/check_extra789 @@ -0,0 +1,57 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +CHECK_ID_extra789="7.89" +CHECK_TITLE_extra789="[extra789] Find trust boundaries in VPC endpoint services connections" +CHECK_SCORED_extra789="NOT_SCORED" +CHECK_TYPE_extra789="EXTRA" +CHECK_ALTERNATE_extra789="extra789" + +extra789(){ + TRUSTED_ACCOUNT_IDS=$( echo "${ACCOUNT_NUM} ${GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS}" | xargs ) + + for regx in ${REGIONS}; do + ENDPOINT_SERVICES_IDS=$(${AWSCLI} ec2 describe-vpc-endpoint-services \ + ${PROFILE_OPT} \ + --query "ServiceDetails[?Owner=='${ACCOUNT_NUM}'].ServiceId" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_SERVICE_ID in ${ENDPOINT_SERVICES_IDS}; do + + ENDPOINT_CONNECTION_LIST=$(${AWSCLI} ec2 describe-vpc-endpoint-connections \ + ${PROFILE_OPT} \ + --query "VpcEndpointConnections[?VpcEndpointState=='available'].VpcEndpointOwner" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_CONNECTION in ${ENDPOINT_CONNECTION_LIST}; do + for ACCOUNT_ID in ${TRUSTED_ACCOUNT_IDS}; do + if [[ "${ACCOUNT_ID}" == "${ENDPOINT_CONNECTION}" ]]; then + textPass "${regx}: Found trusted account in VPC endpoint service connection ${ENDPOINT_CONNECTION}" "${regx}" + # Algorithm: + # Remove all trusted ACCOUNT_IDs from ENDPOINT_CONNECTION_LIST. + # As a result, the ENDPOINT_CONNECTION_LIST finally contains only unknown/untrusted account ids. 
+ ENDPOINT_CONNECTION_LIST=("${ENDPOINT_CONNECTION_LIST[@]/$ENDPOINT_CONNECTION}") # remove hit from whitelist + fi + done + done + + for UNTRUSTED_CONNECTION in ${ENDPOINT_CONNECTION_LIST}; do + textFail "${regx}: Found untrusted account in VPC endpoint service connection ${UNTRUSTED_CONNECTION}" "${regx}" + done + done + done +} diff --git a/checks/check_extra790 b/checks/check_extra790 new file mode 100644 index 00000000..9a56cf17 --- /dev/null +++ b/checks/check_extra790 @@ -0,0 +1,60 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2020) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +CHECK_ID_extra790="7.90" +CHECK_TITLE_extra790="[extra790] Find trust boundaries in VPC endpoint services whitelisted principles" +CHECK_SCORED_extra790="NOT_SCORED" +CHECK_TYPE_extra790="EXTRA" +CHECK_ALTERNATE_extra790="extra790" + +extra790(){ + TRUSTED_ACCOUNT_IDS=$( echo "${ACCOUNT_NUM} ${GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS}" | xargs ) + + for regx in ${REGIONS}; do + ENDPOINT_SERVICES_IDS=$(${AWSCLI} ec2 describe-vpc-endpoint-services \ + ${PROFILE_OPT} \ + --query "ServiceDetails[?Owner=='${ACCOUNT_NUM}'].ServiceId" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_SERVICE_ID in ${ENDPOINT_SERVICES_IDS}; do + ENDPOINT_PERMISSIONS_LIST=$(${AWSCLI} ec2 describe-vpc-endpoint-service-permissions \ + ${PROFILE_OPT} \ + --service-id ${ENDPOINT_SERVICE_ID} \ + --query "AllowedPrincipals[*].Principal" \ + --region ${regx} \ + --output text | xargs + ) + + for ENDPOINT_PERMISSION in ${ENDPOINT_PERMISSIONS_LIST}; do + # Take only account id from ENDPOINT_PERMISSION: arn:aws:iam::965406151242:root + ENDPOINT_PERMISSION_ACCOUNT_ID=$(echo ${ENDPOINT_PERMISSION} | cut -d':' -f5 | xargs) + + for ACCOUNT_ID in ${TRUSTED_ACCOUNT_IDS}; do + if [[ "${ACCOUNT_ID}" == "${ENDPOINT_PERMISSION_ACCOUNT_ID}" ]]; then + textPass "${regx}: Found trusted account in VPC endpoint service permission ${ENDPOINT_PERMISSION}" "${regx}" + # Algorithm: + # Remove all trusted ACCOUNT_IDs from ENDPOINT_PERMISSIONS_LIST. + # As a result, the ENDPOINT_PERMISSIONS_LIST finally contains only unknown/untrusted account ids. 
+ ENDPOINT_PERMISSIONS_LIST=("${ENDPOINT_PERMISSIONS_LIST[@]/$ENDPOINT_PERMISSION}") + fi + done + done + + for UNTRUSTED_PERMISSION in ${ENDPOINT_PERMISSIONS_LIST}; do + textFail "${regx}: Found untrusted account in VPC endpoint service permission ${UNTRUSTED_PERMISSION}" "${regx}" + done + done + done +} diff --git a/docs/images/prowler-multi-account-environment.png b/docs/images/prowler-multi-account-environment.png new file mode 100644 index 0000000000000000000000000000000000000000..89adaf5f69e4b66d5e4e4103ea0730491f67de24 GIT binary patch literal 78154 zcmdSAcRba5{5a0(5E_nSW**0}vsd=P!I8c9&dSOjSx04WvSo`>$*61<5y>7Yk)55H zmGAq6d)?3H^Z5P$^LTLH&UuaJ-X~H+P4Nm5H4zRD&J`szN(%=E9D###!GZuEctRugSe?WMP`{4$QzF!jOFcw%rZvGkRq zDjpGcIfI4lGRI6fO4f}Fw4N6P(Qu&}3iIm{pDneFGeN!Vqlg|f3|2BxjT z?v!MA^?iCa-F5Nk$@13}(MZ}W$Eq(QL|TuZXk;f`<|D_6F@h;Yg&ELTU65A`C>q1B z;vn-V40oF8yrFw245^auU$+pFPEBPejvg58+8~qLvtqU)Qvbb*{1o~rf zr~na(D9UbSq))Wc8r0yH)kK$y`IMU?R0n6G$CGC-wmZ!2bh=v$?c6X-p?ThKEaOzb z7BTXvt$p@=TiIND;jO)wgo`(RZ8`n%+;RS<3M>c3Wr2!qN+khJm^Fu8@c^Q2NbI?HqnkHCi&*ke}u zm;*WQju-(q(#g=fNNHLLi2u%gqvtFxx~t{E$@6#c%?C}a72S&SQbe*9-`UF|ih$qgTD}HCjm9gB4o{*9C46b7LGQg-xVQC$BBwy^xU(g(2?8 z@j|RyK`-UANks0)f?@e+GH7@a>?fK6E}zn2a*ISOtXx4D{x#wDjN%NhJ4iIz6ZSgc z04=gXxk2BiL47+UGv9lGDkavv7cGdJ6Kb0;&@1c7S4aB{`LtKk{(?V9GTuy1ywA>_ z0iTyECeqM~n~G-IiqC)}HS*)jJp*>VI9*FG1A)Wh7Zy1N#5-46r!HqdsBaOX#js9+ z0`R36%)?w;eWtJY(gfi($L~(%5_pEPzs;Cbo+M!*sU#2}WWMkIPVF7zJN7qObp$UW zQSfAY^{udaz6a^5*>WZd-&D$3UqQqph$90L(2waeDjTeA3BQX1vp6sv4%x_+nM`x%S%6yS=C^0GW)Z8!g z*SY&dS_qS=YKnfAH>|0yxu#)$+rh!mVb5N5Fv*_WLDzm|K&;Tqe2GpqcMdh(-*10& z%zcd8=+X+s3UiLy>%xnD+CTTUX18wdEA30}8#{;X*gHRZrEwSIUF>~t$7W?-jEv5h zF7C5Vj%<#5&IK__FMm(=I`29Y)27vc@tfoC>qI;cwy6BAx2?7|wo-o%E#wT{eWKp~ zdVqL{a*=0v;(4&vG2;bm1A#Y}aji+LFHVycc7@nmxV8v%2(t-{I`+EsI+nUFSf<~) z(N)pEUTVooYCXp1_sL|AXf9~ZYwm!=mgxo)iYYa&l*Y}km8LLJK}G>ys8#4+=%}z3 zYxp7mgXo8+F*LDjnUgGPETpQonNED7HvIzq7DW^2w;lb72a11gM`em-e8~9BH}2y9 zwz}ujx9i_jY+t=yE;5W49gygd_!{tRk8jiEQkZ@6! 
z>Uwux4UolOC<}IWN*&o7RFR zq>oGv-4AacK8mYiV5NR-{Y@^IGq~wk`1mu99@4*M7dHo!iaBg~(sEH|Sms?`j*R+^ zxNN7-^q)5^MlAf+^Cv}2%vU8`lnE%H^jg&Y^iLQvsp|N)%+hrY^bDL0T;JIJ&R$O? zkVV~UeG)+OFx5iZ>SMLMkVhSZscE7m0j zm#-mRh|kA*#oDg*k6_{rK5whp)^<1R>A4R%adxfh(Jvx{b0FCj@E_gzO_tDhhkpr{C$m3D<458U& zo#e7w?Rd3;j1Rfqu$+WP*=EUqFbwZsOzqmN^9$Q-5xoiB4z+s+`(b4Hy2HHMXbUkQ z?cp^yaQ(Pk$Z&iL=~nwj@!dnGl!lJDj_;XAEUYYc`k#?5?k<7!qCApo6*}gbOwL&@ zQh~!;S2L>0QNyC|eLmD(YurHZso@o*7ua`-goN*2x_NO{r1Jm|7w;jSs~t7FKSh3pM$ih^nSmm4T!T)TQcdYy)^$lw>{&vvlPFybWF+D|{>43hN5A5V=pF}K9BVWV+_L-fIljKAUd*R` z#pUVF^XJd`o*8OJ4|6!y?-s4pR*T$n)y`2Kf8}o*_(iHhy>@MvVIa7nN!mSl33mrm zckk6j5jrbc4re5_t;m*e6peV6R;Jk1c^HcEf=k++w&Q!von zF!ODAAwS&j0bgo;KkV@AwYr*^UyZYs@j+RKwq4NGgS{Dt&p)T_4tr=tm+tPYi|*8Z z*fg7`;r2DVu@ybf6F$~BM#JrI?o>D9QC4eh(*CjwlcsJ$Si7;Q`+aD0q~L{@iF!lO zrove4=d5>G>aMfj{eL;WeR1?UdKI#o@_ne$IH+gKY{)gPdC14#i@uS;{rKIz-xpsq z5YbEBE%M#Ucl>3&KfS?9q5dSdPExX;ZaDDg?#E^y$z;i;HTFHZwRqJYx5tB2} zNec%@9eUcqTrH3v(l#%~gQ%SBZCP-FS3>9Sli^UMP3~D%?td0xi`ai87SR&?Zt?Dq zJYQUmi{e9WIbT67QlraJFp&U#x*^g~oW)hCZ-NCKo<7)sZV#n*6(Y;mMI zFXHcu5MK_s@1wI!fx{&*=%e}&@?(7R0EfKJ7I7V99=T7yFCS;VizLTf{9SiU*^(Yv zA;zAS0;C;LHU>(zs;W2~z%v037?&Cc4|u`_ex-0}{&`lwWy87f_k9o!PPhXO_;igL z@QVFO0DiH1PF^o0h2dNT{(}O)zImXt)!>M{3un(4EP&5&WVGd!lz>-lYj+zP7Y}<^ zPpeBc;=mh(chH6&I5-r{*k4>FEyOx-{)mH)fv170ikP*lGq>d}S1TKCU*|j6b8sYl z#ehd=8&6A^ud|bjhnTM<{O<}e;2HZi4;=P)iKn9^+(1O-QglIs2A^IG#ycf~)J{Tb3X zH}$rU&PU>aAR#zl7!EGMxyPB*78?Ro?Y88V@c;E0uq+M-L?QzEpPR7zJ4A881=$lg zf(-xHu`q}m@wr?6dBkLsB4M3-SCE^G|ApgZ1&Cz#|AONGf0y0LX!&D$@Z2wub-(hP z${7ARnqyuO!XP!V2}FN60n-7=iCh&O(0gGlbQE}$pBZU-o=fN)9FXQBcMHy6o-N>G z%C_0HN7>&zA(ufU+7|^E(WDiqo#!V?9S6haPuYL6p%ekLM1wSK3>&=rRLq4;YcqPz zpWYz}@bKFN2nQ<{30xp?O7EClBfV!f$@h|Ne`MmxnHAjj96bkofnTCeueme77~3uhr!}Cw zepQhED>^VcFeJi#?oQ?*g_5sbA9j}Vi(H(tWc^oSkyi;|klB`xf+WCY$VY_B6E&_WY4*i$v?%Xw zK%G^b5*q5bnZ2B$ey`GAJ(wS?CvufGS-t_(zJdAgTlKZp=835n=zC z2UB+|S4+Hd5?qM^{fj;hrU3$od-P+66M$}jx@VY^G*Fm-F$qD^W8r(-RgHxY2i!>@ zn3-mOuH_1`iL(z^AyjcHA?-0r;-LLy#$qcKu|3 
zWy1auvZet=dV#cr*UY|&|MPkEM-~AHz`bZ~<*=9|=Lry-&?_cV7T%9u57c&+5&lCC z;8YR-i>MZ*bb`OAjkD%8pggiXY+Y`B-pW~6`h!>d9}58jU?58n;Ke(C#2Ji5GJt$u zSu{CeJ->{_%8rmg(5x@Zm@Lyd)3l)qESvcAc>C|ZUihipr+<8_R7l>#c6J(;BM}(@ zyFPt?@BJ5L^n;N5jD)DB%a7;PA4~zg1PspDIy}HvKn&qwMEdBND z0xXFk9DsCcNZvcqWC$%L=aRUsyK_GoAfPqQbBotl$%Z5&a?T?`5COo2{M>TFH-g-d zf^Y=%*~Ck;kE#vOc_^73s3>3IjmmdwE{h z@qjJt$oZs`Evo;R5n-$uA&uO+`5y&g0|Zs}7ufr+`XN~e$h?UZWc%FwBFU7BBH%>- z@D?DAEDR96P|6D2zoN%4pO@Hxf<~}hWEB{4p}ueqPaXhI-@x$M6Fhk_{(g}x_$(@x z4UmfM^z#N!1OU}<^Tx_w8Nxh*rki95a<)C}7rMyJz$@PKl|69o%Krxhsz|@clHtdn zGLr4GZ?tmQW_XLl-up)!HKBlB^*d8t`3u=g5)CsrxgJk+x5p^f3Z7V`4Xna21eHAe z3+n%Hnt3b$9{;is9p7>|+>p_9dyJQkOOoM^&DkNa1Xe5_8^X>fc(4)t&X$LU3$qBP zr&2r^;f4OAsz9|H!{%~qDft(vTQMQ-X@o8rAmIICJd13E$08Y+f zxfyr@ABKW(d38<9&HniL5tGm-6UpiO1x(_j=i$a&1p@4{L{;Uxs`(#! zT?G*FUU1_+MWhQXIPL^^qg{?p#cIBnG8>C^Dd%0NMm%u3q}cMQ)PRwB723SoOWIys znr84w6ynA6=*aWn2m$V7214!3;B}$4Pm)W!tZn{3fQfsiU9BE>2_pJ?%%tBdiw-7yXIh%2-)?ZP-?h zYfF31G2a33=+k6zI<+$>c|cA7MjQ{PHNV8ZJf~_u09hq!r!+XX`CnPqVg$S`kvBIs z1^}Q%$A`F6Ztrdj0Ln*sPWd3T0F@_ALS;@>WR*nYb3xxjO&`qAl3q9Z7`@A_(0%UC zKCHX_pi6antUYinRtax;@A@c{bH#IIeSG_|-|qW4I?p5ne33d*dwSxg!oi5E?4hNV zXxM*)jSj5YJKTSidgAlr2vnY_XS`F(w~ZDWQqlgrueLQL5k_|ok}m*VQ%ooN`A?-O zGNhF;61Ckq`BR2c=|4J$Y{G^yT@%$5e?`Mx1v${AK*WTN-!jR_WKbK6nZD;d4ZkuV z7*)@UEl){O2u{iKKP++6Zm9D0w6nVO@hK^<@4rUS9SJPIs8$krx*UP~%vQKo*kO3h zK;1=Z$BNypRDc||rpoV66>ppH(>Dq0R2SnXcOf62E}f4LJFpt?=}l7D zDK#@$ksFJ18E*I|8Ub*%0}iaX?*CBuR3I~9zzP-%H@1IQumLM##8Pd~Rxko9{?#h% z8T7!4<-HV&(-lVWyqO`RA5MUqoZGvQMx5ULQFXwl;Kdx@@;Ob2z^ZgyBQ zKd5>*)tTs+-O!^{wb?*7FfkH*i-G2xeVJkbG~xAe1^uZpA_om6>ARXYJL9Vpd(Zmo zoXQmTmIQ79D9=3)uhZizutE2DN)-?qeA*bT%%IJ_Jgj&5UvCAm$HH^>@CMDv*$ECH zqaP}=eV>K|GF({CxivplKx%fyLw5d(*+1UyFYglW0v5Km84u@Fixy~cpWXjJWS29o zr23EG<>>=1-(Nm+>69t_YRD<9hj5(z(26gs`Qb=)^9K9zL+?fD zRF7pqjjI+j-oNEJK@R`}A_1(OG`){Gm7;aRQ~Cd-p(><}Ac1RMqmcSXt1vJi;Kut& z5fgfXL93j_uVHzCyIoGI_sEq5MQ<2-NQ<9=0noD_i**xm#fjO1L4+`Xzp2tPDf=2$ zn<(nx^4nzG`n+LJz{D_pGG4j*{1GWhEp8yL7Qw}w#5A3$@{PK4EqEJ?8(GWCp|6t#d{{7W| 
zXoG|K4g@2MT&~Ygqn{3mzhP?;ua-W->Os|~34B4w3Ey!r9e}RO&+&db4Xj&{7RS#6 zPZBUeeR$tHp-bg=^pYX}QU|#P?AVSI(m$o;OjHO8t}0^OY~AQmPx*hk)p5Y|44O6# zUOlm`K&l#v0s^l(T7w_u-1Ps3pqS@CMPX6Qiu=qgBbhj}VSEF0N_X*fO*V1D|j)}Ms| zV$b}K))OQE+7P1l7ICURsf0Su?+EkpS`tk^xGs|M>QvQXIG9PSAg#eKpWb{cKE(Wo z3K!Fw#?GN~^qBGs)? z3*4B!XMO^KLxLoV<83M{XDes0UR;e%z3Q|E!$*?-HCq500hox)*vKoWg7ob0XJ?C1 z*!Yu3#1e5zo@OwSIG#X_lrE@uV#F0FTXRz8a{)W^ zyE`4sDWgV-s3^AcxpXy9vM3D<8~lfXlmtKsnVC-$9-}ArR1X2=GO4lvV)&;kwlx1x zKLkk%V4E4zmVXB5R%*-4Sdk(7^TEc2jZuEMrm5_?l0iElC1Q_A6wlN<0gjuSSNDie zK)>gWeSv>_1@WnH10s+o3{ZK@Y4j{%K*GQrq`LY7%G%vzs^^n;`$qsFH$UCDaHbe! z1Zbui`6bFoevL}`L+P!3n`O_3csf2iW-b~!V8o23eZs0RAVMs=s z9Vg_WL!^;pGDT=Os-Pu1T=a}$U?qUH=+_oxXUp4363|8Hf^G3(M|zG#d!PbT2p&F{ zcVfl>n%}OnBtF|f0!ctu118R~3?7-%l6lTzX;+p7_hj*oSJsE|YJNT5B?_Cx zvyWho0E(4!nq$eAK*q+##omu3{Q56ny_zbuc^coJi78+&dancYwvBX?0Q;jYe0)aG zm&d++^OW86wdfAwj~X6he>|;GNyw?#|9A zLlYCh8ZaZFbko9btCtJX^nTk`eZPOdA&o)6PU7}SdQfIk$ve;LyXotvK{yWP5ICnZ zx5vRP4~)R??&-;r&DW@0|CAxX4Ml5rkdb(P)8QZb5gahs_x-zBnN(vUQcys^nuUeM zq)79N+Fy^?aSb<8u8_0v`4CVvi9m^NN_U@81h|p_wOcVs2i9WDBgiW*W)=_;(XVT| z&lvb9JNvq`<8~~VDN^3i(Q$Qqaxzcn;iCSnzR^*G(>!tJLCfl&=hy1oI)KXCTIM&O zf}ZOq`n(w$a?XrH0a8MVz9aK?!H6*xRn=>eD2`0Ak_Gxm;cXO$g0(Zv>A(XHGu2aFPpYuV7W;I|8`oYZ-0nGZlFY*hix8@>J<~ASEDx3PW6NMZDvk&$7FZXhh>YlEyfAIc^s zIq+P+4yM5+r!ij!gsPtvmWUqcj`E>!q!#e??{!jfqCyp9M*Nc@IcJ;F>Ytu_-U?5;$cJd8Bfs z;t9jEfFF7I*<$pr^DOULR1E3s}3_ugyBf*>w)(XgBVAbB9-)kjjBQrB$M&_?OuQsq@bDkk60@V$j_Qks6ZW0aiN`vclQkCD8 zmo=pgr5mq9lN;=d^a+)yd%Ixb2}sdef z)>bFj=#w;S(0nEZV)J`26VyB&t@@`bct}Y1FvxU5@qIlVc{7=5k>jFbtz4w=6P(s| z6>QU``mTeC5qJxxwUrQsA|+%u9te=CDC z=^^k?1i{amOjuEeKcR%#Od@2O$n4f(QgrIVQn(-DM*8plh1}{dUSl2xIozz^Q@`Kv( zclZFdpg>SP;OE=T#)l-b(EA9`UT!ZQ1&@=`jed2Zx9G%8qX3qzSNDBiSZI#|V)AZ> zcvv2dl#QfMptkIlV^){vtY&u;>H}(BV$KhcJA^uHo=t8Bx zF83vrZJF%T;C_lBeO2u&KC87ezBe|Zvd;`Dm}ZVb6iy3yeTtU#TbWiuY)F4Bv!7yo zi5}=>gG59Eu2<#zyWu>--@09u)kT58VB5Qb@WNDnU6MkN`Oh`R&d+-&wPW>|6<5HB zFO~y#xlWpE$?}E94>`M)S_b0PAwQE9S$d{{BH*1!toi=Awsi3#U@9=5NHxr@92`Cp 
z%2DI!{wheg&v>M<$~gG|-st}leZ;x_K*-nR%W>Vj%6AzUE#NnP{01q*SrSxK8Y?O$ zH`Y<#vg5K=+XQs4W30Tqh6tf_%^G1GDnv5=FX9Y~*#@KMZi7{FX!ca4JDi^u!y_Es$3)NH!VQ6P*45NkhJytW(_ z6+(&JT34A?4!B%F_M?D=Op=6qvkImCMt**DObwSI zM`4Vo>BO%Aw~sxQkM(|8B?ik2K9P?)4V*E)1iqGN?T-O{WbE4$8)6SWdDw632J-wG zB$G66CC%>`m=qEOaL)Gw-z{Lu3ommgE^~5nTE}@dEfY!N&&E-kAN8Gcio=yjv(By+ zS*ZN5Ah!MSt$1kYAme7JM)Bly{L1iCFw;?o*_X0isas-xs(b3*H`kL55O)E?8{)PX z)hZ(z{ny~iGUv@Vz{5BmFkT7SP+5x^X*G}_i4uFCY*dFWpWSsT6J-S>+!t~i6%3My zhqDEni*2Kl^+b|KVSRFas74BDBmRciWCVfItwgpWHQCDFdxucNEmC(o6?iUX^r2(1 zJ>r0wqKwPv<2?{Y0;<99$XuhRDeH)WhliR2t``a3QqhlNB&2LAm44d8xGUfAjIT+L zG^#a=R-VsaAWN7DVl(if`iUZVOZ~edagKEGqi_e(Z{)FXw{F&o;U{+fH*CvFV;6mu z$Zo35CczNDT8~?~<9@Coopr^xlgVPpOc5{9O|gwRhIJwi%07gEQwwHB!PP^t#v2SW zquMwky`TTxUp|oaO6q>8|909SWhw(31dT0)Wp34aZ-KYsO^FQ(!i_wN#h9suNaV-~ z38Pq|{5mt}f18o@-!&JsQCUf5OZCccy(It8Dls023R*jgngx8jsWG>w5?qXt=3iNx0Bkd%a_%^a5K9vkv2CJvp_vM9=Y z5zU(Y_(h(5-JVKb`!Up}sNh+{X9}Jd{)E1o+D|D8Wxbreb}4!3mD{+tqglSw)V z@p+z9%Xmw!HXJGR5*?cNn?x&+nDu*`Do)}_1E3O(>@nXqWQLa6<8fW-Z>4>iBQO0? zn%YU{-5v71aNXdlg`X8)?<2`x1y^uefTHM=*z$tEX&tiXFit*RDSM@%sJ?|WBFHb< z8fd4?XH;$%0&3T8qVYLsE%)smj`w77U4;S|`<$g)M=%8dMdoIB011 zd~Z+?YM*kQ_0q}hP#3>cBTairec1`yzVm3fK@Exv`Qt`>P_3;mJ^j*m>6$mI}rUS)Bpnw)dH&D5P zYl?ACJ%M!4;$E@n{`|QPZ7QTz`)4qiu})M?|0(g4HM*c0f$dld3W{~R?A4fex4{K$ z-|i>oldKPPq1DC%dwluS^cT0~G;^-2GAG~pajC1tA3E-*tB7uR_*Lmancesgm1m7i zA?QKzVG}}-A8Vf7!j~$$SopXu0s#?9iLIBt&=38iBPg$NsSe_hb7dlImQ5=Oxm@p6v z{q2haO+xvpo3#CtsT=So3S!kni zgR~DGLmlhXQFz4$g2!?57h3s>G)a~3d*^<>iKLBH5Mo>yz6d>nMe-&`4O_#J^Y)&0*crTi1 zo&$bGv8+$RZIY-V#u2f`xdqTq~Ewe&BS z*9IE%7Wi@>YbT<%fGBO(r3Qj2HNI6Q;sM@5(o<8rbIRGbhC#35&KCX}h(l1Z@n|-F z*5c176w`-NM^F>B=XM(uG|Lm-m5Kz)F6$J3K*|iFrJ?Z%mBnGPlnq7lXQC1jLj{Wko-RRYZedXAK0hfbsSD$kM;^M!S>NQ< zE8F2ISLJrYO0sm9i07$DR9!;=n{^@K1{egNYz~2Q+`lVre@m5)O+HbFB2pF_zO|0X zux8G23P_C~BI!>N@QOJXA1%u=QMV0Jw{<(?hw<7{mV+*8g>~UR^ zxPg5-S|;5~v&wcRuiq~(J2~w6GZ&;rw3Zs1NI4A}5r#k`13-?1^_lMI)1ayqjSQr? 
za;FS-wQb?8r;}O~D!@mn{o+eAatxbZkEKr!p^5hwo7GH89y655g6PpOPw5>e>3)`c zWGF840hmm_#6+885#a!iJ+1jfL?Zd;AyXPEk1oB}g61`aFjVWJ(3PQZwSJ>;qd&dY<;w)`P4ayHPs>DFS{F?J=7??lQm3-cbqdpYAcKuy z>8cy48fkhLXR0(1CHFBDol^AnF2SvPNcRNgBPw|$=~3pDiIO3raczM9(h(7h)e%0TpM&N3|@rL^7W9;C<9NVz7%2-G9+3rIvR z&XmTX)a4qyk3k9eA78zGEhj`?uLX2^QYtUJ37`e5<%rFuDoO|cY52Le7XN-B+pvs8 zxH+b=1jNB7#N;AcPZ;O$Tud=Dy1A)obv1TqNLQr5`;)_R7;+KJGP0c1*7h*d8wMRh z0Sy757kWfhEOf(TE~YCVEzQEQi|1Mnf5Lo~U8Hbd?)oH2LzfmG1j9zYY$7NO3c{;; z|Ni}`O3#Bv=)G#A45Ft2Gw)`dg|u=C9zIkye>EhE6;)Z*5Hwj`I8b92s8a_L{%%9H zV;CV1FNq6VW_MDCA`v(HCndp^a89$9%^LdY;S-K~_P=4}8 zqP=_XTq>0@J_XQj!Ubw-ivzrnYnUp2+XD0ZiZ7xN1wiBsiFiW{yMt{!TH_Q~7#BKa z>Xlr52-V`uXTk$LgRavdm|b)(Mru~;DY={R0PbNsNq8g_=|k}8)2DQx{+9LunTV1I zu6v(h^psR1*eH69%|WQoap>Bt8z)GUzELeJ3tNIGd;Xjun=IU*vuWv#hZ`hvduI$U;ravHL2f9Kx zNu<)^?u8t;D=oNE{n^MLjgYha&BUpv6NOqtXu=>DFhC`>d!KGeNY$p!P*rd_1bG|0 z^{I9$inBvJCG+B&J4qq&x3mQl_oY#l%s*$=Bte>g0@R|j5CD{j3U2!6#mhBCje02q zTl|t8Y>)*q+y}dknHjRr9%kA5WzcfxSSd@6GgSnq#fET>61)LIVxCa64(SzfSOO8Q zYO?;Z);-h39t~K6Fz$9RnUbWG3#FKWP5!;=qr|ywl8FAIo|;1JWKdYv;X+s|;~t8b zp9n0bilp;W6-h=5N=iCYP*REq5-#3Dwgcq$qGczS&2t2AnPlYTcx?+5KXyznd!q#U zC{y|10bXH{2y-sL!;POlM&n@IqxU*g6$QH0ml4Ig;0E+#&Vh_Y$>Ro7O`wv)|0IDD z?)p9cbz?iW)M3rUlWWJsbFe?{j7XJ(xr0==qQ7CTiA>D5^^*w%&Rae){6J?`=tqDO zYN@yeQUax!dS@K?`ue7S`*c7LL3@`_^)2Au;r>DJsk=J^C{c}w79_W-l3c4>&T~~4U4UX<$2;QcB!bSyulN3@3W`A#*CZ0n~Yn@Ha_yIj{xP& zR1_)!SwCjmOBxk-O}<#-Q(mj(4Eji7uF!wm!%E#3Xoe`ktc(nOo8A^2l328G21~`) zwtd9qe`a6uu$yfUJDtH`KF}{rVDI3?^-@Y$*fymlw%-)w&&#ea4$L)>7#0;Li4bc~%H&r;%07_2x81fr z)fRot#eqsy{O(RU=@dRbz6rFQxBOj{1Su6Qtzer~Uu;3yJ@r$UD=6DS%%7nAlEjFX z6pA~sYgr+Tc;Pug0WMe-m?OHz2*V3>b&a=qQNP^N)+R3?F7BA(gC6+QnghaA6QU7% zib{JC2C6*Hu6K@HAibW|f13Wd=&0sEI=FD9Ti9@nNRp*gU0ffq(X2lH;uvB=GWe}o zv+kusAxT80!UsY>=E?c^WRr@!J~)=6U3X9^Tw7bXbZeFp3q#Nb( zV+W;xCZIdTgCEfH4g%cfa>>BYuO-IMg6a|cKwHs(EyzQHjwYzZY9zh*1s6TH-z zF4teB^~RHFd0~G8P$se!1q|{NIHkki(`InlWYn!&1KhW>E!;C4*_593UT^xw4oZF={6>$tr5LH(J!bv! 
z%TH)2S$Knx(Y8t=3UH1DFCg}@A!`CjcT0tWvdORF2J?^-QpTy*IS_(0MSg1e%n=}( zY5A!K4R_<>;;6g5SojO8Ov;{Zf&+@O3L>|HLja*9ybKJu0d@ashQ!h&f?P%*_}`E2 z{x!8wLHPS1gMG*HpiFz1@idhP>5(tGU$gJtp;NVEx|Y_jV|82dzQvf7_U#3F+}Td8 z0tM!B+mBPLZY6W8*aBXRJMj<)G*WOJI~Odd-lAv%ksOo||4fh^l55oyWox$>-Ta81_E_Fe zP4!9)DKI%q8i5K9;&$A;#=s!*SS@GKFe?B>QRrk?dv9al>Jf2*#wUBDpnykhscO$v zGapXf>@HzEi4Sy`Cl7?|K4-h#GuI`yVgkbvlLwLnxg=sQ2$KO(!!~5m(Dj>juWaNI z{iSKG`DQgrvR}#dO@2IARAUXtn3IsKA73DoEMkMplLF3z3COgl8pFEtp&HE!M>I{S z@I}Vu+Ygca{zjKNH7cgd* z^6RaBv9UP2cbw?oe>Vchkwyv*sq_I>4BO0|h===^3N!7rsv;*mc6N3;-cr4$Su%=8 z>QEAkGv1NF6aigNP$Vj|gxorDd}(RPro=}XaZ`c#WqIH=cPSJq|1 z$YOJ8+_2n_7Osi)bRkTW`nOCC40w%xC4n&IBnChWud1rb#UlHmN!8rA@`?e|dK`5oc%Y1G=kN|ND;^xT<+FZAiT$@BK8=x~pVktFx)at-^gi&0wL2?b&w&51~< zD!!MN`07CR)pg&m;Q_J&9B>P7VD6q5Ntjne2m=+9)^j=Jg{Sx!Hr{XxwL>rfaB?04 z51Ep~h_tpB^_Entmb1TMagLaR_QKc^Xj%U^6%CC5&=7o>_Co+%zy#y-b##m-?=IM+ z#n)%5sCU(0Y_RO5{TAp@QMdv0es018!b13f&`rVNqMp8UifK_9{yGzwnlZ2G0H|M# zM}0Obx|f8VGybx{`iw9lgffaOqwtnXlYJyQFk>(toTSldlV&=cz+P$Y5%-lOvRW(i zcZIv5T!$zd2=tM>o7=2`3e%QOfY}H7zN7L~MBXG}V(`8_2O2&*CSby`LmZb-4jLiY zsB%O@1g=y=PFzU0O;zPNsbO;Pg0#kt;^R3sSU3vM($JX010bGku4jD_gvjfQu3>`i zl{wumY|_z&C6I&m>)9xxuRCXxhxcFg$VPj*{Lapx$+$-bc2`FdmZhbp8ax!*^^pz2 z`R!mkFfiaUj0!S(bd(B**i5un^YVhA@>@)r3g#?JeWFc-?!%taZ&Y$w@5U*@Iv=S!Xx;*orCxuFdaC0avKN&DtUnM9$Uxq8WmaFVX^`GcUm?OE{Q7XQBRtjX_`Ku}O-#s*{$ zcV+pt*Z~&`*4Y&AVA%>rP?SyQ0|-oH&qy_^z8DKKxuv!7+j~NU2^t$0SGT}k5I$D? 
zc`qNj{&dv7vs57o=pPYb+Kg8?a)42RUt{tDqBk-g+V))~s8(%J=dpgG`jxE=#t-^- zkSJO%<3SQZ4Q{gpT22_fk~G~)dlW*eH9@w5{8`a1wO1Yz<_JdgazsV6#u{#3Um?ML z<}y4nLVOw9`U4JQih|=lcwGydESv{e&E*5I8fetvgH%b^f8?v?8o0f4gD1w9r>Cd$ zb`Al>Bt1gkJ}*nlxAvZ=8SfXwWHU^l@p{&uV%>Jqb?qe*bi5sT7YR@+^d%B!?8O5~ zv1&Yi@|+dwu&3`;)Y09~J~l|ofqU#z+RZ&8 zO)jq6F*tAs>uf7AA`Bagt0gB`Or<3_a^+5q3-bl$g>ApE0H}CLnpdXa_Jf-(!45D7 zhf?}?&t3iwh<*Cb?+A>9cNh+;_73KV;cgHYuYxY4GeQ`^EX10faSM~PKm6NPvWBN*`4RDuDU{sV9x}9U88G2JKATAf_ z{_9FX;q)#mmwiqM!yT5}bzWhNIgv=RB#{a{kR)QV(0{CUrlln$8k)3F$n$Q$k4%6; zw5YBTA`fc!Mtiu#fCIuKTVwhBiL;Cu^GfsEj48X+=zcN)mh6jy6cTm;Ts`?=a4_W> zV1lU005Nz0?vP92>N#WZ71D(Cs@Jc5mVb59Ao|)bNh|R!j3%$(nnm1HAQN(@Z%4ws z0SUtve6xV_8Rdy}Ik8`d@^h*k<9;OrGXe#1@Nw!XEXiJ8gWzsJya;Z=hJg95DS}TS zNFKu9rH^KW!o$N&XVipO~kWrWb-?u+!*UcOh4YT5PKq6-}|t_RnK55XbZ5aF~?BpX;*L}YxB=ZI+} zP5;QV@b^Xhfv!54G%;&fUNe{cB4fgbNiHX+w6ig&as^FP6iWdXpH5NF#Xi{&rx zLJ!2{Co8+SJ#a>5CPmP)qn<5%bPC&gBU=anT!mWl?lUhm}(}4L`>~6a)kg+7{Sg0roLla;lQDQnqJNoxASl`1V*JW4L}IR9!&acf{+09>mY#w z2}6VG>+2OjB%7P}Jc$*y6GF^a%|MCwfvp=zi}`nP(Fn0O2YI(W$WP{{p7@SZ-M}pw z0I$M6Es5fmnC{T}jc`XG2ZgG2E@J{n*~kFvd%H6xHa4TCrY7Kh-gYeulD@#CUV0=p z4K3(7t)Ys4)8JX_5RhOggb@53l?ZDbXPIOwG5lEmwW!`)8l<^<%>$2MCMl#xy47UH zq%K#1l^`J;JF8I3q%r#P9^R6+&{xU8p=#hueJ|&NyseF((CD1)hVr`0;dF}12$y*`djEchp5ra)Tun|%ui>G=Gy zxe7N)ui$=Tqhy*#BK!8Q@8kfX_OEs^Lat+ujsiUZG1?%;TVlMtB!nyd#QWEjd|@^a zX3fN&NuATH4U*Wc_`z>IR|)RRhL~{X`+~$}nUF5ednNiaB?(Z^+@`MsVcAt&F?FxW z3eGg}g5YfbieO!6h@LR#cf;~g|DT0Py2=QxjoU!wh-LEemGFkvqvJno$JDC92f@c{ z$F;c$esgWFj`wnp-BcS#8dq%&^Ad#*CytMD5Bh{xlqvQ!nw*Ms#G4a0#r$H-$K$u4 zIYna!b-+wUG{6ZF2kSo@GSMfMeR9ub-yUTCF6-P-{~9uB)=CFrEi-Z?~i2d zHm(KEG4uE-eNAkJhOY)6A7s_9HGOS37+({XliNkC1^?O1b#;031Ant&Ub3fQ*@lO6 zv&H;)+dS*Bh5bQ{mndI$BL0p$DH0l3q77+?o#)F6 zx3#VT3X|82_lL!5o8H?esb#1A6|wrC#gB3Vb~mLj@X*|7_?;1*({R~KCeINpyEc8; z`Qk9@?)Qa^!dzq5#u~@#l}0bg!*5>ZF?rMLEfC#S@N{icYHjoCGqjH0gs#5n?9RCF zbga$S?H@1xEWWtu?HfGoUjyn(+`QX}X;OO;=`-(-f_!NPDk-pDLy*ejpV!rIN9W=U za8CpJzi{%Uw}lVNObfvG54qh-J!-T`!F9^88L2q9P73X46txR=^3lG1)24Xyfc$&U 
zw41MPA16!ik#5G=NxT=jd7Y>EaN`yAo61V7>N?k1=z6BqPRK=xmVh~dT)s8mcJWW7 zxxZU0mk>-5X4_BnGgu{mD%l()Gpyd7ILHi_+RFc^DjBS9{&>El-dzS<@Z6$Ui(n=T zhckWl>v8oTHe5Y^@y`2Pdn_8G>{P0vS-^Df1k66r6D&&+VfJSuEo)wCw^qUG?Ip8y zkbt!GnmZ(L@&W^Iy#HWffpt#m+Z>9BLqU&7R99D5v2Ek=(V9qn;Ief$z<<|GEI;R3 zBT0(`{gcTew*D+N|5@tV3E#oRunERj+Y*B`9lzZGzN%6x*TOn6&;ew2kTIKKzY>IW6q?k_&=-VIvYr5DwkFZ(>wFemgp zD);D1u9sgx()Q0I*W(@6`iQa@dzmlztgWq!$8(RC?M(kL*Ua8yILzB?WtI973&bHMR#aPW{=GAMDw;fd+T|Hsr>heg>n4P4e$0R@&;Lb?%Dx|i-|K{}*UY3W6g zl2kxiLb|(4kZuqOr8}jY@2t=Be(&q$WLap5I0HY-yv>GCs5Mk{qA^Plu72lNLxIQ2J; zq@<*cwMK8XO1pF92caw~HRT1A^HoPjN90qP<6Ji9yFkx;BW~IY2y`7US0neRI;9X$ z-0rF^_TIL^QBJ8iYsc$6J25;BFm$)nT4heoXb=!N`zZ3p@}6AYV~1=da-TJE3TE=% zSNYAH!aVritk~VU^$!cc`!IvT`!waGf#FSpXKI7}zWdE} z)wI2Uqmtt{9`8{ea!)%$t>t{eSUCip6HL1_r$T6q=R8rx^N>d@IPMm%#4%eI7+W$8 z+}&#Rg_?R5y0gicHcef^a}`E{p1Gg)GLshn6f=OpvGaoO8y;5AALS`#*J}~iU0%N? zyp!Mb>i%9}OS1&|l)0C!6qm?QR!0hEpQ{#}OSsj>$NiZkAUaAR@TM)oY9)h>n$d>~AgTcx9UW4#R7S5*%g7`8TQVmy~o6=NRsIWE^H zbyh6g&yryhl0Mqf5HKqz?7YmSfAcH#G&q$PPc+kp3FL*VP8aSix%yr%QtY23T68*yb(cCpHY9^|*49wo#>?*OBn2b>2IRaX91|u`MOb_zM!6(xORzg}Q>brD`>Qsz^TYB=TpSgZ+U9(TyDBVYHOM0P}P zgD^t3I?l%^!Aebh8(M#vi5yl<1xLF`_K6=qdR%!Nf0f~Hc$KIx&A2oj$~PZ&scM!u zUR)o8ec2^!CnE)$Tz?hVym!A@C$Ix;`oMPg}HOaJi;s7C6?5p`u!^?$lsn_eNJ3)KFqFPR8AGvFTbV$lIz@Q`RFurZY)S;G!O1Pq3=fS zf&4Co5i|L#%cWN-=b&|k!qKk0Hn1mr-z0=r&N*K*8WPpKo7dW6!H5@nabT);~rS)$cJc1 zZ2X_R&98r+*l_E-9KcLFwwT}Um!5U`Nt^c;o=bs?hbf@gGx>GKIi^DiU!WrYW$o4Y zUA-+0rs~~JpRG=2<-+qx`cJN%@}I3TIn|n}EiVtAJW?F}J9^q_Z|8BB34N^}pQMU( zUB!}dns%9R*|oPgxhKreiZo?0C1&@Z%sMhs6;4GYk7(wHq&S@TY@G$v8g-)5B_7Zb zf;%wkFYV4AyzgQgR53`e#CsWJD_9KYD(EqsbtudDZY3Gsy?3vdRW+zyKWk!J{q*9s z+3VNCV@H@>O;lwcmITCxOOR$H})v5(gbJ4+Afp6XV^n z@5Blz^O1w~QDf7pXHzG6M&?L2W7^H-;0LhaE|&4Q2G_GwlsvzT5aH!z95;0E zIFc4QA0Bz#d~?xkm{*OPmPL;2l2)EvUX-60OZ<1lIB&Yd^`@`)*>B3Wa#Am9Q=O(+ zXhNg!(cMXZ7_HHJJajD_E%;{kAzHM?bp2-#mURw?=X8@+&TQlxURh_Dsl>c^1(Fj5 z_oE^>DNbAN^#RjaJb>2auIT!1_;O(PGQ1R5f-U!DegF7Ky}Z1<6b5TtT%7#)|9b(y 
z)QY+JyM7K2jx_b~aBD@q8OwNV2I7t%Hq@h)5+hpah6)r`^xoLBi%iJR6-j(qFbZIc zBdE}>XFKXn5DHzSg3*I02ys8S07ryv0*YZ(@hmrhgCI^it5kzmkDBvi(!W+5HD|=p zh1pb%3SZ4`YhoLudEH^Ovud-s^zh331Jr z%TnEW5iD0%S34<)X1VUXO~PN({03hQqHP793<%Dl;k@d+IxoJd8!p>p?lv9Plz8q+ zk~E!iIW7}z{p+6VEe>F>is@DjPwFNf9qAX=Xifp^Rk-x?>)ZT>;gO?-LepOk?%zmf zaODQvx`^X_wt!Q zgimpP0k9e7cp+4n5*{@+F<6Pa--X=ATivHdQ&tj8@bO7pa>7}3{wyduczi77bJq*p z5XJF`eAD;x@uKEJ%}Rnzc+&5_KORBXe<*HBOO40BugpK=W{4;~=nm7TAI}k2ExZKh zhFaaLZ)|j%5>Y)bg@Hl)Y{{hh6K^Ieng^{TIHvP$JI`Yq?}i*)vLYG2E| z;z}rWu$WGKK{C6CbGw?$6XM;c$gBF}enhi#%vp=Hh)U%OyZ2CBLWd1;|CPv13cKz# z{EGaHz0F5Aqxih6AXQV2Cm=|3Yd&cQzXUh+yVC8Q>wpbC(c6sN)e)ttqAuagvoQqX zoGm69F8ViJN^gaa9~C9oR)pcJ)7Fr=%}#gFBr&E72P>9rrMN8r&^uqc7jVcl$zHC? z^Zlf>g84-m`{E{Q_`((j$7OM6{q(gV@msc1l>1khh~v@-@4jbjh?y4BN!DpyrVSda zO0JlnQ0l9jnC%Vas?lt#0Mb=wR3;zrTkxTjMi}oHVX~~JU|HT&zuO2Dxin@V^Xky@ z3aGt0o=!%8`B7`)Fj>lX%&N?(GmM`m1-FDZF{PCSGY69a{ z`x9gN?ZPhU6}Cmx-#DqWaphgS?8NNAYZ-8qw;TC$GBPrXKIWZ-*hfDd+aMk>6^8{y zbCz8gklQC#=YPGsA#8JGAh08E^yKXnu8sA0eC4*zl+I1D4wJ|4<1(2CrK3pCH{Dgm z<5*HuM@|*Ab>|q&8`&Dd`| zmOOjWIO`B-f}#86k&mTiS8$cb^{m%He@y)^3rX0;8r>6@;;*ZSipz$(iyE9V zC;h-)?s)IV5eJ!S6`JiN5(ZVUUh#N+j!*kf%oS z&*EW$ax`YijundMu6$Y(sw12n^_Mx*250i*L?TDBINRkGgLUDdO{Wb9$-A5=L*LsT zXC8jDWg)WJ7IL$mdfhzvFHvrPyp=(Yl@?|6>DM)8mpx&i zifcCW#MAd@R)r^i>fJ&Vv;0{P1gDwaV-FIJDDVVUNh6_AUOYXmL-=&6G@rax5|_AI3hsKuO> zP)TDVb7@IOotbR&gT5t21>@b=;GZhSbFFlV#(g*66Stzl^p|7A`-3+1%(-v6CnxuH}2A6^AHBR(1;Qyp|etYLtVX3~r&$KT3yc zzTIpG1|f_}nnPGf#z(`MNCqr;rJ9#v=5r4@vG{g2$qJhdy+VJuM(Bg-dT?=;ljxjF z3gM&uGNk7H2ZQQR=ajBYW-JOljqlXJHZsiPsr-l1-o%8sro*q&4PV1QKry|~Vn3VW z^Zw}98r%AlU~$7Ca=J-IEWr zBG70{=s}$*;~g%l99XvHH^cNveqN3#Uuo#Xt=2uF{8DnINyDo{DDqCxP)wHzZ}n(Y ziRo`qghoGeY-emgz>)M{fpn8>40ly2J{z5@>9u!?c)2n1s)O57??}kA7hO{Bc3e75 z(C4|P0{8Ofmt+~^;GCORr`xByrkp}s;JjYkJHNL5;{Y+JeN;&MG?(Hl(=KyJ)v?K^ z<4rHgxg3!+rdibKPhTSJm?=c3_Ji$nbGM?qVFXr*tRjcvB1?Dkcf5B!Nrj}jVid0M z3H9aEqp8FAABkp)CIMA8FQve_Z4jkpo%(P?$8hhCq0u?%=`8*44NuJwg(hEyG)0#Q zW?7`n+ZfmJN}a&E?Tv{Iz9`KVl16WT)@!m!dT-$~JMZM@1^ 
zjAC^~;5dyc^3)D9h0UE}2Z~6g`-}M=wIJ+C71Q#x68H?F6!ApIo_*Y-LCY!2S(39Csvbz{1B`3ccCsSNCRTyx6wYAPWCjM2imFN1)^<9wlk&CCUz=Cvs z)p{?|M~BJk=X6MNS`tUC`NusXt-OScOjL=X`JS_$dTJi``!H1)x+LYuCpw{%!vY0t zzwx)H>BNm)jL}-lb`GA2XH&&jIwuCIqNrn`JRby+eRqtYHe+2yXND$TK|iXtXZG_4 zoe`txclbxyv?Xf~Q*Y_EA*lmm4pb!H28T3!1hs=HS#Le3XTA?!jXWz;H;E1*A{su> zuezFUzDn#(&D>5V2+~MRPZ_ILiQ+vP3KSmGSN6s!CGG{41^B7*GFK!b@LVS^N89>n zZ6-t0%DQNXYUNeTG>Iao!5f2YVH%JaVA^x^ll}GIsvU9~qnod?k2YR-Z)=zyV3AKf zHM5Aga`ee$7C2u%?lxc{AfI#)kg#tilnmd>uS$zw-0sp}pW^n&(Q&a)>`z?^FJBl> z!}`WX?l$9iP=&9VH!HRAX<(F8fd9pmR@I^t#g?U^RkPyh3)imK6Hf$AKjUztRo932 zC#tw+x_2>f-xHlY?!iF?de*|r$$Q18aSLa>Ed>(oo3|An>!hu9U2}4uHsegII#-x&wgRyuWp)co-oa_aJn9`SahnV z?0!DlHdE( zG&btHHf&Vi<_>h__n7=+9eDooD1QP+eB?hL0tUpIqm-N5+MC=X7feP2f*-w(7x^ z62UU|TB_Qa|{8^v1^bnJTx6Af9;NfLC1 z;rP~5xIxZXv!VpwO!;=|x?e_k-b4lpwpaESf#K(nt2#|@qwAs;f_pYnph#1?UCJQl z=cHDlb zF`jnx9sdATMgRoIsPLlWT6atD>yxp}O&0brv5zq-+TE%7go@c}5F0uP4c>~YwjJz) z(WJw)sK{UD5fgbtI*kgFwN9osmFZ0O!oCA`)xR_o&X==2tiIyVxfZa76`5%ED)-=j zsXf6gt-?81JG!o`jD8u1UGPUL_*u*sEm@}7z4tsd83nvr++lqe?cvOIGms{5df7mH zO+w$#|J?0E=Y5=?bo%ltUsFgmr+)tlUPun|plYB`LH_#^{Y_tu-_% zzMeq4;yfZ>+m_o7GsurR@K6MB6oAG7)!Et+MfKTVCw-)$QCB#q8ytRV%1=vSI@kzY z23L}gy1z0E?)xtC_^PxJ04^L_P7MMp_HU)z$a z+}g~=wL&w~pw}ttcX_+&NzYNB+m?TwPDLK7KTU9anLZvL=L#k?v zwvVrFswQ(Y_+8|z<=zwCL;A$&dT0exZ@-+RYtm>_D11dPRT+YX<;F6^#8g9FwN+81 z6X@z)PaW`yRp0K%g^uJ!!P5y1iKx#YXhi%U9$X%GT;@2Uw50Scl;q0rcdNV_AvB#5 z*5l^KXcS;;T6M&(a}N#u5R{bosc=f@NS?j_>tlOGffVnKFQoprfYBv3m!V6D=9Gr$5*=qAQ zJX0TBpy9Z;q!EthJsu4W`)z*}#;KMN%xIk{9FBy?qCa5AnfO3U(qJ5!Ij-&KYGkMS z8!qH~esOWT{DBjS$#bQ+=I|{GO=bdC^qUARtX&O-4RHPdM~6g(DZIvE6v`yQb){`z zdvPUrQakvhv_w^yF0EavjBH9KrD2R@w_1_=c|Fa7sbl8>W?~82WV%T+q$jDC&qr-EU&e*Fbz0_W(sVP$JeayT887y6>G!B_%+N-5O=ld~ ze$vRrYI@SG<;z+O>$;CzjMn5BXJvO2;@jXDCK^>t`P9{3v6s0vy=0dn*8L^qxTj=6&<%?8IN#;1g`opcyPSe#FJ|df}{o8 zC^ifdg$!6S&lsun7w2(&-DN+GSQOH0GNqs1FDfjIdD84Ld-3~Zh~C~6S2kqt54W2d zIokeA|c4ay`fT6AHVMvA%V0fEk zI5BJG+bmrwk%I{1QgJbEt&Uif$a^jHrQm<>-SYZ9c`WW+4_>Vud_M~lH2zY^1lwa# 
z`Jo%Yd8fRF7OF-wN3#hsMAgcX_ubxuv#$lO+I=YH9v{R&a42q`vj^oXiU;%J-8MK( zC+4#C?_XC}S2^pF7Ix(2=GN3X*LS&~s1Ny>77JI5HILA0+vpUz{zF%fsD=`kDzMq5 zHoW(}33qE)+`|AjM#gHe{VG`k)D;d6!ndsAW2VlL%-oyr94V^i#YIfR3Q-E3gzTR* z$y-OMdf4%o%S7!#dv}q}#F`bk&_K1Wk6pF~e#lgAgcB8v z18F+4fsDfw`?-`R zFaN!0-}q2izT=vK$CW=p-qOyA?9r2NKIez)35vGI?sP>}G*3~NX5YhG971TQh zCR>~RzPUajP3-LwJZ;5s6_KO03&`pb7dW1j(y68WU6+ODrHjn@G*|zUxLZY1PViUv z*C@Ng7McKICO`oEV9%R8wr-c(Y9k^5o_)bvS|JeX z3X-pe^D8hQOn7r>QPf^+vYdx7sLl1~>N+Re=j+a@YgynKvv3WAJ9 zcOT{qz1{u)^5d5Sar^g5>OsF-vp6w^-pG9vowuK#o>OPZSpA@1j-Bu*V#za*Bz zKZ^xhu)TFUfD}nFm&4xdc2vNfChO zDyJv<{wcf|XXT2h%7nuW-(_cI_5J+$SzA|Ez7@iTfqaVoAtEC2=*XqKp~03mNo@fX zz5IhW6UC#Pkc+f$%^|>uOk|Nf)#E?;3w0?k7P3Ks@>mD3QaPMxoW2e=AmQauPO}&4 zf!*R`tbE~vBxuWvY6fgsLl)o4!5Zw*6#BtcpqJK)s{@nRr!#sgRmKmXBb0M-5-SeW zSm$~igMXPvp1lRCP*tZX)=CkXg&*D!Rw4iq# z9&&;TVZ?L+I<+L=Ob%h9HIY?|g*lZ;_+5%?@2`BV0@Z?p*%j!JT>K$H(|&1l1D}ne z7C$)pDPbQ-i(I>AgFL&>K^b`;l3?(o`KJT+j&RPD!9FgcqwV`$^wV{&1naCz7F2?V z-~KBmiizXnsz+pGeYyu6lLpXqg}N9iQA%$}U$64)y%(^%Xg7CE0Iy>Wg=bk4LiYbs zP77drSETzmW*r%EQ%LxyC%z*j=uq*N!Cr<{xuNx?G9_2K481TU; zg&<2bauMFK#ar3&cDA+;pUA;c>2Q{NCtH){@7K*H4>%#J$6j)wl&}YAv&JT-R1f2f zWFjz5qs;hKgzgS5Q>p~E3-aiRWHa}1s!XviFX;wr$sCZ}wPTf~NEhU8#G`Z8Ld7<+xk_%O?958#T$ zQ*x3prf6_wBZXlZylYZwsj0Oc;=WiAG{MLd9vb17YVdud;LlkRZYapcOlyAz^MlJn zFM|@Y5}0lJEj6xgWa@T2D!>=Jw4=E!jx6uzpAAU4EJ`1u=HL0#(yiGIp+U58~QSMOY4Fl3bfl}U3o!vFQYFhlPISPG+C5sH^PZ;pFGHR!R}Ha*j|hL!mzFo4%UC&_h}PAc~17^gUm%XziVs zb#Rb~OZxEPD;hif8Yto~f*sxh^&-g48n^KPI;0dol9x7WZKl>qO4aex7AQsQDRQuS z1ofw$(-JaD&{nZ;{mce5?1L8_S@&P%qA132DD*P=6VH$a;dL0-Qn#c<{f$F<~Z%fZ1 z*9PX1)eq#&MSwE>QOWrLGLNgmUdR9a!{Eqz()R?cxs)2 zB8lfu6l(tJltN#N|I&5;$n&#L-xcgWn#h;@97{=@(i#QU6q{Kk@YqLcfSNx$5X z1BqLMmO3A2cm?$!;1Y+8i7Uf){v5rRatIquk!91c!f=0AI7g>{|8p1_SyBOtN4IzN zSkrOYAP5O}l+={ye<=7vn-nBH_kNrLyYwzPt@P*%+%z;JGCWSS)RzlO-5=RbU>UZe z_4gMkhn}M?Qw&tP9&w3o22k4M zb)Kv#PWcexV}{fJT2ne0f_#CVLqlgA6w`|;^A-C7e86!OaM$|lsMh5AqlxE28O&td zbqGBWH`4PVJ{O*WQNi}S6G}^;Wyny=`G5l7<%yFDGKeT_NG9O#rYdi+jl)PudeFx9 
z0fjU0fu4f$N+nwA2nivbt+#M)3`Ppo80fLFB|zf(43sySvKVt2Ryp~~L$ToG7btXAfU!-k$rJ6rlOk)h z?V4D=SZR^ZZJ^#apge6sqTZjk-+dt^qNq*w**x{6o~rSfVOKm$Fl#k1vgpIV8TEYh zPoJBJAKBG#$&xyG;Y*A-i;Qp!3fhj9qOg=_^#9po%aUs`l+HO5M z5hH;{-!@nxm-}|x*Z?2NsL*Ug2z8W(*XnWJXa|_>I;~z?k|Ze<49Mtm+J93)caHJP zO2GPUlr2ZGg=h?iQeyz#iU!RWt7A|KxlE@0*w#fsBttb1nWorr@JF;7W zs2b{jNYMOhXKh@r-D~+4;kFjHp@K-$#}D^$-!M10hQgm)r(P85ep)OfMl$}*l6o#{ z1TtLu0J245JfUrLb~d%!btI<--SVB5IwhUSb7c{>S>q>A4_UZ6ifi|a4%HkW;E(?! zL7|`W_%)iD(C8})Z!0YgFagQ*08l(Uv@uxXZ;-uio*@kI#ivo4a7uM(1@jo7h;5R| zf+}%JI|yFjSS|^uVn7v*gA-_ul+mhxf419V?`FDGx-HBNrr4~dR& zB6Qk&_5Ym1EYpaTy7|G@DDeDzEJ`T`AsHX)n*9Cyaejj2T601Cmeb2*KBw=&63#!% zSB$4_BM0zQMyzA2c3DRo#_Mb9aRi2B6&@RM(B;IwaPOhs4MGLKL(Q0mSE?bo} zu#O#3Fr`u$Peu4M!ov!*yBlByIz;gYPl-{6_#L47P&@0)(KB?UA(3bqO;~L&=al4! z1)=E#^G;Ui@Grt>&29dVO;!g}SB*6(h&=@q;N*Eg@ckRa$yelDP^Vx@l~ljG-JsD! zOh4zg!hJUt${}X7W(doIN@Xicm@p-16KC|1#-E~6$5cECb#%@#N^_Zq&qkJQ5(3JOf; z)9AYWu~+v?b{PE)Lix4RGjz~j3AAK*nnf*KL1}p zv$c268e(DS_-HzJ`W>1v{*dF+d0Vl+3_(?~SP2^wwv~1I&ZGa*Lf+njsxf@IHv?#+ z@|uM*)QUpjq@fOVb#v<<8j8dZ1fnyObfq+|D1@4R3Am~_z_yw_{pT>fJl%smcN@xz zCaKP1T&b*A$bMZQ(NP#h`=#}XWa7HPFu@ziR9wDKwCxvCAe)R!EEvVjGeT!{%mJuG z)Dd-<5A!>hDrve>40Rx&!mDlM=m-CSp~tq!w6fKeR3pZPC2^mP7ci4>(}N0Zyj&3p z5k@qIRfmsP5AoDiFU>8>&p=@~WXTzbPc`)L)|e&WSUS{NoLP4(5z4oiPmWe#LDunJ zG`I|dY9`%r^sEHbNtPMK%}t*=d8?_V6`WJl#-yA`fL7Q=^^0_)(^8H)_u;J{=mXgivKv{K1P_x~(*kuJaFgItvs8DEjBH z-lFLEdD$FbyhPlZRDLoVcjN}Rsj!#(=7p@;AgU4Yzo5heAaeYJoXT9(*vsA16@-v^ zm{FcKZ&Q~)oevsLrJ23`Kw(RKfa~!4bLh&dU7P2o10m6mR}W9esX#&%IYbuak@7<; z)ZU8OAcAP+I`2I*7!MUM)0Y+2UZ#;^H{pOT(6wlkyL%I-FaPVW#b>R?XZZ`?N0qY!|0F5T6afv+aNF6Tc6vF#m`7y)ZxuER;he(vt0lEP^nsth2 zz>9q&F6KE+`(-uh91aSRis&VYp0Ki3XtQJo!cp*7RyH<%_4k2+{ZmsUS2HlOkD{ai z9cEYlG8Y8~T)H zQYw?3L;uxlTgJ7h0$PwooA(W6(^0sJtc0)I$SniID=JBRKnJaDeJ-iF$pKhOA$TJd z?pTf}1ucm+>>J$aTdETRC&*td0rdSGSvZbbe|?-xCTdUpffoj|j0%j2YA!|YU<8XN zgve-UXXz^~Qo_hl-~O-jEHy#@0Q;?9iZ==F7{qBT8IMhqCVLOhin z5=8-Zhz;Q~{fGt~G*C0xbye_4pNnf~m1N|T`7NS&b_gh-`l`oe8!*)} 
zWjHzOnJ((uDXgIu=h7cc4UF=ya$EbJmKSS3?Ab`pBRU@liT8oiG1qxkIJ>IJpNkkG zIM^J_*$K&{!mF`$Ad2;S=iToLtt)CF0XmISf0ZR!=haHJha-Nm=Nac|vm@jyV^gRW zCI0}=BGDiTZ+8X6G3ODSxHRUk`MGP~HR`l{D&xHh8VC2KLQr%2lxM&%W3#s0JDd{| zaKDFS!yPKu@TN&ESB+EsRGE@1q3d?HEWYWnb!Ew`j0d@ zqr~#LOft+2F*dhrNvG<*q`#|pP#^^-Oeh@en{>F(!~pgX8Vn%JQ{RoDol-^6yQj$v zsy8phr{ss&Q^mX3xV@9$k+eD^6AyW)=8JEf!s&QeQWAGyAsbB=lq#a; z=WSdGXau*$k0#W0bjwgj$8kdKo{SijYFuA>cIXZ#9s^aETMZ!UCO5AoZ|gSR8x5G9|k+|p56gqHBUJQrLK6gcH)V8#x2;cRvD3J z=u)zxGU+PM{ZST&xD3OmBQqjS@iT0Swlg z)W!=prKPS}-ruCZ%uUL%^oUFch!C|KNf_z%3XG0H1bk4*S(P<*n5io*fx$cQ7oADK>~7v{H`wNP`b@;b~xLqP?HwK}z&Jd%VHlLRA%!kWtSK2AnCs&crP)EMMg{Df}pF)K*$GfJdr;VuP5k~98 zqITyj$(PRLBBT&>O1~Css$O5cgkzlToWP%faC@k&O9XT1_AVy345nL~ z!DFJ|)mzWGbdjdwlvk<|d0$G%PH<1<{ruM0@9$9g-Gb=eXFODqjMA(vNyG}(Db(DU z7@y`g^(H06%vDUtR3p^F9Qg@@-$H}_z#r-{TehE65K$yHQ1Q@xuJEF zMGxX`S;$3wmks2+Hx4V*77=BT(X7GrU|3I*+3@(ysNs<_MtolWmf_8qVT@zGnhf^k z^1!OwEpPE#NZLD^{gPv+MaFkUu%iWqI;Ft-V0$n8{JIufzuKTvMi%<{;|0)SerPj} zVh+{VDJT|XJvmQEKhN!9it*cQO4gDoE|+i3(+N^p_1&+<*UF7u3? 
zEUlMy+=)+cQ6qu90OPlOLZ1Q}rJ--pm$0&qsD496<%JTE)HVd%rV=PMt!d;YP^*mG z9ao$8aiRb}k&&OfUTow$U8dZ3URL9)8DrfSb)^RLhfY}{2A*~Y8wWL;vgkQ&Qmr9Z zm|Db>2f0&HhI!A+PDZiA37}Rn+?DEaA?u#MQfJ$GTbW_h1PR0oo?!YYeUJnCoq8YG zsBye}-0k@*m6l#^Nua(HZWMnDZTR`4V;VoW{T)v23jde}DB*mI3{_vAnZY>29jOEF z(UUxhvP8DC^W$lUw|u@^!U2M+omFWO{}pJgl31aMfUBwn*^dF`sE=$yw`|+k7({Ty zD-qRYV&qqDHRc8Xv&gU)T3|5xOeul4AOrU;i&DYmG3CtZrH$Hzp>5r=+xeif528A7@9eJgdiDbJCXXq1C@#nmEcx)eg!@@~9?#K9vJtO+d> z9R4Yj4agBv;*N%~)9cr`1q4LGQ7ao5_vq(4LtYuS1}0C)oKR3ugbr=Tcxy0b_U^PL z(hoN^G+b_sH0&+Tpk!#Da_?~8l(|vPR6V|gKRO^8$oDOXJP`KSmJ(7l=nUbI9$%xu zpk)yiSXDrD^8dQLdO|#Z;R9~Um6e+}^770G-V?kD#SB_X%gc4Gw;LE74DA!21$cv1 zh{B8H1GLOD@Jv4%2vj2c0kj`%r6VYTUyilRxOgwO_s?Bt{@%yO9=C!lYhKs{aHHu+Vu~z`zHvv9QF!7dz5{f#=Vsm!cDB!Q(zyrKFwJx}{4T ziW(xQB&Ri0=`wQcc*#f*!vrqsMm5ZoWo*w!E-ZXvcrSRXQ6G{I$)_){ziKqAyV^q5)a*Hsq$mEl%5Dh(nDyqv6@pc4kHD-39O`}ZO5sa06f?mW&r zRYCM+O}h)S>D}q#CwwmXU|`GBV$@Hj_@xhZekWkUd;Z+Q_t; zL4vZt-yQv&h?26${T8nf1f3O=kwA@?fD7$tp`7^Sw66WiM7eqEO`%6uQ_&hiV*cw+ zVK;wuLaGd!UiqZn>bf9GNV_z?_|Ec-pN!A3jJ|*6_*bXRV8CF@_Fa}wA=j8|&!6in zRarweCOn$KpV3W2fu_)iVs#0+?0~=u|741VADUOD~TvvPkM4w_JtJiNq&_HinZi6>b180((pfZH9$WJC5q*93H zOj=j^*M<~4K`kh1`B48XA^iLpi)p0marY#Q68&BxCJ>WpFNut&D?GXqz^{W&61?xd zLT#OYf76l(E#7#{}#Cln3piXm8Fpz4ois|S`h;4bgoSj&=S`jF!8y=cZ@j!7h-DRGM}H$;Z*+O} zssz!C;L5Bmbbj*~Wkn+`kcl?~nh&l{Y&(YH7@T}~>sjl6bUV3O3ZTXS{{Z&oGZRP9 z@%5(x5Wm^qzwgSAZN9y9XudM{xw$$C!^xTZcA}^mBWJd1PgELWnfP_5`D`VTQs(0N zV)yMdxa1MLVnf8q)`xYTbr*R;5rvZ!_vMz0kN;r(ec7`+=s_WT zJBK7_3{VZUj9#C^uR|{<3K`{RcGZM99rP-gsr5;!w$0uc*Y9ub3nTEYnpo~g*@Ex% z>xJYI*C)` zb^9Js=~1k4t%e#LOliLJV=UUBve5>w3gFKe?sD~AFHE+P*;OSq$SYGcO!&f2_I%xY z%Dy^`H9paji3tbH(RT;~Pr3X@iXs}_XSUdig?gc0AO2C49k`T~%mXh*)uGP`hXQR8 z4+7q%;ZO)e^W)aV>#+i2xzrF+c}ejK^t~x2AtmkJN#o)~Wiki}OezB40aQa$duBVopb&R$Wa>9wlYQ8 znXkgDQY}tscSy3cvs=l;H}Y1jRRcP*T89Kb3Po zZ*BFt3e@OVTd(C4OL}oo5UdO3eX$&EVOkj45(BR^!5!K^&ZACHAkKeXInzjAU_N>A 
zB8LyOf}@O-2cD@OHffkti#PJsc;pK42;Tg83)w`LHT50(`QCuszMRfW6TyMKmX1qaZ_K^*(K;QS66JbkL9Q;E_zYiM%-p&KTe_?SA1ETm4`51--zS7qN2d7Yy(d7oQXh}pVL(XYUOzQ>N4Il(jO$t) zP7sz^`xe}}E9fD_hb^8-jXPlmZ<$%t6W=|pd?2_9Fb8_kRq&TRkQR>O6E!t)uT*hUGxMg2P^%g8Qm=yL1n%iYZhXJ`k2Aa0uLXkVPu!>@;w3a6nE#v@$co~ zG&__s;0DTIvc^dh+JHB7&j^9NUPkgnzqq%lXSqzvyHD2wJ!poc^p#BJoCbbgwrcBA zETj;KPaL#j%loTVu&r959>=$pr~A+Taez));P9p0pCcSlHCz?OOg?=L(acvege+cr ziUB^$vgkpY`=9rQ>YyKk^FLOvJV2-1tGm_vyV<@S z_yV_(=|H&?nq{stfSj$LfSIdT!R`wa`#+!r{q*E9>%&cR(gYj;a)+A|$ltSkn zd>Cw9PL$iJ@1cvC7v$UvQcr<{(F`1no0keWxIy|Y@5Ku=i3IFe?^U1;1Ey?Ay9evN zWy;d~KVOd^i;If<&w@=vFz%uJ;~%gWm0MS5^IJD`l>;Qw^GskyZb9A<17F|*eZ&W< zMl^(Jrc14=5fny?L6bYXN!?k!8}av<<-k37tvXrw>0%Wz5^^T0KOKb{aWAp`1G_Z&s%Y!ZkUlVeu4>ppej<<~51||q z>q?!h>Fx8;XM>(%@J}~rKLibdk>gBM%&7h6{1XS=5&50CagislnEvGNmp=m{-r?5; zZsJv2ls@<7nVpg9L((t9CcQ$rR<*vWVYqTUyKKNlc`KR_H)Xe#tyzne_2q8CDs#ec z2YGF-fP=rML|RHJ`X67ES-D7o}TPoI%0qMoI#5xUK5twgQA(2d#-rgUG7 z)=Zp@oo%WQzB!HNx@=X?{C$Qyc*asI<;WfA0+Er)MvdgCPzn~&3l}#1lbXTr@5Wz; zItl_$Me2*w1V&rh*+ia45Oew)!lu7RnJ3`=yMHo{(A(KyO%;;(OR>GnKL=aUJ`z$= z%KhN!&)_h2NtjAPwU&cwaA`CbNeh|gP>1}Zs3UcOT+CpTV@Xp!LZkmbvfer_>gIbN zhGm6CK$cEn=}@FalvrR%NeKZ-m6i@^Sdo%$=@byfpc@pWq)R}$L%P#v7VrE1e1EU! zUv%d^bK*MJIdf(vU$sTJE0+}h=56{}_3xKMS})n`yJ#@ULDq1lN}41Mk60mh&bHbU z&07gn3{Nic1$hC;@Q#Ri88!MFbYsKZk1eg(0NfmJp{1B-RsrtO1k-Eee>C*?)VaOn z@Ljw+xQ;xQytl{~SvRG4)@*zk^A0K5plf!&tdZ-USzGDa4Cz2av(p zRMSrI3z?PD)5w_po6?%vyF#mdt1^C_Enh8ekaqkL?kh#X zv4W~=ZBUezV!`u_)6-GoP2y`WiX@6HTgFXRMxEKA@FV7|H|sa*m00n#Q870!vvViR zlxvxCzra{oko)QaAN1&la8cmq1-TpjpO2XwoCHqS&iBi${@Pq0mnRwvQcarmit}N! 
z>=Vo^A^_jJ%|3SpcEo|)k2ZcXW+~LUzFtgoEm$sz9gJY|koxH@X33T~BSf@3T3Z_x zu}N#bHHV0r$mzPY%|E^t1pCHrvLtME+v3OI!RhDgNjPR0 z@ud6ET2#z=xw@L|?OsBC9uJa2V{A5!7(k(dJaEtJ7TquW;~*8D3K3r3X*?Q_ZrvMI zr7chPCxRCu#yWX;dm5^%`CD6Czx2HGwHw7mow6%H!;jK-F#xdw-1E&`ryktlLDL5} z+5PCa=2F*OSV%?wk1hg=MOAh(848eQh6gcEr(1a{wgd|1dCZBOCkvQzKCfM)LJ;U3=09H9dI&??MX0)Q^U1NgywgeVR~_38-m zYjCfk9NARKF$?g^9^G4Op)U-zNmL3n1sS4L%JmHov_Rno3lfU2E*nqHgNG?<2>?pm z@iIM*IlA7zC|C%OB&qPJB&A~|kc6FvNYqceN)Y z)WQh&NH76J4phZbv-4PB?|Dsl-f5EpNA(=E*c(lWPqXgtx$_&2s@Y@#=I~oueP0P5V8W|W_1b@kJqQCr_bn`N6E+l8NX${=?Iin2j?9K^@PkvaSLbbZ^+ zh`2HF-xvGI!LXXC&P>tS{^@C+_V&EGV;O9p1?j_Is;MC3`91{%Ta;Mi;RUk)7urO@ zOgcvW`wXBJ6qc30@_()+o0nR-yroHP1X1xKa=j>nUvHoLIs=f~`zsq68!r;Ni#Ffs zi3ILmD)1&SCw)k>9n_obQ=Dd0>l4l5ogc~eb95WT*c!UIcMg-^*W7ucqDImd=l>!* ztS_+7_Ad)n>nbP)7=p|OsC-p;+@+!rqO5B8K86FqJ?#E#Qb4afVHy>bZAL&%r~Fx0Fi_4>FsukNc_J`e$NIM~_#tW3LCUm_&dyIF zqoM-dY?RN7U%F%ha_Iw^fj*_HZ&65&)(9w8!kz*!j6bLDg+NIumR-ynvkVRRL|t<_ z?5?Nzw-s+I)C#+mtj;)C?ira}QeikiVglE0@HQbR0V9!Rsf-`xnmo`-Z%A+2^aRT; z1#?&6lES~sCI7%7xw_;uVkrbMx~eWazwoJ_>;KZY@rj9K&w4xXVs1b*EaoxBxSymY z?e`n$6#YN9Uw~6(4(f@?ANU`(XKV6G*-j81Wn`y5AB!6SyO+%s}>(BO4 z)6{K;^)~%eQ_|fT-`OhGF6fs4?WjQq3D7^^6C0XEe z4WJBnG8V82Knm$h%tbv*N@gGAvK|SC7ppAnldZGmgYcL?VdpjR@Gj7q9RZN%!BOvB z44nn_96fwHk}ZWM)d#hel~l^AU99B{L>sh*ObC6Pw^(Y`f8&4@TMYX<^Gg+y>*~mA zfh>kQ=jN}L>dQi41Cal=DzZriJF)t}L0x2-6uS>sH$zf8DN%);>@P0w*HyZJ_+vm> z<&{$W-qSGZ+3Zl?GwJhH#eeJW3TnV6#cjG@BCR0_N^FJnw1l|%eU^D{MylZPF6?N~s=LewER6&(87|^JTEEd;F0N0StdLnBVTz$W(V>9#vj0vaN6$kyHaEMz zd_j+10``3Ow-`gfyqsL0_S+|&A0m^IB_t&arm(t0fI+^57BXKob)W*y{vmxj65^c2YI4NA!j&Af((;?(=cRV<9Gkh zm8UcNjlX!l8wGS+P(m&+!lYlH#t;PCfP2OXFUNMb5taV(s;ns_FW^ z_UV<~9zBn=iN|hJL%^;m0R$kuxv%e}vZ`tdxYS&~EL83($JX?hfMwP}FE=+=vANmV zuVS8ioYrGQLkZw&%8yS3tw3`PI5RP9{=;C>fZe)oubZv(_(K;;jO21pTm2duk2<6S zTZIeG8PGI4n=?8wAq=i>H23hR4rk_SmVr=*h>~768b8hbL zIuBTvU*!r`KIia9xB1Z|7&fP;U*FwZ(ZoE(F>j@C*^S7CUaS1jHHeqrIXn1xFW9tD z$x0pb%sBcHn2w#^77;{>%z@Y+$N*@PbGbtOxDRH6pp7uT-V;d|ooucnk3YbUIW%#( 
zM!vh)gs5CWRy>%yls^}%QOK7tB@@JsG~uN7*A-ZPmL~@-Hxz7;|NZZE2rw}Mesz3U zxE$7|f8Wv2`ZGe5Yn-LTx}#ALIAF3kKhK(}+z!rOT{Z<^o-Do%);c5?FO=5~E{|S+ z9ChQyd*1$eRk+PP+#G7chDoNZ>zyEy6RyQieECZJ@0WlO?_=SROn?q1#MaUFoaG}g zH_jq7pR*+dH-{cEEJWaT_x)QFEAYq>5I1IeZD=gazrPxX$j9y zApx@vk?4)`cjHUV%oI@;T$2yEd_nnglD!5%ZYejs^o__McAleOzrN_BWq|^bOeaDSGdQUQxcYW5yXH1d~uSIehl>Swn-B>Xg{JmV;@( zr$W!Vi|_7VnZIK3M;G3ym?uPk#lOkRn+hJ=Y2GXOs)45sq?7ecThRV97ZnZzhV}c zO(gMz5cMVOQ#@|l9TyJ|tC6nvvdgJ3a@ zbCtvE5Dv(t`R)Kwa!Mx7L(%=Psud=1@Su+t^@c)ON($bDB!Bsu`(xe!3plH1K&}OB zA%tW^bb?=vIHTpUU1LU_%j5iv^qPoix=Z_0{H>xV&PR>Cqn%ic79Sqv*_RhK6oH^ z_0C$v=W6f8c<`h&A^Rt{GV|f6pI+>Bp2r=Z7JSr%P?&>E)X6~QF(k3r&@o9h(QbAmU_tM2>rY;NvMQ(;h~1SpO>OlSd#;DFR^_uxN` zDE)%edNM=VE0mv1ft7F*s4WKn)53WhtQ~yBA88KUM}TDx3e&m|v^BItob5u0Vp3?& zy!+VYK%R0C3O>N*FqL@dwK*y1+|x6`zW9$&q=8UwQMW^ZP)4SvM0R&~!9m4^t!w4v zgs5z#4=J94UGHf(;=#k`gzS9#XfEf2&!woZqf6#p7hVTs+o(%(ZL$O+ifD-{Wm4xx zgI9}6Yin&16B9cZ7tP|)b0&aiKb5{;cQK_7c|mE+mSB;h$2P)6 z4uLC3nH&$9mF~CbFu$p&`I&eK{fl8yKxJpTv1HJzKxIjogPis29LOw2V9n=ka6d-u z{NxGw;3q)r+R$4ccsp~8<;BCMGA3?^X@-eMk>FoK;V3@vz3y;9ggQQerQc`c-PQKx z+;4FLgQS4OshxI;6O{#bRhS1sYq|r}+|9gmd*SmbjgP9Da%$Ie+myE!$xeQFgp!!m zgd?$V`e0jk!0{{+hk+mL92^n(FW?AJYbQh<_RZ)@LhMpTkE+ru9KV-E_@;$`7Nri3 zPk%^i!`6ZqyGA&oWEo_B@ZKy%x*k8yLV<9A4HP1bY!l-52Dm_A9As7j=9FBvrk$8- zjwuc;E-tPP-O&f_%TN{My%hfRo0cRGJirN8EFe_~oK*YSZR~aZgXO+l%R_+xoU%NHpDF;YAA$2Y;2`=$@G;Tfuy7eOpMc+5-!w?kA%=J=c%AIM1@FI#&+EzE z0AX0FI>U^w>wSjP8|_Qzz?cJq2rSQvGX@tzLE0-61v-VBH_RxmGJVS7hhJgm>nXY? 
zENs@@-AxG0fsAkn(@Bf@85ZOUSrbx=&!i-{i4zLma;Z(?vh364VYu(#gaA{(C7C_) z0Z~w-3$&h^ri}W;-@*NFmaFnW5a&;o!mLpMu~O61g~!^%vCu+5UT?BdDZbEKw{GRQ z_5ko>WSYG++DrEn2g)ugFtE~AfrML~3;zin6^na=(Gs z?J>W>bdm9b>8!G;>1(n;Hh3<;dH25O`AI_x;2AUkOke!zqU%%5gAO0#yi_4$QYX6v z`mi+uL2wtciatsZv)XtyFyG@&E)vLFSQVboFt%b~7>XYv0B`?PDEuA%CL5mq(wez9 zHf4t4H`rk7>g&@`eDgs0I)2r4atwc2>1T(%R6$NkKeB%&E63zP^u77GzuGFIa#q6| z(;|n3j>iZMDk}hsSNt1OUs5R~53Xq>hD)nui1CA^Gxcyk`oesy-Cytwct0GyeHf9N z8a2flYg<=knrN)6`i<=GGi(7szO(f#6X4iZouc<#dpYR1Yn z@9}XIF0&YIJtb(>GjiDQW$u9d!&<7&lLUng@wZ_YD z65?p^eAlN=~+1^IT+;IvNp^3hgbS}`eE8+7>hhYnQR0&FHYy;g>rY5nRm{7HD%6# zUE2II##sGs)1#3q&@%CAhONrV^+&w-hJb^;2ex9yCU+tPeM(;gN*JJ_LXP0!-vOmf z{;;^Mw-9$7zK?Vh9Qa`VSAz-|SG}v6_X0VC@yc*FUXT@pQ5Fo>FhW5sAJB#y{QU(? zn1hGI;CjukFAR@dEjYTk@-RT5PKzGS7k)ftz~1r8y>FxG2y%~)y$QV33BVD|7 za|e|P;Q=j=k{j}HiG}!7AtTiB>^T}$Qv?; zUkdimMyhvybSCf`VhMA@aWo=cuIOog`1mn`@*SxEhUO=)+`}Aq^5=!#Hl7l?@v)e- zC|9KBwPr#w;Syg$K+Z%Fx}f>uYMP z%j{_X8R^!Cb#ul?QJy*$n-gw$ElW$D3@n%<+gMOLFt-}BFCs84X%pvxhHZU4e$WSZ z>E_gdASi1rJ8mE#AN<;*c>-=woxZw z7LD(8fq+m^xur+RN98~u%-DxhZb;(2dr>(FXhOpl?!ALRU7-zLK>tw+1MB1FZ+gun@-Jx>=nC-)Dl*av=CIJxu2m4Vb@K)~F> zqU++KAy_z1i3vCXdCA__ceO4fz&TAnuWmh+kN6)sz+mSD>WB#a0>A3}TfLETlU|}O z1{@8UE5@d!W*E48`g!%L8M5zx%!e4rYbQ$qy!EQsWJ}aFJIIBOxhV+7Nse*+8Uo#V zMNfh#0Cx4}zi?iC(r}~n4?|tmeH=A#7y!u!1apOjn_C|b62&Na8$|GDw#Im-o@2O# zsEn)mEklnD%V`Oc@};tHp=lZlLD`_5agV14{*DwV*I^siu9wifY~$w>%D5rrpd9^z{W&Q&Z1lNdO`uvs@s>D))#E76mjm4t`hY zrRgjG4dPktU`E1vCuQ|okPYlt#q0OKPgZ>ee^Q+{64K3x*@*^iC?aF&8kv8iBV2df zSg6*bHngZy!a{_=G)3RAqYEUwj0@*P`|mR7W1ej+Ou_V3 zfVAt?&Esp#)08t7KTf6c?py;LOU2~OXKrzEP9*ekvUws>8U*QH2k?8^No-5sZK!Tj zxqC~z1K=TjJOm91Y9NpMVWAoda&r@ivLGFXwg5LrfkMhThMPJ(Le7b6@mZEc!S3Mn z{eExw$kdOK8>A`zl!Zomdi*JNpjA=VdXyZ)O0yWzr}@d{4}YDjvM>OQM9zUJTBo&H zIV5BJt2}t`Z7`d-5D7w^726{EDgPcbSKM<+NWbe2b4_)%)Y5(cN(@9he6;UC=fPyX z#bQS0yF68Lkj){RM8Fcf<6!%5DXC+6qlBG&~2#&2PT!;kYF z;jl<#qm?w$3*lHu4%sdb8TtO1d>&IMAw)Ph+Sb!mGVCj};lvQiUfQ_| z236Yo3)TbS?mP3LqId5CW;<-?LH2w>8WCq@40LpoGlRSWHSh!KfE~fFw-$eY| 
z=Ljm1&K-gu|D?S_p#Xl_6;mrHRZT2fH=?)q-eTUr+hL_;WaO<=z6#DQ`+Ff;12|O! zH?=Wp0%r}AL4m)hNV*zhq|A^XA8(}=i&T&b<#XhB_04Mh!`)N>q82Ll=NAW(hVUxa zgOE{njRYHFhRL*UJ9Yr_oe>Y7na&@wP6nHFgi8Vbj(~h~#sk7!u(*9A$}2H@eWQF` z922yWniv-@nzwqs0ZmL=r+W*kV-*M0xopjZ03yu%W&8VK$||E=*DWk?dn)>)HQ`W% z1M$BF;s?)q#;CkC!~R_{h8N^3q_|NkP45jhM0lPZ{Nk)<`q>P}hG>J_OY4ej2R#b8 zR$}FI`$|PUBPwhT%%Bfb^6J_cwJUht{#-ewdTEJy4o;x`{L>aF;8`MoO%RdaRq4+3 z50?=j{mkcj@vNPn4*Yf0J*GwslHvxQinG!ER3o++4~%q=9aB{-Tc{702E{8k-nEEO zU0^b34Bn|P3yB&-HqhRe(F}~#;N8wchy9Z=xg#!u?rj)H(~F_+g86?LErty6T?L=X z?VTHMAq!DUy(1B40M8UJ+g9Z# zTHW)i6aJcLmTnz|E>!J*Z}DjS#n9M}uf6@pjEKDdlU^f%Z9&LswGRTCK-dENBNjge zB)>`VNJur&glt*n-ak@8%DK{ng1T_$LB#}r+Xt1ud-K=P|*NFoU9biN{Z z=uUR+TDg8P)+4EeH3fhT*Wc(j-fpkD#Zo)`Y@j=8x%Z^~H6^BCi&-X|0Ze+PJ)HKT zGNNH4I$z#01&R%601BHAV=Zh&K=$>Ha7>woNazkM2e2p`~rRY%#=pWFKjX5&>gmE z%HqUGaY;$}pNnBh9RFC87S&`0vVMSAj$BifPK8r#+AluMkSL~J+l#B z_g~tBzfWKWtbJ)Ur4F5~O|a@>%1sv!{AN zO%7v6o+PhFKd&8*?g!U2gW~slnL(n!DKK0yX1H>9t8qRk*dWfmqPd?X03IaYORXGf zj-F5BhAq$NEa1m0)!xDClkOz6)a9CK=Sf75`HI3)R1%CnXxlkh}+zn!k zZ|qfKkQrN6FzI?8OF%T_(WnKRf+r)a4Vj1dC<4Y}L->e^h)}J!1;=n|aXqkyy_7-T zdoAv};MjAu@}b_qM-FO`Powoofi~`c?k{`~oi&ot5BIdS?XrA$9Sndeb_<1SOr65M zQGG}>GSb(t;T3JY%mV*MLUxI%u5~Hfu>TW*?qWuhf7_ow2%R^}E>1}Q8J!<*#cw@a zrw|eSGJ~XdPhj6dT$v1^jtA*h$g60>s;RG!i2M*>Vg>FyVc|RcBI>-=7l10(DR|g5 z)~Wvw;=8f}(MLXbqk;nxowA08$>gAbpS{SZtgsOOlgeGztA%4)6c8W$wq-7vzOF8n zGWN7^RLToEjCV&e`X$K!vsy5Qty?9uehb?}2XH-k2I-Rzi00`0 z*p)9hAG2%g{+pA{`@oDzsZ8otwlb{wBs5?L?e{OFQX}2BXd@DkDa9z+?91YdOR3W`r@7hKfi{J zS~4Cn18RMJU(J?^SmnNnwKT8vw__CYXLbHS)tcdnl;1W&JbP~9i{a+Pw#oqB#QM^6 zA6Vh0y`7z$UyUlc5Ux^XPj~kLJ@MLrr#|2U>>$FcYHfe|p-Z_7%Q^$|{#(4(_V)JadaI|1S9D(nyLH}Chzl*pf#WW) z<&z}U=xljh)ANMut3|{eiwYhKWDY^hwzsCmD3H}5qUipdZCyadG)65V_H zf@`g7q0Rpw1;=bCMsw;IQg}77kl~_iBf@KI?Eou;YLk#|!&IepJi=DY@T@BAc??e&A!E^w+0ZTBR4_u6Vgt>DNiZTO$>19w^M#Z;#z z_9w4LNi4H}-MmZEJ1BVmHgig(yZqPsUEx#JN8{9tKY2E4qBz1CpKqVW3J@DCk0oo(m|%4^d&tmv 
z!eZ6*{?CMq@pATbb#n(-8OC_U^paPsSvG1v()xFkjW%2=MyK)PElmPG$7!`>B#CqC*N*a|~Kk*2lwJO5>#zh8sTBSi3?;NsEF9R>=Ye>NR30+}YjZXqycC2`tvp# zNPG7KIZ8hEXB(*?dhM_01h!u(6|5Y9Ld*E!V0ma5_b2Pehz`UjUt+Yt)fFbN$@}@h zNcaJ2Cp>!1_iRQzh_BqZw)N!&JT3cvblDf^$qEkmmTIqYI?hQ}cmEq(Gt%DjmmE&X z-?#`M=lH+HNSusu}4*{G`4MTp%L2 zL$2;1m(4B-l|y)*t?fH-sxfa51ScHwGI$vjNvLdV~) zeOVf|PDysV6{isr_|_*;%V({{-883MEV=rD6k}zr2mCP@AgYC67nI>A5U&FN(g14-_ z%Xrhr4z4Jjg3Bo-h`L5%0V@wd>sEQ;`n*bbeYsaI6oEA-Yac3Tw72*G^}fQ3O&7AX zv}Aj_xEg8NX1B&b6NDv1k4-ZKHk9s(_y~}Pe%2*JObcS~NnP|yG0ykI)1PygOnFxn zi`ZJwyT~z3ER$}KF1zmXx;_5e2Jsx2#0p!ddrZa<9~YfH!io6Bw#Y*PLgC|xNxthV z4bI;lD9?rSlTXNpKhY*ZGLow?_^69LQ7TNt$_UQgZrybd@=XJKL@#Chw;v1X@f(G= ze4HAM5QbM12-uZHzpG?`<9~<4AMv|&CVT1-S~zv7V);a;j}}^F5zDc)9XEg*;9AH; zjo(gtyyjJN0>zp$M3Z|A=>O;4c`~Me6EogFduxh1XlG5`%4yY?FaD-OrJ78nkSrX` zMM~HwV>w{&y}}9y_Ve6LBj05*wRvx|z$McLv34RraD#yKw#R9BO&`(9>c>A2U`A#ULY4yrI16}je4;ZbxXuy&OQy$8%eOh_LJb-SIl(m! zZL0p;%=bz)dQ9r5GfY?rQ8k>PA?u!IYMDabcrVNs9({B6-fIP&SQ>1|KT~TI>%%+# z%vvxDoA%J9|0N{)eClW}G%bkM3a~P6V6a+J=M~B=VUH-S8iyv4W!2Rl78WIM-mrxe zTY1TcuI%;R8ofj1Q%@298wPKS!#Y8zrF>rql;Diy5si|=>S^vfTJX7@5$r+6CJ93# zc!454Vmdh(Tf+9}ZEO(zI4$Gj2CdUsD^E)hq-X318bzIMbygasL?|p=8q0_8YW-@r zETdLyM(ieO^tCT!$G6BbeP=wD3 zCRCf}drI+uN2I|vSG-hk;I_?T_tZhVn?hIC2R0IuI!^q(knem(VjFc$t)!pAojI68#Mq|J zv&yrTZ=O*0KBLi2dB@;82#^W|vL{=iC2SJT$T_CEgfC%WY@7;?$AH#X@^UlqJm5%h z^YkxSobbu1Jb#^%0wD%n%bzqmItKy@LY@Ch$3qsudSS+k z;Z)DfbO~@`!lebStz@fS#hAJ4oI}) z&8>2MW8=*_lMzG6B{4e6B z$og;RNbH!`RVA2h;pQYHP&`@klH)14^}R2Gev+*c@?k)P2Fo?~xwkjH@cj%Z*o<+t zX11k?!8$QP`D3R?O28C!(OMKe%}A&)N_bn09zt8-0>ebnV}?IY4K8;2bH(oDQ*_ec zI}I<)>-g9$ary=d)L1I*T{UQ~vZ_m;<1Z6s-8{JJ#8~;R?`LCQnF9h~BNA1v>@QLH zM)$rwc}DOX7T$WK81a31qf|(N1fN;A#6bm>ew{`zL9ltgFj>^B7Y;DSLE|`Ve5>n^Bl>H=fy!M&Xw$xr><-P>wVj!2@DND0g6Pp3i@KD~H^ zeQ$wFzP_r-LuysmL{?3ibF-Wz>@>{$@R^9l;%b@Uri(cbi5kiWAAIIv#RQMdjyp*j zIwbhZ#>TJlDKYZf4-Y;pEaS+(P}{@yF`i@`Vm zMB)o~3$<0~d0z;?SFn*`8a58Xxl4Oy?nLWL#>?Muwt7<_sOMJ=q@P7nme z<^|uY#-AO(+ow)g!Lh;z*=90Ftr}^2K}Na&2X8v-Y-k}Bh_s&V&aa1sZ0lexNY*cw 
z`pLH*&;$z=hkq{&UYXs$C+{OY&{cLn^yFoJswrzxX4)K&G~+j`EpdRv@PaRgM$oF! zP1!fejw-b)_WAMh_WK}LUYt2)Jm@T}AKRc&dn%3*xJ;x+S&rhIEF71aCSd&b0Wi3} zuY=UVJIky4?J`#l2y__f=}~eyAc(bDYGqRPlgeZHOe<`SMq!;~G1APs1_W~moFh~L za@`?nIArM^dzSno^~X0;4|scAh`J>3)cv@XLLTc5_nCUMrG-VSWp}|>@Mez1&k~I* zoyYsE2$nve0RnvMnq*(HRT(TFbw<6!KGHoYn)qfe8Ie-}0xTZ|Y`Aj;GTO{Yi9fzi zPN#A9_DCx{nyfwS9N%>Y)Z{CBlRHVp*{kK$~ z&mzVZEP8TPjChT6X!h6qUZB!fvTFB9rubfaj#9vkoXgzFK`ZSX77!^8mSYtM`feoE zBu&VC$r~#(PtL&?gP_S%JWz@i^fvVFmCVh}Sw0k;nEzbmL2G@HC&hTg5_WeG>Z6Xw zm2Z#JvH;?1GV8S6iAQ_<(}8cVLI1+8<(fuMercSZ+D$200V62etxfJnmD^u$;Aotg zMi>9CUptl3QSebB4g`&`)S-|##0i2AeMQ}HPYO%A1?F?GTM#-|F21H`SIFv@++MAVeqcbli;BSQEoUNWd#Kn+O zPLYD}nsBN~>X5R8dP-c(j&nH3oZpGohbB_0f@G9=2LTH2{3Tn~Gm0-70sLQdo|_q~{@?wVjGtA>Lo$p) znj2=7v_9&$x1BngcR$g0tK4{Vn$M^6#S5t17QK zR(MXfs${o%(UqE)TLk|7Vfat~Fvqr~Ta3OFd+LSQQs{KWvp9~ECduT%YPsbAPerPp zX3zG4%He5`*9UN^lmVsAU=0Zz=#XHg8|0w?1mpEi<*<3hmJ3jX`z z;$-l^x}|(6=&^OXeS_A$-MBj?6_;ZMJxOo{qe-a)AR1hp7IB+f)?mF^W&3v4w7t7@bgu(-a*_?R0_Je!m||irB)3?CA7KFY8BH zf?!@KNm|4Lyf#vtEwwF-!Ut}y*0jr~Tpeg+BVxWXNuf61`$q9R6%(SkVh`S>GeuG* zP=`pD=N}JE6m}It-NX2}zj$}cGtM{17|4N?Xthz=r?skK3*MdhWMTqq#ar9ZQt5rBHgh+5EiM*Lm!}4*EWSoku zsdRE@K4nifAC#Y?$_Va?7A_{t1cBj5fUx>Xb_CTMx)4m)3D}fO1`l%V&?Asp){rsL zO>pMDv#To^0Bg~^fx?|Wya>|g@CaE(C{zCO1^PNybF_B4P3V$o`kVW|R;97j_G=Ub ze!IOCGK#lGjE#>AfwI1w{i)06)h+^1QYVWRNoBZN&OlPTlE6W;i%T3N44yjL zZ@%q*;@*k>6$ZF{eIXnVTN14GhzZYn`!-s=C9EINUgSjL!A^g4B`&7c8~re~PEXgN zgg!KjxMN0{s~IE+gssKzOSi;sg5JkGHo&7nV^09G8W=*yf{IlD{!2cicyojkrC>!F z^r~=W=CSdd9hx+V~5UrRAR)t>`J~KaG*irSElohU?&N5 z+ay<4HH2L3o$jgJyYaZrtiqzQ*%BpV-R|B8m%A_#P$uLu|&@#g0L>|NTsWDf8XJa{$LOWQe!>I+x zt%_*my1bWjiFalcZY_`^8RLJ{8e6$D9I(jL*+bKoAQ^29itLRioxeL@&;6J~1^s1S zaUmQz1{itla=?TsOt70^YVL#0Z+ew{|bht+>6JL*Aur7GAZUr4sF1G!5->&KkSVa*Y*YE`Uw zsOl9#shk`lFE1}=kJv*n^D=vokpyu7m&~|sp#z*sJ)IE919&>IT9bI*Y^a!+OlFUP zuj}Zu`p*f4v#Vu3Z|{^Qf@#xZrtOd)?~Q+S$B5h<(eF|M8nQ}1eKHwzz|?G!jNNLF zKXrmWYi${S2?L_%V#M!eRNv;3HSIdM1T`#va(il_K>v}WC>YI&&KraQ3v<-ejf8sB 
z@m}SJf3LqXGAz+LDWmg-G>dU5fG!erOeb06_eT9#Vzh|la5_HYaufnAF|ws+sq~a4 zgpz&z{&w~C%y&k=YWR%?b;=?vU)#^gGT|~$mfgHVn4VOyt4%XVBy;yFh(ffRNxCnZ0q`y2D&K$b!aA;y$N$iAqn1tTcGINgnry2 z-;)BJcrzjZMnreES5)*xdGh0HNV9lm-eSjIV}SeXQ_mA=4uS(fx5%tc@_#n}*j36d-KJ@(b4K4Qf^HU_)G) zrgf6QB{a87Hb`-SX%d>FUhx4Wc$Gkx5qvone2}F}?B?A6hSU<8aXA(=%9}$KIlzwi zjOMKo{bMpE-rMl&svLRwE3i9H1XfudT*fX z#GblkQhqpv${oWCP+_?f!%Yv72&by12`QWG{jULRCIp=#4Q}wb-16!IJ0r=dsrR2h zkkQO0rsY1u6{ho15C4?DO{6ye_$b!K732PqJ|t|Fx7WpIy3Cl@;*wN50Tlqeb_c}o z7)ed1y#VCUoU9#g4u@I>YM^6_VJG*AM%WqtGdsH!knyqnlq~)%3)=X6dNYaSSEYax zg#!e<8FZss2RKpZY^Xu~&7Hz8osknw^E-E9hGxEICtG!aM57H8sV8` zA>%KUKtoB)A^Z{gw6D5QwgFlO&#?Oq_8Ldrjv3091H7gk3U6beJdCub`7Aj$*BH!y z?>HbWfg4c#e5F~yIrIXzlg!Dp05`c7HV!g z=ecDQ%YAKa#|H6b{>DuZ78VrkE=U~aSTfo!+SzR)Fs+A)_MR16uO{sphl=WO^W$*{}`FkWkO_ z`C7wYO1CLDW=zLBMis)l1>whLZv~!DZyK9WiTQ%uZFoCh9-g!cucizPUU&~%CHrZL zo;_DCv8OXr1*UFr+%{a#)l4SJm@z-gi!DZco}iMLa@~-g=9~-DqNDe^22@N>Sjgo8 z83AWaep8`~TEb$8P$}l6<>h8oZ+G9xXr#`jQ)Smm^hRa;}qR!2=Kg}8fT~%!|e6KI|8`#}OSJ$A# zo)o(Q>j!L*dM`_V!$90T=&BMUZjPlY-w(!`3zRI$Vqbt4KCYu~N6z2s4-E~qJS=hI z)3^#P;+VdrQ!JCW9|B|74_3B!?Zw**$_Hu*XLl4MmY-`Kogo3yS}a-P$-mq|g8|JA zmId_7Y*wV=k^VCF41t_d>%eALWd9}k9 zJ1{L!QGAMUqT(_lUvMQ=+6nOA&3KYBS}fL!S9$Yj_$KhXnT7R{8IL<+GTnr6U;xvO z2>pi`k={LsH{d~+w1#C9p$zhd+{l$aW`K48P`B%IJ5F6wZ| zVf@YwZ^+*HmYD)U2V0Y{<}G`r-C*RXkDnzD$s@v(0jokqVJK*<@5CHN1Xz;7I5zJu zVOg?gRSPr{w1x)?0j&|GUpc0Y-cN_pH{7(662%Z7z>F?PIKAqq$B^@s%Cj>gjjE`K z4kmw%`9#5!>BK8jDT<&bYPhix6bxRN8g-Nt$CM0-Mf+dP0`5!R^BR=z)EC|P%h0eJ zAEwen#wgU9h;-`C1u?MbmkVtKD=7Jam#YsjSc4aU&+z?>viv(B=zagpw!{aE7d0X@ zL)3tt8U|QvOG_tsgpFG3EgSSIVw#fZo}{FR0J$)K7I6xv>)3c0NY<$mE*t?JuxO4k zhfimV7>F7h7{8t$NS*trJ6hTmS@|MI(9!h{l#$-@T5G`Ymy355IHsh}Pz4oq&Kl-aBvxBzv& zH$OaqL!>2aM^`g;3fe)7`Mv4){n}mi+8x*(n-3M$lA@wqA5Ef^q#4~=35yk7;0{g{TbUSshggxij1)^}d~svBCo^LexTg#CDF@aQwUox}+tn6E`Q9 z<=)akJDH%k?b65?=vx&^Z|dv}7nP8>jkyqEDwVt!PbK!Yt%^)m`w4=GF$uRvM*c-l zK4x;A3}81`teW2_zBMHl*u$`U%qX(#pVnv$fu;bsnNQYx*8psL?qHD=R8KD2)9)*oZLOO6{!{3`PVdO;G9qD+MXetL>j^MlXAqtm}&V7#tLnx 
zAQ^?~4+k#nFw2{`d2-&e`w0|sQk4L&-C_m3DfG!;I?c%szA~w|V5Xxb_vR}JiEyAl zzX}7WRvKV~Gf`kA)3-t|8q-KX6uSt&(|;4PVRoNPJVqu#2sE0qwz_LvvrxhhM$*j5 zn&Xh{BAZu`gW#RU51>B&C<|Bk35H>MZNp4o z{RF`+uk)ARIl$uiF*>XEuwQq(M;|*#4!ZR$S6%xC-KfUX+aw$y22_Ux@Sf(=V}~*i z4A!eHf|IE+;M1aMTx{T@;Z(_lC@Lx{IR+fe3prC@ueIqH6x_#D4O-g4&I`~H9Ox)o zz?MD;NwzOVsp`&Po-I4Zj>%EPxG-}u|G#q)fG3!KzA7&UW>N`FISRt@ak$~vAcauP z3OzwgUPw)Cf1*W&%&D+8xsoIf0t;s0{SXGaZar5vzr|}cYuxIR|@z3F(@QWHVW>H0>tC@R>nS-U<_VxRT#~Yg$cDwacr}c|} z&NBZzX=!ckiaLvt?Dy^{@K);C)0V3d?N`w^>3{AseUcHseE8>LsR{jyh+@?`w#tGS>ud%?)$c< zpG|u2c6Gac*gOo3@}6f|=RJDCYw5Sc;peoJRu%b4BzFC<;jrG;23f?lA1-{ljbQwv;zM`-GXg97tWGgvI_1m>~TQ#l6AM2QtKQCk)+C^{OXqde*Hmu1S zBR@(aD-KVbh`hOQ{GRyY0+>{~L>}G7Vg06$UW%Ol{prTDq(A+poB?(WroO%s9i?7` z$6LXY^xyr)3VigZ9=~rjJzZeQ z!9BO|= z9HAY|-mtKx%~;3_3Nf7mffFUhv`No-EN^szu%mXWDVxuqH%g1k zt&GRF|MxBc37@O(Oi{OpGV7w(zgF1ZE3bFXoc_SL64`S;-ec?6p5(3g^bT$P(DB0b zT6v5y!CJz=YIeHz=fpjP{>dl5md%p3+BM}&59(LaRqVb?n`i0Q#dDXaq;EqcJl@&h zKByy}Iy9U!Ovqlg$)&_)RtboP`}Go^NZH~ki{SoQoH|S8Hls2a^`HwmI5=Rgwv1lB zwqg5c>WI!S_f-8<0w?DF`{1997tMa|@qa)opMi^6riVCOXV{cJ3l?<4T94KeCA z=QggRI>XFhB;_C%80#skTS@fx9R}a;ACD;dK7TwZ`uIxzdBd;rpfSlUyo)Bzt|jm8 zrLxA|dyS>eovpmNs{uwqOs(Cm2j*|%yw=`)9xL#t+F9;zJ7tg-rX3;}H593rSRZqV z$zBH5>?i3t=iK&XqVdGdcD}bv-@LB}nRk>QWb|{CGl*Hl=x9~@aO&chpN-8Z!OxB7 z^=pRzA6s7mRpk=3EeHaFfPx^Phfo2PP`bO35~RCBx?4cH1wm; zMX2{aW4dTmp%63w)!LQE!Isx-u{2#m0&7DuRDauzQ{fWutghxD%2SQCT^7&}EWG-Q zC6@~m@^HPOgNT~k;c+HQuWr0k%HSIppZVc|VaYd)y*dX^NQ|AFD(z>~_50@$b>8ex z`$)ULc_WIiSkDxFQ{gdQugZh+wVHBFQb7Ngq6dB6r;L%XQYp@od+>Pg)A&U40|T#@ zIR#M0sZ|701k=HnW!^`F3D#%X)=aw|Yuw?7`0MSVK8JfDURS4J?BT5NkEv&h-sa`k zDyXi;=g%}&*AVro#1E<^1oXQgOnIBFV3o zyz5Wb#re&>1_n<##Sq=H!c2ge?|NO&)kW&n$i*k%lhZ9EHVX6eN+1%Qr0rCmx;fl) zJr6C;!0oa@&KuFwt2*!F-SjP3&J{e>%0X*Wef{ptVUa~o(R_@AUFyIIa=##}D^kf+ zDZtRq#8WcaA4(snC32_VrknljpL(}ITv=(*rR7Mh`(is4QMRPu+&B*qca{#(c^QTV zw@z1!FFrKsm&;dRA6Yg=U!G_+Z8?+FHAbD5@E>)F9gf&?Yl?1XO_cv=AS#S)+~dT_ z$1BiiEP=>(l72s9lj9M><=ecqrDe*pYVq=Q6QO|?@%Yc%Oq;`7kfuQVK2X8?!NV~@ zEVe<^s^Uev6E@uyM 
z7KV#xewcW=DAdsxN}0J;k?izAPqjh8C^=E{Xlf`yjbB@iLiHcK^a-^{d)FcKXj5Gq zT*qN!W6uVjT^`xi2dRt~O_TCk)Y=MnZ96FM?kX%==AOADIg;j2F%{i?osM%q?Zo|udcJBjrXY$4a4Ma`vEgP_nTUm|mVH@~#f zIa0T4UB54O{KVO-&Wwt|Gj6a69)J0YrkDjHB1Ok;`$6xPYqEX*8B<=D!0SUumL~aO zi}%&;RRf~U>jz%wng6U2y?M%OShB?;`Gudijde_xaJh|JQ=`SLA?Ja?1nrTB6{b$b z7p%7i_(D*_^Hth?w%Fx*4lp~b>-@E`|wPAT)=DXJ|d`T zY{PNxeE#aPopT?v1nQ`rz+oY=Y4|wux%0r{qCU;s8l&1_izh_5Xa_p7=9Kd#G1Bci z<1)*8vLtFRyyz~j09O~v15{O1Y9r=SeM2?{^OFk>xwoi%SpOCdS0sB*P-v zPtFtc9zMh2IG^MBRklGaI&#V1qGCLMDEppyKHI_98~f{ZtZ!=`d1O=IH%)vqXy{X@ zA4|rFW+lwZb*+#LfiU_WQ=E-8ovDZ`^Qj>&2j)+?kGNQ)*`w=22)Dy^memLm>u1L~ zm!qkhs-^r(%zSO&B*)^GVa0g1-v6GjaN2dPpkQOCEvy9Z-OwpaY*@o(E5x*MXS%W9 zp5blo2p&h5^KHJoB80P z)?0>IufGPAcaurYFIiumx#A;`MjGa+8Zkx1_`)^xHObAYW#!3gKBMy!V{wEKLP^{C zYJvYEaJ{ya9U(SRHD|h&@YP}h{w5m0gqJf*ED}Vu8JhOlFFU)Z$0$GokBoEdwJ!<>7!g@b zqZiAm7ctyj@Q$K~#@o7XGz^!*9$v@0m$S*a;Aty2d{_=MF-$A#l6W&XgO3a_y-7@IFj*_u%ER;ipM zlAxviYQ_wpAu^h%1I~GZYF#t;X<@eYHaj-QS}>}iZ7g1E^^CHTk%jQ&8`iZA{j;u6 z=KSNDfe}QMah7Rks1`fpQ(NZ#QAsZya)l zRMplmIbOLrIT6iTZ$Vy_al>bF&-r*UjuUR*X__b)*3jBn9tF;53h<3s6skHAb;Eu%bHh z1RAPAk3>p+cSb#pj80(V67$M+P&)qHslTk(w&bqa`l3g(r(;4t7J@-nhdUYCHj>(38}OTXG7V^K+OJL)e`C75OEK2q{R%zC1hQvKK_8 zs2|4UtbGl3EtiQ9Ay(a6so=zn+1`~`KEHcF3$v^bk{*%`jC9Cl6IJ*=6$JH?Vtxw6 zR%;k^oIexQfr~mr(HA`m_10u<$0>gf81wCy*Wy(wF5MZ->eY>Pm$7i!)ZqV6Jjvoj z;?s_>^r#P+81ZE6154QDCqX!e3u~5mko5Ics=SR*)wM#;gPHNs_bx$XLfdC#`@%!c zJ02@s1|6fUQ<`qhK$%0j)oH{!j_V#OE+Y_Gp274H9fgKFnwl}M>f zTgZ9yL-q_e&CJ0PGT?+%$ly)Sw# z%wjZ}Ri+(cNzYKQk&rNM-k^}4xNuUVHd1FHY-IYhLW@1ea)RbfU-eS9SWm9r{BUPz z4Kr`~d_`iXfFUo#^teGuArIV)mx7zI(Qt2v&9G~i(!pDg7jyT*v5Br$(d=AUvDa$_ z#6HRVIn3p^MS12(5-Suxg+|KxM9vH2FyVi}bu+cK+nysSsf!T3>=aOXqy=N}ds`O~ zaJp@TR1+JVrQ=xJ2L=tv54FlGfJ!QTk21ni!!F6bN(75 zdPd#OFCFC!8=9t_JS!?w91k^IYtw2u*Rbe6r)!OGyA@mVc5U(vu4I_3L!b9HWHaNd z$z@f-;+6uS7W3zG$CWz?@yT{|I(IsESv!i|XTD1(o%>EfJ5fAqzj&ETg9&&WGTZFe z(u88fQ+x5_^Fv3D)4M{`ZizstTV%&1x?i%tgraU&SOscPaBUVam1kn9jcCNz(t6Ln zJH#HpTWsI5UWdJ(Sm1SV63B}G1!qm`an>C?v)=HWrro;f+u81-SJhNe@e0NGO*LiM 
zuu3C|kyOX4NagG*hWE*dN;`(s=o`W|QWJr-u?iUTQ(HJiZl>!;KM3#&Uwr1eYEL~O zAaJ=n0Wop1#ZYY>b%e9Hp-ie6NSdyj+s=)copa21jBNZyOO?uZ0S0O}W|c!y%7)DeMg}j>WxabLYZJxa@E1K{JO%m0 zb_o{j;Xb*z+THtsRbCU~Gd4Nrk$J5YfVyH+v<7f(G>0o@cy!(pgv_i zmS5aBRTF=KJTr9RIX0T?^G!$&2nxP5S|=fd&t#R?=8WKe-QV+^KlZfbRoSS`D({TG2x$33j_lHiLLMv~XhvNO@sHH5^7~ zx3LJ)uok{On8=x(&9rpR*|dpUY1sT}sO`wx`*c3%sMqg1{pE~x=uZ`G*Q^m8!>>UY z4Li#2<+}C>QLKoiuE>mwod~~Uz1B8%bccH$haco$a`+h zQ@cK>(n@)mI@KRjv9aj~4H%Q~@FIvf6w&&%?K5kRvV=2GvMf=!J5* z<(R`r?Ql_>)`=16z)L*@Tdnr}+R_=NN|EdmOc{Fn?eh($x@_aF>#xb4bhObPCqQe8 z<9#HIemtPzc8qE1_Gel8AkDpQPiXD9vV$WMq1Pl=H@BOX7i%{&Rvgso%ZlHjRebn8 z%h9s-$}r1eZ@abJ`J~27)`{$#dmrItC|Mjj_`yBZy_Ty~nmE;fgnbl#P)gDvU`U)7 z-n-Knw6t<`pt;#4zRxw^)BJ>m_g)=`XDx=2jD2XAg6)t{4lNEHG0~)E8m}|3 zZTM0pP~>F>IRb~DP4{~K+S-GSd3C7)3JtI&vP?k1UZSN)=}MV1L^9B(FkzbTq*HYB zF1%ff7X{BH7w^Zd2O;&_Oe;Ha*S{xlHE64wNyTr{6|Y(0B^S)C&F?>$AF8>MJfPyS zyo9AIJg93hn`tmE{>1L0ZO0z)#)aLStyY#3)LYAAa{cC-Kj;TF1UaR&GhFCdW7TEvlxgxx4*xQ+*^D$|JJIm3l~FQ>)q*-8F8eqOulq^{w7bo$s>NU6`!6GPosfO~a;FJV^=v0$JtVW}E!Ini@UuHC=R%&P z^9KeVo>cTzxA8!z!79^#c;&UF(s?kEEiP63iTN14;f)fPD|d%*G%)Jp*iIed;V>13 zowPx4%=u8ojPL*TM)c;^SwQ3318`yP@FJH9k?pXeRpWJv=;1%5;g(;fvZ|_tp3`il zZqi8?uWr@+WWaSTbzrpF8E)q<%XXfVc6+`}+_;h0_+=#G1l1tHeBoVDM~XYT2?eQf zh#Ei0rODK+L1eqb^Fl*MWM9^NWFArsb1^66 zWtT1o%Nu=X{p!#S<(%u<=r zFVlxKL1$4X)ni{k7(dlg5L@KzmOn4%Ve!P1CGWP`sh88bB0U^AuEfhrwYW-i5G8lk zew!yZTOYN=K7$xPBJB_u-G#J@1497M#kh%QLQPr>q0mSMZ=E7M6>>hxDF3%xO|BlD zT?2HEv`mFO0W+S_oz82AGl-4xn2UnJvjT1l{aRZZaBVQ-Qs~|;C^+|3zt}&07dt^c z+riE_2m^?)Q^PX~Lt*9#6|@m0tGos2!3=EVo^dw1QHK91ji z#UFJnS9Sc-*|K~!94B6iT*ps3bgY&&%y4w4qT7+DxL||zUTtF@tG64vjJqPKOyV}1 zZ?5;&N2`}^vyTY%C|*5C7LOq^yw6ts;IrY(kk$SDy#eP&W^j>sVY z)kSif`A$Oj=#ij~j?O z;@*$=(=!rl{a|2hSa3ezcdU<>E}M>)#I81%t+zVSl%Fjph#BY~5y)hkxxTRG1zmG; z$(>_@1bZe>Fyf9Co_`*e;T(deR0|ex4_5m~%xHe+oIl{59~md)QLED8|1O0v+ggS7 zo+}#)Km^zim)@7vR#5=~$f56PooYzI{wLqNxbgrQ#y5yC%HAA(}Ys>d0 zp{rhX7RqKUCS1_u7}f@|C`TC!_4w&hty&?C`g~2qVM5$fzi5qbY7@p)KCI9o!s3ig z-fcqWe`n!yIe9mnqIv+ib9I$0yevim=w zWdZxjIbg)=QydhI> 
zpJPuP8+NPKCdpmb1EsWJ<1vqA8x%GUzvHY>%LaFkQm7f^JAbpL`Ghn%pIyt1U>Ejn zMBtIv0GaUVmJghnZ;6=;Td)bdBkcs9$kr!n6Q{Ojpjj0XJQ+V1ni0r!>rRxyj^NyV zIN{{Exlk1!IPQ5$Z2NNJ-6peXd>&PA(Nj5g8@jgmc ztqlC0z4;Q@blkgCGT4rk7H%ZWeCK8Sbumpm{^b%y%on*lG9R|*Y==JNU^KYQmItL& zF2-sHcdeOG*jueZ0_hmdmRjF|2pM}?+$h?=efrJiQ!nR2X+eKYD+rC6bz&Ww(@$7L|<29nX{zF(A7MiKQCc{wCsWPo88^{Gd?O zdzxn2OQrtjRE;fXp9y1IRDSlz*y{-5A_nN0(lmHVXzUO@MmQwetX~e%(?^Y+YkuOE zvuJ3ZV3-7(p~ytT=gr)TMK^TIspaJiV$1!XDe;b}AI3C1Rj)_)IuBsYMxgK4-a1PO ztm3+nuF(r#vg7r zqpB+bO~9<=C7D;7bQ#Az&WO)>WL~a`wHDm{WNKP5lx~!D z$xaS3-4?I#aQA}Ok|3-6lq+@;!{vr_e){)Z>w5IkZnGRcBcgNI`{j(3d9t<4|GWFEqnjJ`= z3w&g2!XF-M=x5^SS(4YV-|QDTKI5Ea>V2<`P{rOdvn(r5iQ0!b(63Vq9xCz~(G}!j z(DnyTT+ehTBPv#$oX^T|P|U;-pnr2xIDb|cZADiHY3S{9vow4`c=(htvpUcEBykbk zMFtF>*AH&-q^;`-Lb}~(Lw8XVb9^Ka-w!=MtO0?8*AzL%)@8{o3_5aD;6yW{ig00s zn{2on$J^|K*?no*U);1zW^K)IgmLA@mi+Srp{_Hbyd)vRXbqWmogYucxZEJ$77L3? zmv^A`4>}ZUrTen0)t>GjnrEHQD!s@W!{%*pmm*{B9^aqXktNHWQ965{!}QY zKI%|k95QUBp6kwGlbRrOxuSEq;@ti`1S6UW)`6{-5)3=Efdji3!J*aaNcWri- z`=SN)=0(BWChXB!(%@-Q`}WP1SziD76>^pK29_CMTRFq}E6r?zL#}KDw?j zw1j>m-Ae^yAHQ2b_XkERj;Kp~d9dY?ET&SoG-`%68R>~5^Q@;|;r!=7g~^kdOFHci zbMZKMh6`mr9esE4trz7K_2Y50Za1?TwYy(AiJ3! zr7FnmIDNJ1$>#AbPkH`N!|5KF%Y}H00qGv*s#*-9^P@@=Oi}@W7`fap5?X>xNT5#Z z$aM!fIWCSderMJY@Gc#--x~ds0M!|bkXM^W-uB2>_QCd|g2Gf-PS!x`JABM&0mb6b z>`Lx-KB?rshw`J2dK2i+oeF(mq^|@dotCZ}OuT*Jw6B2VFckP{i3%9?I<3%jw|xsi z>e1z$uNpIn$P9dRJmP|Fb>`rcY>{PNu@~AbS!>@13eZ64#zYoWa2!=K9Z$B!s|!y? 
zW`NmT|ABnpIPfL&zwY^+)^_9vAzDHK-m9O%4$I(XBmA(G$tkdxB`4|zYBr=}9rbadFAv~!1?QPC1)(|RUE zVy5TdUUKK|1V0nj`yWF>*YuKZ%v0*(fOJu~Zw5BEjMK4XdMkke!K(f#)orEk z29i+^AO22EdxRGZ5WnMixjNn%qQZA<4g_|Jw8bE-GB?-#kYrogl-~_KqXHs_n`!Bv z472)QGcec^U%*d+sE36*`=0)8XF`PI{hWvANI`w&``o?OW#7q9(~`du9gD(|iH-=&SX&v8 zv4&+obTGF`Bc-lnf-2JlNtnCjI<}mOOTISIrfZy-c!XkM)Imr01LJ9304VDtL(}Z$ zwE%xx@vv^RbKfn!HGjluBm7eZzyV|cZ1KHta+8TjkWkkle}jshJhqTyYhuGxfeqE@ z+x33s447gDUN4}x^8)F_nK7YD?aId+F41|HXfrIN(0oMv;S9!(F)ul+S#x!9G^&y$H%W9zH3B$rg9ye*g4aBn94yR*pZ*-!(lq<;((hlr_p1oSbImMdU5ksF9VBPoZ0( zB9G;?xn3v^%06BUzL$J$sbd|d=QHG=)KD!=&3p9iU#jlAO$#EkRoCgO=d3^tr$pua zF$tV?Ke3Qxv15w2eCsG#a-;t+7L$=VsRtlrk)Kzii^s>sh1KI@c@emLy-YJfpU!s? zK0=ETezYWti{Fz@^iMy*Um*dEjEph_&Q^J%cp0}J64-FN$h>@ck3=%zK1d~u{1|4J?KFH4&<-6bW*aOul zoLHDFXogFq9)aCdHsrg2`E{NtJ*j0iYtB9MKw~h#kLY- zn9RVlYmuNv0jLW@Vu)D9_BW0UZ7{!rcpftuTVSXm2GRq1__X%O{OCuqs`hQ`@}o@N zs7C6x2gRN*IMeY$7Nm+Uk)REl3?^%*fP6|5!8BFWmxBy(j5QNTd3YL1N(VedCL+K0 z=`MUaIDp-Tl~C6D7@1~{`(K(l2Pb!1LRjAv)_dS4Z-0+yhBX_t#b-;V92q|b;E50S zHnzmlzU}Kjx_+D3Vzw5tBPpHsEtJJFIZrl_FIOEkR_89rnBQ9lfH43-3Xu5;D!lNx z!rH$QnE$ZLJX%4r7(}u@J{3*dMm7@t-L~Xx%qLf7A1#UUU1IGc3%KBWtQUt!#DPqs ziw=@(zYWmSWB>FgFT(la`Y%TA5+9i=J}J=Nu_?Fq8RX&9o>S51ygrTf7l+c+6;OX@$p;mC-Vb~UANE>hOqBaxh^nLG2-L{o|)-s=BY0MNRF|8>Ib}J%&$u`(}9A;u@jdH zNFhK%0VV|=Wos6s9Mk2+lc6|A%gV}VAq3{p@ z8S4(rcd{K8nt#dM7f4fcBG>E+F+b*OP@+7M`w57tjrHHOzU1Xz@iwy16z&W~61g2d zlJ9Vd4)vk+Dlz{$A_D=KpNBS{o)382*JBcEFNa0R;UJ;?46suyKkNJQ<%zwMlkrF6 zI7St_17tq>WsjkJcoEQTy!orIF>kV24FH+Jy0xQQ1;Jpv9T+(>&pD4qehk@%#K~`h zxDq4^z%5@2u%BUa41PqC5%1iWd8)v5?_><26nVOQbs#wd`|3mO`{bp;F@MJO>e%dH*jg= zRV`ijYYCIM%?A8MBdqPWI|H5v;j{|s|=E{ zi9p4b{D(ia6JEZ2DKavapO5v}c|}T+{)Kt*MBX{4moKE> zWsGBl(faZPJOV)KM;x*TDM%m|^ahoyYIpiGMyg8G*SYKyE8N`T5sAK#vpLPN)@u=H-Lp6$Jw zF8I_wze!mNd{P)OpM($BvSf4MK9w^b)#TWWhO%c@s01MGwG8H~N&9a<7NB53g)dw@y3q%UbSSc-t+%b%1v=N%KPluug|96Bh>NywLeCqF zACR%Ae=T;G*5DdSQ3=Xhj{7($I%jrzH>gZdVnhI4r(^t0F%Bib(R!K+=#htN)#XW2 z09lEPQ<54I(R|7jw1vYH-M1zee2Ncd(pHkEc}6eK3utFlri%?#rt_AEwHPMU*l&9U 
zN&t|f=0_h1Ut8{bGBPrN12qQUNHm#Ds<{wiy~ZYvsQ!G<&XVnnLM6#inF=%>{MA78 z6h=+2{J2qwf&k^rhFWv(F|3?xMOyDST_lDfP_e^9`zg><_l< zFxW825+`F#0<>>f)t1aO)YM`1fj?^K$b&!sg~;XBR==<$P3g8}^1$-}#zHBvamh8{ zX)|!&15Ocb>NwTNlcXlyPN47r*vsY*McDc;G%B!{2_$|zH8m1axqhYEIslN>o<5i_Wl7d&F<8Vubhjn9CVxm z`My9>ujL1uz9Axikl>Ri$QkmSMo8M98RdZSBZ;LJ0`KRFa-lZ=x8rvx zkW|WYDLldT0ecS5w3z@5okX$K6a<(V`+etce#6j30p5@-rCani1I_%3N}>2EnMQDH z+?<(-aDC_`KGt)H=yPo(uE>rNMW+jil`oB|P^Y)Iw{M$bICi>=t1Q^uV0#bA<$GVZ zus}ZxI4lsqM2dm!oV1B*0AOn{^Oirc19yHvt*+(8%H=oe@&=0Wrz9#sNp7$h*N&U^6HzIRZuP44>tz_UO zP^|1t1@Y~v7(->i3@&?fH&_9)1Rt+}1%7g)t9b-NM0!(MWU^>Dw`&iB;2xbc{t1Iw zC-(ef5rhYXQ3_;4JTq0v_4D&H^&}<<24Kg8w;G&YPEKr7j>Vg2EMGj}f-#v7#59Hg zR|eLgu(y$Lzm@Y&2SH6r!@^nNR<~|D)JU&8exC}5leHqQ(LFET3e;lG@9+kJ@#I*+ z7Zrc;9VhF1)W8%pUwu+nF0%QKfdo9C1IGwmTWV9_!SjqlbzG+gaV#)a;WRgZZ5!)* zOZw;+pMVv`o$Qktd3sfPXIoUXUg|WoEXPRO;fh=mF{n zid3W{(Y`Etu)t;;u#E*Qb*gpT#?-6N1+Hq@!otGX(~}qWgM**H%RZypOJaZ*v)g`U zC_}t}d(=?wRVhnzh7i7zs;heTN7Y#-!r)8{KuH!2*#CXAlFlc zCPd2kloh`xFFzSs-|gGxrup(~htOzieA#PW1h%B#RV^!RM^1|+VvjCA2+U>SgO3C|G=V%MI`>Ju z?EX#|KMpcFZs2U*Q{_44kpdy7U^LGna3{8;hvyM%dI-Z!#ZV9pN|JVrt^jWBe!#IE z{H@BQ3e*Wef-7x+gx)=?!#9VV{c0RFlAmzkHvDOvuMU^Vl$soqo;Z{CJ9>T{3!T9I zR&F4D`gl6mJF_!0_tabfg`wxp_0c{9RJl~X@Nv>%s`s$_!vX@BfWnD_=hLaG0o(_$ zHZ;lFW|TVcjf@?b%#`0>Vn45clUEFPmUaF`baq2FOy*wZFA#vW&wx zq32FzNdS1d0I2H?Vt$ndIRsu@0gv-+I0VV!LmKqR)YTKS95qGc_KnLdil2}hAC#;k zSHi_GQ^A-$NR)?A;evNZ^BD;;><*c7g>NNY0?IGk;rgaq3EJ6a#Dr`!7D*f0AR0SONE_xFj4fFa5Kdi3(ML6n+c3mGcU&6d>U zTq)>j3Na1w(CO)EZt=RYmA5zfAmyMD+ndAh7;Wz@23*(uR=eHgrL!3KL?9x;OS5P* z{f&$JBG7EbruoET>x2&z%6D#!5FLYaJ=o+nCBZaO?C5J>I|8P$=lGIeZTlFo<%nj- z+In^H{M@d|lP}2VsFspdR<8T$^^9Hdzy%RTNfrJ`$2h(X-%s=e-eQROcR4fL^pbxV zZUNi=Xgu<$ZKNU6x5=fAW?{!l*mi7>dd7>ZDGcXtQW@pmb$VL0!aLH)R8&dkQo zv!aSeP3rKd-Ffcim`z$=iop zef%?jg|ahLJ%n{aY|OJBvsd6zj}(QvgQ^&n40FsD;|(9pbC2hrdITs2DG#wKQ*Md9 zsZj>UmuCZM%);bo^o_5c7W1W_W5}p<5!4Vaw-Bb6pvf)kwtuh?J-4*xKk6}9J$n`t 
zs=TJo@yQh|daK5el6DDo7%_ND8GYtLTW~RzTaU_vqs)M6{q!y0o*3bW*MJO*UunQ*d1G*t8c7#8-kwZQ!I%ZtvAqDKUoruxNo_7e_to&s&0;!{sf>4)f8Rod7(Sh_EtGD@HSw!;`Ph|}IV)>q?t|kyAWvu< zn(H_R+>9&1Av-_Zr>PX{v+)c(kRnTe>qllHLSHf<@%&Q!sQ)5x)t&9_&W^jjnIX8Y zdf4HWGbBUC=*x~T>9*g|Zg~HFv_vPBkSC@@CljuuWLdbU=d!wp^mhzV`NL;&a338Q zW(}JQB!I4AVJl*7VQ+iWVHK8t^!R|xy2&u2%HpU-t?ViV0k{Aub?-4IOkyQ)XpgoE zWI~^|wIDB#`Mlf{rCjfTAcdujW};vW5EL2P+cTSw!QthA#lB0rz?f;Ej&t3wKKLw# z{H&Q=Z8VD-z}iLM`%Ql?L6KW-NlJLXI5hqcP^Cwk{#~V{GjtX1Lmf$60l0dgc~8Vk z;ZIEC@3;~GTKjeOt5zy}9n6=kCQ@puu!GN)>({|^lH3C%+0di-IdDAR?$44f0sTlO zWK^xd`xOsPK%p>ZuHQ8d2Mt7!tWlSqX&XzTAcJnRG=YZJIvp_Bc8?gkyK|puT|qTd ziF&2iH%4-zO@4slVau$K{K!#I)j$|idgp+V_ZR*cy#Y@el1kp^iJ>~M)#*5%r+tu_xl==k~brwjZGhD%T9Y`diK`v)kfuHfc)S+cAe zV7!qG0}g0lyPFz!XC;lz)91kH^@bTRJ}DU3$AaW6pI( zAEUV7R(xYH|70}E#GPy8B;U84e*W<;@;|y3TZL_PqaQm34j9viw4QzLE(lEjTAI6# z-M7{>YPIcB)g5^pN@yGR25Zr2E{u}3G zKk|*Vn~!woGJEe#`#r4~b5!x+_{c(VTI9CEKlrT;7Ct8Mws{9_o6%~myg+E+?Bo>{B{{R+LC*KK8`*e9a`KIU3dA0#vQ@KRdY?v=5K8sw?Z}j zJ_U-Js}ln<+nYah2*1>v)Pe@Bt4mQ7#0be>;o>q38rpEi(4GgEnRbMBzX}sjl}-Ib zm(<3yO^>$u2sC(U`U4VcT@V06|HeZ%f09kAuVG0IlM#YG6i8!0JtFRVfOQ^z zmiA#E-#yvXeY!?+n(WVNP-=Xqk@**dmA;?r)ZCOSP%AMy+@~Rg*5=`Tcb^}8b-S9p z?#UdTPkx;haK9~prpT=-*O|8&p$xPX2_=1V%_D*eIU}z2c6N(8pN$J(RslMy{%ULw z`%pAfVoRwpe?aKiq$)8&PhFju(Mkn4r`I~NK6Q6QB!3F*a8n1bD}A@WHqp+{Uh3N# zpbXs#05=P}9-kbOLU^-TQbC%-Ucp%XvzM*LXAXz^DmJ3Q=lVHnN3ik3uoL!2*Xkc939i*2Q%%;e>s<~P`!Ac0=HUSi z?8TRZJqoZ7vC8zi-X$Na5gqzo6TIivwp}ZN^(#cE8AG&MUYJtxSVn;NquQ-=rAaXh zfj~H%BDgkS#r`TQmi0{caX|=?dM~VNDWAd0(lV>>tLY*>gb3A=xZ29yh6ExvhfyL^ zjU%l%W3{F4{(U@ptT-KQ(PaUgXBs?C?MizACTwm_TVbqRpBHc=#26HC4D?1YGMG@w zT`Zoo3hCbViP4c8ZqtEi7wTVyB1UJS*$smYc*XLXYI$!F9XqExK&c$##cu{d)16L} zPIL*ampP((a4+;<-;U7qI&OYM8-f=ogBO;bI+(&M?bo6^ymlg=L~r|(h}<_aB7zz* z(5rPvmDOjfHQgrYSb~U%m6xZ2SG7W=@{;H=e%Hv7a#|txgxJ^7SY37r@eRxLkl@8?`a0r3F@k~9QbDo(p~6E_NYNB)Zw5bC3Y>GV-*OOq1|QY)$_GX+Ni~#IPk{-rc-UItd{J zMT8kppJ-Ki2JHyEqco;U4Bqy9RWE=hd>em-ez3_srILSopiY{^Ckxov1t(>jcahQ+ 
z6CwWHHxTmNMVi(RC!bQZVqbsoYrc-{oFUG8_0$FPZE##V4z7yz7&d^x*C znLKTbt>jMx>f`|LC24HKnsMq~a-1d459s2zNjoY(_dd4JcvJQUFE3VQ;wg6f`M^s< z=4h2uUisJakzRaKa+k+FnvNQ&R#^+j2W26vkleS;@6opzU-eQyIT`!_(+NXOe;>Yd zUEgE9*9(T4QGm5C_~QKQM}XkeHFoMbzeYm7ncYPE#9eljO#0|E;}ik^1pSDQ{OE(7 z5_}5zFoF7GeHwE9{kgC{O!I>{m)#Y_H$FCF>H&sIj4~=Kg50WRM~thX&Fx6Z5;-=4 znz^N1$3wy&9;KhemWDK!Al?X$2cC{BDn)MvlEJVxdAN2=6W``8+J5s*psNn<+v5B7 zVX~agFy#6G$H_P2=Y}mxpBy#VQBbYX`5v*jofqgAgVg{jD`-TwgZ!>ef|vYGdd}Nk zZPytzCHspT17bO7jU~~&up1j*S6)`cI`r1lr=0NE4wNG2fx~_X1_SG<;lAO?>P$ji&=QZs-s*OhaBi ztC3BcMBSf@H1OG`VSL`}_Hk#aY>&3LZ0YUx3)@frts_PgmU8KHs--&$bCssBw@qH3 zf`?<0jILd~?I9^5sKisaa8`<5Ycf09v_8mcU}8XXtJbf^4jnClPS<;Ac_?3Q8eQa& z1e=~x;O#XOR5Z*P+9-5ZH5-EX34d<(BGVS`vZyDhx!aNQBQ2x8`Ns~Z{+J%HLAl|R z!&GmFWbK8Gxs7F(;}N4G0w7w1=(JdQ!X;)nRl@na5;=iVZrcU*pcPsl+)VqlNw-L1=R|Yv;e@#l>XMbydx$n!8 zos;Xu;gSP<{)O)aa z^L%jrD(0x^M^o+2P2@+**qA{utypj&`rj(Z`*8GoqKWzqQ2><-I1&Q88HIe|+qb#u z))jwE?DsF2(@;?4%|@Am(p|r_=HFHX($rB<6e{M-AO6?yKmQn&6%bIWqa336U%&8H zpaQI(dBYeeNAo`$%S{HII|-)?{?8u3bj`tZ$K3M6|JN^2$a90vcLGHGfRz93-zBxV z>gtlsah!W&udl6Vg&ab)nt*^qS)@Cd<-eciern!3)~h`pX~sinD(%OFB8HJ>@LISr?Ae|)C$tF>KXeEf#_?uUO3 zBsaME)&$OQWl-oMTTbf$ZXdVGF7qRaBpqI*0?!|ViMQ>2OV>tk*3 zvHP>gM}LN)D~Y;qZG^PqzKuDvO)KXE=&N7LT1AMDKYORQ?JsetrZhX|RO+0Y(uZHY z(AI7neF+ve4ihs-9RPj=|I>xELR4Gxb70HyWuc1GgCW&2;Ild`%pmCMs@h-jQhH=F z@@3~Ak1k3N=w?yTrk+tynD74B1f?4O`O-%jlcU1(S%3l!xZAD)>a}`3)Adg)%4%us z+gTWEcy?GR5FVMj33`k|1|MWTi?-u;Sz{RlKq)07$9<7k3p|e?G;FY{PTZ)|3iQPD7MVK$CiId z@%KDpfheBdp2+(@P08TnN!p6paQMMt{0f z2H_witDhz@rV;90k_v*zq<5mvhFp_D&jK=F6s#ne{r~w8im!R7vt?GBI`H`ouW!_D zpQE@PH|WtjK-npCRl)d^RS9%kaWKi68@0qPkye@fcWvlt6{lUvsvi zvEkv6<>xzpaooa)t8*fou1VsLY!euvQvDZjLi(>w8|+^@PhH@mofGE|E{O)j+&>$S z{ChCDN2>XMIS$}U$^xy9Y#G54jj~Cj-yzMUeid?k(hR}6 z_cSZZ{VNclXLbL?8hcg?I#u5wy+*$p9R@Bxiy;5cch`K(z|gZ-vx@$&be0ea9_g^j#p@SnyWpnrqpE#;aLfEizcAG<#OBP9y?+GbrA z3E)HRw_do@#)~2&i>G&iqA$EWYoD*W#g zG-auOH=ERcNWwz}u;M;hKvm*ei(dWJ63uhc$l?8;Cr@da+)WR)ue*U7NS;7?hY!f; 
z|Ln@&rF`gUk(zI>_Dozg2NT~k9Vka|Mt1+74u22M2NnD>y1x3|zt;2DH)+8@C#WM5 z*#1)J-&%MR2;pv!+3Ww)6g9Bs1P=2l;OqZqYqat}k14WW|K-1aHkb11RBwu-;w^M;e**7t6!E_9+gW2Mw>me`gbiEjdO*7}ND6i+G zq-1A$xw0^InKC?#Bg@MOYZrIr$mwoP9m*eUsf;4(u3eN!^r~fwn3vD*damDD?yqOh zbInp0_d~Ji-7f&sMe5)<-G1G^Ff2jy`CTTi} zG`TE&L7BMVJ<$ne53_sw(gc2*pRgyti;lE3-|VHf2MWj@M}C|qqN=nQ*8gDb$;*j7 zd(AwgBi-DrV?|rbbpM*%?3K3)8|Ug70a=T&EqMB+WuqdfNp%XZxvsgWKYBq~j88+CL7kV9 zdz~nJJ~F*orCmrFK%OD0WPbEm&L;gjr3TKDIpHac1Qa`aYEo_Xhx3j*S3YO546=dY z(l!%YC!)(oA{%aFa{MS8^EWn?SR@-NuuoI`y~4<7g&C?1i&ZEClWc;JE;vO|z<};q zu-p+*>IhwT=oe9YMz_|hicNoVuDvdwqV!i)`D|H1cssrfy%ofd`_JK){Mhsu}( zf&l9*-rg8R7P$X)PTX{(o0*@M=d`1PzYtA`Gtk?21Fn;1# zPKf|+*go!2m)cDKp;pl>>6+ergT^DTKb(#xaPKr)cspi!Fr^MiU+o$y$JJ(-y)Y+}P^y z`^YD)NeE4YkOVfJs*t&SD=pM#VNd}U;o99-6T+{sUMt>vfJwEVxye%mcKnuu#_nyw{SCxk+zIKSjO- zONnZ~C?E8EhijR?9VwX_B*@;X42rh)Z4|`={pQcNB!zuMMjg_=kg-sWZp(E>{wo8zB`_Y zP5eN)^8|-N`EFE*#sYz@Cy%9f4(rj|r$*TC~j9)8fBXP-o z`E>q=pw<~bqFlCr%a~6hA%T@3Y+$rwheZ0=gwX`fq4)gG&R00$>WMKQzMTpc=cBxt9R4{}_nvw=P+2})6f_d$?eF_XR4MxlS)lH;|M4=U>8af!n)VZ0|a#}$l~P!V)`n~Jez8NCj@@sn?5OzRML%@;36db2Oe<%*RNlvJT43iJl&hssP6I+Ut6~YXVGBI zb_zh=)uW2OXc0ip1bPERe%wmJvYiF-IAa+#6F>vhIH_CF&}c|*i*VRa?}X>c#_T#U zMbvE__;5gk@tsk8_va195>`<8X80D5AOU-T?JU26gyHP%W$Y43i9jSnP$Vb9v=ve& zn?@)Q6ahuPQ6M3DnS)$WxJf4)-)>+=_%OIoj-PHK+GbvUp3@m36y}Poiau2k*dg1Y z{;Ec9gHCzlzI-b_!ns$07n~kw@rI{Y%9X2@vIzB|SIi2_8zL5EBrVcsX+?95^ImwU zt_k}sieDyNI&2A{-^-45SRX}dJ#o|FIeS-PoUTJ~fX_U4J?%wZGaqFD^BlzYnmEnl zV24(ZUpG7{{IHv%4(Bpqu7RwdlV_D?37H5#!g%0}LC!5IEwnAH)v2|xl294CI4jk? z;5x1sNy=%`26B@R3YjYrA|V8!&lrflC(S?DVg4F@l;islZjEJ4up+p^T@bdHP$*l8 ztc&^5USP&rlVqS$OiM%mm`Q^6P{WnAlH{@CbytIttQ=DHU*&S#rV~1v*c77I~QQKboUfX*zQ&7 zR_Y(wLBi6kRUK~^$cu+3m2jKHizNah}CZZaP6pBox;t+bHlA&Ufr1jjbN{ zbxbI2Xe|x>w)>U7(eVBB+60=a!>CRCm-H6=KzG)qbt%0h(V=nEZ${$W;LQ2V<}4$! 
zoQ9da%5+lN|Bip-1^-1i=n=}h`4F5QkPvXz{Jt4Sa#*q@D_v4mFf#3FH+A=}@rbe4 z)|*)YgU1`9_DZmuMAQ$-`>Eg4q};0I+A~Vh(s`s~r{hpxT`0g{a-YXDZQFd0?S)K2` zZ_3n_>MH(~K{mEOUQtn5aVmLSK*)8c_Pe2VhBc%0NZzPwGT*|wW?VsyMwH4xa$BZ5 zGCevY%_#0q0L_<@xkK|UZhrGU-0HynK+6{7PrWBq?Ts`)sD-Y(T%{MqmxesX!C%&^cG zkG9&|4Lb_QD%fw6-dg<-K)*c1Q^Z*i_;HF2#(s_MV9C@a%Bkd#;5z(bOr<=r+wbA6 zM@L^MeIt!k)(kpo@Rp`Cza>}MiE~Ss`mI6Mu*1k)NneotHc~K5D&I;p8JQdUT5^+- z!^Y@oWwq7z7l)Rnf_r`JF^Q8$!({? zYXxhs*F^5TWlP~7eX3LQ^NZYeo9*k*ulG|Q2;7_6vnY4mUAQNyzgn4McW>h4kI(l* zfvOh{w)r1Fu+6AU8kL@JN9IuF`5RxTH4K?8e_|LrCXg`kmVeTnRFzYE=3>z85c zqAFDtVXq216aCYIb`48E{L|xk>+Y3bmS;VbLTf$;TS5mlZM#NG6zrZxf_q_0952Tk z#wpmnA3v>~cPXebHTYK65s;{A0I%8E)tVmK9eG>gW}sT{w<|Xu(Vg0os_L*X?Y(XL zx#YYmYy+_oKRwi-@7J?uG~^K3H00s!M%_T;e9`j!2&ak$ms;E>$8+zE?Y8O3uN~%_ zs_!#v#l-rlhM%n*esA&+ixXShWIdMNj8g8Q%Ah&pS?hm+p6Rd_KGUSv#0cjQ=U4MlX z@YSXoA`S$tZir9vzHN8)&~WLSjd;1`Zan+$HYmpeB>n^E+KB+cb>AQl&Gnn0dpv_4 z%C8YWr)EzK+6Eg=Bb9PTVN6b1PQBPsu($3uV3DVQc{#+@R*X(ZSq&-e;B1cMW9Mb(q!Y(QB9WrbW){K^W#s-&2mTYIvvPHH z6z1UY@bF;w;9+-gw&dUv5)$Iz0@toy{o!Oc~Sjt;ZYzn?2O%{{IEvy;8c-(vv>TQJ<7DjEhT&1cY>0gK%Ik8B|i09!iyVF@{bcqM=aELYj$o)s4T;nh8;$ZRF*Ue@DNd1x7y5(op-Kksxp@&p)eO4J8Q%LGn4ZlqG5Z z=ROqC^Y2oZFsR*vz_MC2`LJL7&wT_+6zhLsd;&wFaW#fWNd5pyNT2sE_*Xj@nMztGvj zga+Z(`L$C46hQ{qZ2u|`f$cCH1zl|&qyz$!*iS|Cz)(mOgnpOa!u81~`)-7!Tc6`! z&=F8DR1GnNVgjHmMCdeGOzk~5NLu)ixMp#iP?PKi&R;kRh*6+-M%ZS+w03f^jM}hG z_}O2QMgothEI;1_NEi?dM+TD1eGUDK<19da0^j3vV}W62p&&XIr3zW2zp^=kk#hzN zI!D1^nO^)^CxDU|u|;A7Oh%Fw#{fk}b3j6}e219d1z^q-;0tWDQ0K=A95xyOz#x@% z|JoI)OQnH1RFHXR3{o>dbnS7o#8*!c?7;b|-K4vL`PYe1Q1jCMcJ9lKNL~TdQX5i! 
z0&qM7jF&Djl2Xt_&RZ_F9p4%QRy%LNyLXJ`Wo1tmmzE5LJ~cHq8mZ?F4QXXJRrijK zS?`hEprWG6t*e`m-QU|wnyT|iJj`AyOIHOz@g2VN4iJZjOr^=NRpECXodn$&fn!~Y zoyQzMND?(??&US+>6ESdb%o>m?AO@V7@64CzUc-ZTc~EN7hTaXaMp?pRu2p%XJM_O z$W{UZ91PsQf+C?&a&CK7$-OJ9t88I}WxI0>Ke&Y75=V?wl4D2&+JJ(3io%_~f*-!sK%La_&%^h@7JyCj0cG0=6(R=@SEe z{SWQWp?$NnS+{T9LS5I2hP&W^Nh}(o@BAYg$Y>#m(Ozd~XLSt8V-SZYgZo}yU;15~ z2%`J2WXQHw^*2B9GvXVDU+!aW$I>Aktc{M^uRoB)zh1wrPM_(RhJG-A z)*kJVgCwqri6I;q8Cjwfr<6!7%E2s0r!0(_SBWgGZM4BR^6kBA=$39f{CPuCruSy2 z;B_#JK^kT94UeV?Rv_my7(J4I?TIafNNR3Dd468-XYU(n3s28mw4eAI6EXroHmoZ4 z7`pRjg z&_z)S#d<9v8b#mu)1jqGXm@bHu=xy>*!C~H$iLbIINeVkUf%iov}4|dIo%s>os*(5 z>Ce1(1tSWNk?LN1%j!YVIQ_Y*P_@)%XMGF-BZzwQReQ{$bQ^2`u+66sFwn{X$3^z4 zK)4+!XlOig^%TGLY{sl4EEXg5R8b}xzI@#COK@wNf$HJ%0}mO{U!$QA11JvJBL?6k zf)Mi1XzSF>U)h8-$u`!-xNpv(t%hgkA<@-gO{)RDdVuH+@66j@;#C<$_{q-I0EV$7 z=m%xIJ@T2jDZjFVJ*G!CoL=16%AB-Pf*lJDbdnAcE6sFrdxtdLc=+rUZG!04zOl^;-{tBA0%$Dmgg*m@NkcsC8<%m2%d` zh;t-$I{f^V=Z^_(w_x?$txF%aZ~eXp(H8b3b%DhQ83FOrBs&C=4+FcQeZnwUU7 z*LIyqg6xPX69Gepuo3{*d`s{Ulu@g@@8S<5d5&H@q7?P)6%vZ}5J=W3OHi#`HEPky zb=U$#k+z+&Opti7zbFfg@n|k$z23q(z{&n zk~0v_r!(XmnYSodr2*R0=}{USqp?W=HLY11#{zYh%<`7h;EQiS{E!9X8Ni0JGbtDd zoVY{3#Fv%fK){2yn7c70Lui(&C?VV+xz!5??wo-17z;W9Ec*VKBNVU-C`ag=3}Ei7ZRn&C3K3}XFMSoy z1`&^*Cq?^1i0a?_V8iK#ufKo11j{;kDhR>sssK?Z3_5~;eAP|@eMk$)|HOoCLPA1# zef@XQ_jRf2KQ-Ct-^pkla=XOePh0h~w~vfzN~%SHzM^PxF*`0p83Io@Rt0#LdW;Wl zdYi{cz!*?SHb34O`CWg61RPDk2@gefVd?4XOGAeWXrO9ToHBrB#*hR!-Ccy*b8UJF zB1uC-L;Lxc$KtyAH3tjoJ%Mt835khQuxPnJMC&&%dURyDG2Y)vfW>_Unq$@&dTnJv zD7o)Mk6tbR3A?m>80!X0Kn-^7H#ehqPukj5ONU-7wlM_GLrraLendcjGM(1i0QP=I zFqQ?gzZsmealQ2RGP!Z1+Q-rRHz0QO$f{wzU-FW3@~2SPAU(3gkw_AkNOGLR83iJ( zIban5948n=x?I}A`Fk<=c4)A}23Db)K7-hu2`t2FW*%A{D@J3*u(LzHOn>BM8VchWyp78sRnCrb7y=&!YfpwmNBsgk0$3(W^9-PX z-?JAf(hm4}$0oA-_%A`Y`zs8OF>~Be#)}_@Wftew9+=J9)AI->f4P615Wv6|+=~0O z@!4tG2isQFqkU3u{A@Za^RMn9x?Gw$KydQVPU8^_m8Ayd-2^DQiY=oi#@#B7ybB?c z&|8wHM-}c8KbkV*6Hq4`)1y(Kmy{)vgk=?q*UVF3*aHiSki3DKBY{C5fVPPp7tBO$ 
zWp4rl!X@g>Xn+CDkAMOCgmZ$>7DlA@h_h3J$EJ}0L^H?e#L<>fLMRTly1vd!K|#^Y zS7~5IMvFwhQtOB(gdkW%1Edr6dL$B~Mi$UQ28O(pXp{}K{#!BNt)w0wd(mq!LZEue zsNC0LVqz-L8w3P(*lzmia#n}Zpcjwm=W#I5rNXXvkG|MSDA9*;u)}54C`sp#(f9$& zdJhH^zkB|$50SYUitc=U$XzR(+b=u90p9s`%zm#gg?*;`l!;pcH4~UOg0c?<+ zBc`~j#zyh%7$gK^YBj1~yuh|3V(xMDx&h0oAHKVQVU$uh;7Kw$3PJ%#J3Kr*sjp8x z+e?BR-m)V3>a}`vPPPhRB*1ZdqKjNe^p4Jlc=-QdpFshYElz%}uC`g{{R@szNI?;1 zNJG{7RY_Mcx)m0Jx{y7$ICwQyE7b3c?82i11nDtA8(}Ua37HgIo8c1qhNV$@hPJjkcTPX zOe`Gy>SKu_)c_HxEFp-WxSGFzQ;cP`C^l3r!zS({Msi251qcGlXzZE-djJqZd2Z^b z9MXHVuDUU!kam!i=eZSmpA>AS>j~-5{eHCy82&+lpl|75+r+3B(JxvMAt{ZdPtISr zXg*cJT^-4eg&SlTn9`%2chC>7V*nLlL=y5ItZ?!4^L!l8cM=0^HZtI|WF2aIF#hvi zk)&~2f*{xdSAf&F>n$@gv1$_)oDMxM`P%1iJOGgH9;|)R?&8n+#@+p5)00YUPW(J6 zRIa0Q?;GSx7Fk8GoW5UfEAb}6aBiT&-9^0E&0IcCog&YOYDb)s2AS8 ztatwzM2m=K?d$E09;@>xaq*275copN$wW{@k1#~KnY6G+87m8vQzB5h#bP`77^Te! z+5NCkH7GtYWOrNub=>o zSya%u)+kkZ3rBKz>@5sG?ZfvD4^zj+bmN#4TlSSYVnm6yiYHD_PQr)bKTH6$l?}J^ zV5EZt_LK=7PbH&<`&ueCnGQj9F66$R^m$t{_kC2Skt)h%^k9AL*TF|&+#t}LQ}Cr7 z`8PVtr=9ibzI|RGDJ|0mGdtIn1DUS}(AQy9U_VpoYuKtp_Kep{C6UY=f`Dk){F@O` zhKzYeLx?U-mkj9L=~=We280Jhg03sHJbuTXQ}qPy1Sz47p|#(HKml(Fw=!lviaNZt za+C8CV4htnH}9jTgjns?E?~?F1}~g~JXX00d8?Fs*Dj+>TsdUzM$hDATFjj%5a>sh zW}lz|1O6`TAN^_kP_^Mq@f`vTNo4^?gXCwNs|D-`n?d)GnMX@bj)Gx!1pCK$U?VK+ z+MT3ua*%rsE3$^Bw&>`6`0cpfrH(DEf4uuKhcjq3~Lx`e} zboKQ+;+y3`q+62j+A+w}1Dwu5o41o398hl`pNK_8&Xc%A@^_R&(E9d7p|r(nb3iwL zR8yJD>c&Uu=ejSgGC?_WUjPwD@qN(gOAJk!chmJfNO%tGGn>h}VHn z$?HA*=!JR|wU`Y)m9XYB!RlvFHAgL}U<{hfQk(m=lXfjstiG z5q-P&=&1TOH8sDfsi`b0|H55Pu*0>)7namu%~_n+Z!zu=U@ekMx&`xasOp5r(@73r zqST=E$ND$DzdVP?TW~8iM|uB%OD$VC!m>KWEzBqZEMGD{0)7;Xw7m$@5t%PYhcftm zVp3afp#cwe=b?T6akJC}AxTZ7#XnfL_Msnl*6GonX8)!UQFw6UKf_WZq=kVA9kTM5)T88@N0SAOWp^9 zy0E+hm3?xr2#uuWRLV&AG_jVgV z0tHYG%Eb9MCH`C+SQ>jl-?MyJ=PQNefqt4@Szj+qPAZl1bL8YXX39obkp_JKejf)EALWU)fXw`0aqX^umU?pBStV7XvCXe-_c1S|ucc|kAG%#BTl%Cdsi z-?nPsMFS}?A2wDFjHIJrR(pA;6;l>=ruX>eL$Y-4%3OP3?B+J40Qx=MFi}`if|I84 z@mjzE(em;#C*X+Ki)A51Ab>G1^54LS6bO=v=v~HygJae1^aq9Akt*e$pWi;A9nuk% 
zxhB_~`e$!{KNp zznO)ONuDHvP2Gk;SgN9^v7ZC9j;4adDY^F-rW0s#C_t?lLO=o0@S|gltCCxndTdKO zLh5D95zxB?rbUnIUrbV6vIsc?p!8yGpVqW9p^!3=&I@?iqjrrHs8j|{5J++NJJJpV z1R@{F$t__bM<#b??ybm|LkfP=rAXQA(O?t;3gn2C9UMx89B0ve{9uSIbML@_>CKxr zI{}^6=dJ(-lRUJGCc9jP^EuCR3mdQZ?C_h4MPb2MtVFkNm(4wM9lTDUA7f7@&4>c> zkASqA0WNJ<4844KBMmKJ{}n1EjV{RuWXPf0NV5o3dqhKF8AH=C65*T&BnfGHZoR{> zd-NTM9=*awSz?J%S<2MiyXWMNV9S^s9FHcgIAZj&J0kg(Y7z^ zwTPRs4EP8S!c>rEssO{jl}L}j^c|X^I`o&>6;h~*;7@4PW4709b7KLh7Fq2LwS=UT_B$dc;tf<7YB!RQr&JXRzk8^%xhORMT62HQE*ybvB$9nuK zT~8-wrm04DADvfO_GI)EXKV%aU!3(^3>u_9lF6UBu@6K11q6fX%KOdieyil!m5Dy7 z9#%8v9A4j**vNlm{@Kf$NYqi;JlpVSs_Wc(|Do9CKC6RQhkmV3cUPCz+51dPB&Z0lF;{K<0 z^E45aze`!%aG)o!J;^^FQSUU?R^m07O>O@4xnG^^y?M7yuO6SyY?^Z`ZTB+Kg?J73 zR!J4GaES-ZSUx@tp4#+NXerH9d@Fz;$^XxiKmWy1)Wvf4)Y40dvnRXJFGXAJ{Lg;T z2v}x5b3ZVXrZ5AvS)7kkoh)YvVr9`L z>?Y*d8;bOj54$Gv*3|Fl3rz}v-_~>UvR_dSOz1Nqi9g#>T`PH+_IqPM*e|{l6LlnA z;7f;o_*v!crF~J;gg9qTjrCazyStH)f)`XmQ2CJM>BGjV<2kY>tBk!I=4AiVDt{i2 z>MvBC9Ddt)0=Et0`m1nf{v1?w)lKiS@7DA@>pq*gIL}NqD~SnvDZck+aXZT2w~OK2 zqUWNOS^UYmFR91H{=VC;y#Jp3VqtkVxBtGjzm|)`f|i#jz&#ru&(oGfzfG=N2z+2| z`CzoqmaU$`(<3?n=+)OzKnbpipXPM*n}HrVZ49b6vt?@2&%>q1XL?}tn5XP892aT3 zq4VqMthK67O3g2-O&ygF-qQ5qGiQqe9eoY}dvpTQ!Q=vnSWo@lJC1wb!UE)$Nu~ir zrsz+tK3X(7tW9)r%&L}yMt2Wbz22$!IkeCA$-ON_is}R_j^B#iyRtyPp-eZK6{$Ej zS@s_sd=k4ygl-PN!kMHDUp=O@jxCEg->FB`sl;^rz$P=8ohuwEjU|;zwb+~s<6s}V z=6XzBNoY7-N&U@cF_G7$k+18H?m%HIU&oyp9`B3OnTtQ~9GF9d|iQil*Kvu5JMmyFqX1x%L<5j?_8speGy7D#*q9S)y7VU+h1wJLXZ; zaB&phP3}noyaECFReO)R)H~Ue8m9>5!{i=ci!NAnfB&*x12Rrv=mkN_b4IdJZuPiR z8=6m9#ht{5zb7mUL!A0jzGU~^eIX}@{sTV;x41`u~V+I7RI=WNwFB#`E?kxJOSoat(PfLDhd-jukRzkj|@xmf(# zYkY3bWdCOhAiMsfU#J&1{8tO|68w%k9uH5d66!g~i#oVwQ)t0T<$~JXv7-$QKIOQ% z#urm96trv8uRJm+{T{hLxY!?ky1H5lBzimwr-jAU!Sk6l)v6*UnxBh=({m1=QOBux zlseM0hT{@O#ubKnkB4nmH?ob7%6yi@u=S38*$!wg-Hv&;sH$-jQ7RdXD2MdFv z(n%hVSJ)Zv91L)NHb zT^wA-&?!Vw=d%_Z}6@>O}^ge@730QbCVIBr%x=iaw{Yq9Rv)JM%Y z2|^RcN_}4np1#~W-vT)X8vjSOF-4i$#}jt^#Zw{oG!F}=EbNW(4@b*fLniUT_qg91 
zQ)o}r)V~{%p}O9F!_}3$NmaqN$Eu~FMSptd<`PLWOjt&qSd8?TF>E4a5S zjZ>#w%w;!qW|Hyzm3Dng(UY9?d>+W1*zoiZnsE(sX-F%R#(l*{F27A_PIZ-&Jz&O!P*ClGFa2*7VZXs0v-`=ZM^0C zDblqykoo76Yh3DN&g7{Bg^#}dM4P*8LXjo6hjO-kSgO#4N7Jd{clIBv8yA6O^;vXN z%aQw|kGeeuX+N7M8a|4Qa?vjGIU>xm*=@|HSQli5)gEv~d4<>c^ASh9^-KUX^r4%L z2CcQg=#Mmkr8wJ7-shwfk?D&9FGy~I*_0&tFRcVp70Q&z)YZGLHbqAzP))_}xG6|BH}>353ALLaQg zHQ|nVO~{{ak4BAOJ2fZ4T^S`Y1QFutF5NZ0ar1|rN_xru7uj=%odTPVo zl{{6|U+;CE0@~-*TDQc7Z?^iqHE_2MapexzYi+!@Y`nq{vlqc-DomY#Yk^f|^&1E9 zfOgJfr+ckZC!g82Do=b4iXOb>by?^J!#>3}$JEfLMnZp5*S-D}=ED(@FdA(5Jw@Oh z?&l@xjp!=e%%fK3AKBy_zCZCg@_abV*`nmwD{;FrB)GXBst9!Br5j@9=cwboa~S!} zYCYJFmiGE}S^}>SCH;j`dX5Fx^I(sdXLF&<32kj{ ziq4OJKi{#Cx18KBB0(vB=`L_OtiX#;@M4)ElE0JijQ%q_%i0E)sHlO{3E&0ZC*!DE zQ2(s$6gw4)i*(wR9IOS5ft4$2WEn7Y;=X`TyJmCzNfIAQnyzwq zypj{A;=H6^F68fBFt$6yl8|cQ?_boE?Q{C<^_y{}6x9bSHaBSfH*Pq1N_!Md+S*Pb zX=W6`IiG-n$iv2}))DKx^hfT@f8LTcv|Bn1^lWU{dqjQz{{7Xi(fO)&-BEducpY7% zH!~YrQzasJYA;vdUKI85$F<$zszx_f`=FwejNjAppo&z^!06t^Y-quTrdZ|y#(`+y0xcTvhiBs!_!^*7wFoc*vtd_X6CQX=`9BX6bbb(S% z2a~6`GCVr5Vcne6`s;54pWhV=AK5uLw9|IxnI*=!{7wckjR+q%>>l^{Pj83fie(af zo!XA{aHY2b;(qR$L6rr1Gu5)1@pR#ZH=-Q#oZxl)B@aItp3a`=_4laH$& zmH6zogYPJIjx_yoTM|X7P`DP6K5b*)BNzHmk=k_r+aY}bW?O7oILqnI(5|!HH!f3I zol45t=1s{vyq2l%H@_~mFjJ^fS#3UnKOywE4OiY&zBjy58V<`+vkCOQ0(b)cbm8{K zvQQ=1=g}YXQac>|EvS^;JZZfiiysIob#9MDLp%lBuUf1aMA-ThC=DlopAcpqQMfusi+&$kipi=1ej78UG?7a1$#?(b z6Uo6zZI21Od|CD)GM?;Xu3<*!i2-^bTlfOqNOt14?DJ+*o(0HfC@M!2T#AW8JX)r= zQ`<5;jRt`%)qwaxYMJtZz&T(Ic9|jA4*E}FfQqQpw*J=I^r+5ZuiOL_Mv~s#-yb{H zJqV-Ob1hd8c;9% z9`q?p89BcRh2)R+Ful9v_<(D0t8RqR9C?sd^MJ1@f%YhNq3WmoKLe}$$o#!}azSYn_K zi(B2)&#(rD$b44ouVGjhn3EmhjGtJGWFXS5riV>cH?ev#BF{l_j zTlWK*!l4|IXVafnB(j^BGGMS-c%&r=GD5gT|CZJpt$|O75-s?fv}A+qw*qefh$53) z+%m@v-ALEhPUFHD0%PC(X1JUJdyQUpQy8GCug zOm7a~H0g;sih;GTO5M$9kyB9_SO4;c7HCDWw6L(i?Fa~z&T49!>Fn-K2@Qp3M+Wpl zAS|EAX(cXU0Gh+PPFMWws&prG!n%5l53`b&Q$xh$%(&o%#l;}Z&Pj1+?e%qldjl$uK}ho`Y?k{59m~j{lPZt@Uof5nk1(lOqSNOS{4<93Sq^{A(-Gh1VR%OfoKCv3U 
z&N~nEA=bFJQ(W>oRLQvf7*(e_ZkLcz8^mf*S~2n`+#1}Dsgrijj`yUHv><1@X6Db; z=(A{Xpla>_7HyrPaSBYlo=d1HgWqod;*Pxai; zvp&W3u9@1}+VI8Z7t{#Un-Rn@CjASL9lrN?(7~c4$<2#$ncOkg!8sTOOGL|A4i69i zxE;S{L^C`xVoKrI=W+MF*28(2ogX1+Ocp z0FuNEN?THK195(6uCmEC6v*sOg7;;7B(qiFH0Krct#IVKg|1n%n+z_35a^B^YUZBU z(n)9Faq1C0dgp-TA@wC2W4fO6%NTdLLn+}mo_T*R*V^{joVGK|Sd>;(jhb<#WAuK2 z!(j0K*AKF%Wp8|xqRpR}9Nm=t$q6-qd%mQ>!KZY@d()OdqQd}TDaEA~xzZG1m?lwl z>9$E8;hXH>q(GUl)-WbB`3ttq>1na#?)`&<+PxtW49S5kzI-~u#l?kbJQ?9EM2Mmt z!S}jE8yH3sj;0xlaqOM3$QC9NHX6o@^K_wO z2M+KvfAi@Ad|Wp(Gr&rG)YPB9k|=H^puG*K@?zYAn;bfK<6%45qDFh|on1IW488f0 zf9jXFzwPX;Fu(ecMkEP^nWdv!qhxw9iS>-lyFg++!6vN!(n0~&rn975WV^h!adn5o zV+?RDlJ6>ICnqc?r>FCAU%cnH9WV(cf}Ee9ZSR9Lnibp`~o<2?b`c;d0)8uY9sPnYtXPywo@7Db$V)d(v}*xRnUsp;@EhL^y~ ze3U)XTB4ki_ahe{pB7I)eINbjUl$(@+8!+!Le%5hNI!lwHv90S``L~(lAwnRXlm}E zXS`Ym1i#*WJ*8lwU^H#tb^I8KHI_y|H{lPw-fX1G3+Pm&_gmEa%4a<_K|lBQT!+-% zX%XT9m@43AL*p8^Q`djhqXGXQ7@bDr+Eh7FXjk}k@pxHCz6+>_eWk{`*B^Bd9uaYS zA9C6|GNOG`Og9y8wj+8luloDg^F_!{LUz{GoB=k-^lqx0Cm#XJhs8oz)*~^(Nlf<% zAk%6u(V|Vi{ips2%SY_ujy$vRnEGW+qn76-#od6D>3wx5H%z-`q!cU6pF$$H|pqzG@Ndo z2X~Fun3jEujf~&wV|gLq5?2#Oz5?ve=XF2j=c5(tjrhffR(lWd32(_xyt?&mfC0Iy zEwt5}w^(f6fCUAbmIh=lo0gDjED${$h24P5yh9ZwT|1YQ-W$E6_D*Ffg_f7S-Vi-J z>nBe>1Er#ETb~gCz}M($u(t%Ipks481OFP(|FloeaB(GAtUxI&&R%`6`hkbLtFT6O zj-e^N@0?BcxfA)5UHdX!%8K*)`%iqJXK(Xrg*i4RfHpv%o7I?xH2~|Sc93(j{nW8L z7N#Yt{UrE5iS@BVYXBM~QNQ`?g9cOWc^&#!7^XcPoGPhRIz z$it^07^Dh$J!wpoqVdK{%Q-AUI_TOPCp{Br{V%L8ticd-qs8~MXd<5=1=!V|j%m%E z+P@i&!U(9NLY`K1V0I0l?6nxA4U`L*r%nY0e1f;{DQ8P~Ir$u%UKh}fo4Fz9^0!L-1fT+Xldyjig5H}ot;0t*Yv!rA%b?-j;mpjvi+JO}$K z%nLzSRk5ad+cWz5{=x<}q#4a!Rh*Nz#rjFgUiyR8*!^tsQ*kl7zy&i$u0tamAu0o! 
zd5I!S69%9#Ql|^1Q{oRdrfq@BSJm&EK0pvsQ~KzO9<3^IlCALq1Kq0K9Rk~;`5Zl5 z?duM)1%XRLBD9cv$0$DRtGtvB~sYZR#W{kF~=o;2L9YP^h;o50cbva5xl#qWuEA^#wF&~EZwqf*R z2qYOa{ycf_u^Z*Rh{rTALXvka?jK+>Usb6`P7A)l!J1LmxBrp}4bzM|nVq_q^~2ib zQN0G#D<}@=sp{#n;!(7xq0RDml}6pZs~^)Eg+QX|2!K*GMgTwo2)K;=Gk3r~vx_>v z@#_A$6qKa3Gk6VRlW&9HyR-9A|E|`eV@MW1n!Wel{sZgCWI6Os?dXcdP$+0T9RJ&; z?*M|Z5~XrZ8b z#$>>6i~V%}Z@VebdBFvq zDt$(Y69y9Q4!j5Z^rE??CD29t1I^tVd}}4b`D7O5S<{WHSw<8$^iNg2g6M!iZ;X*s zTzo_Bnb~5F+q3zj=EH9HNVk-Vh>Fpcdq$;JwW{|lIPC962g>{iy1&tLuFwmFMtIgg zG^1Ci)w%Xnwq4-n2e}?f26ZO#p=RqQP_;I;@b%#PsJC*{UT^8-C%jh}Zr3AYBdY%l zdd7N?*Fb;yd~lJq3!p3B)lkaez6RF({#*Qi*7d+slq-!-{ePE^uEr?TEbX=XMBZ)#!-{bZf;0ZViE%Aq{iISP%DWW(c z3JO>)gNaDRyaN>0j|k&>v3nj3En&y}xTBt9Bu%5q52x*tp8YnU@sk)*M2jRAP?j4$ zeK+1GnazrMzv)HAi_rn65JcEJ&KU3F`CF1rrbLo@%=sDj&7WkE6lSe{8cLHA!*kk& z;{|8iI}xIQj{GbZX#H}Zl5|Tm4xh^2mp3$Vg?QADbGm5 zXma{#ytv{)g%7R0?ww{%A-$BRbQwjm4;ceVcx#vc4^?j+6=nB-4Kw4Qg9r>Dg5&^0 zh=fXacekVgk^<7wAt>F5Gy)Qm(hVw&bhk9p-SM9B_WN7U`{%4RSDpCe-uq1D65~sJ z@62}&@$4c(c^-sOHv_Uyi#>G(2JB!Vjp{v5e+E@}dYGpSV(rU)hWE|#Y?`1t)W-wy z$iA2mvU5ng88q{lA1jep;**fg#6c?lw#*9#b33aF_^L;Ni^=UX!NIz;I_KqAdGB@O z^vaDYEVU+U^o`e*;AyXiiL0lnA%{OK3UD}g>UVZ&1tsqZ9u~I-QeS!p4e#viRJazc ze~}b`?a3f(D#8`a54X%ES2|5_@%i}qx;uB|C~eiU=&@EQSmkeqZWs}aqUu7DC^=mw z3w5ldf+hxD1H@}kbfUl9U+PLA60@FhNIa`m08m#fssiJth>Y6gCs?^ z;|oTP#vep>lhcs%KBsti5VFC6RoDxG2nM@qcYO=@{Z;ddpps(0i~M)HBECjZ!IQjh zoEZAsz8DtJd)X%8-mZ;xJG<3!wK|-OOGDm$0uWwBMB{3W_?hi69+Oy(ONxG^Fw}MX z?QyAI!Di{;Ko`r=M5}JM)tg|0cbPl*PwIzi-HeQWhKhBHianH~#m3{p#Bgns=(f-- zBB=0d1w21e)0X)AG-+|~T&0Gd9*f#*ANcKecPr7*76mlU}+{iBCBUXH+dS8F@Qw<-@3Fj{4N5-sNn8$w2& z^gD|rvyvXlmOQQo4Y8xp?L3U)^UUexKAjJrZfS`6Co@6>v(LH@_W6c&-U1SkCwoQ& zbZo?pj&D?YKkjg{^Gu09KZyVWz0dGmyOvd_v?A(~sY;l?ExW9wBx+af756znX3-7% z6yXhrt^u%?06}qj<3;{@wXoC1Q>GU32IDGYsSWzM>i!_2AFiZ0r4n2}(m)QYrj2Nx3g3NfFfgbQ_y-v0>d*>4g zA%nKDFAcs9syP1k64vCt@T{m1Ljz-osX_ECKxJc>{=)t8#nwG7gRh<(8b!k0cxsn9 zDX)zl=Z8d*!8rgcW*Vp4OMYOnATZyK36?;Ev{#D%;Zn*IuhYRC##^TNV(O?PLH*&j 
zwP#BBuj#jqugO-jh-tjJT7G`-gXf4+B%8fZ$3;qj>E~-8#B`afk&PI3*Hr1TN&UjkhV(q z`8fk=ts#M{fgX%|ZgN#xlct?5^Q$4N!aPACEXp!S)lm;DvCG80#LkRhjLR52mcl{aTpv%cuI1>^b` z(RLxoaK^ITgPg~?{YZuTrO%37n0#i0RoyO7qemTt(w$LwfIIi3f1q}<7hlcDAk@i& z+pNmQuy;PY=Ps74)kCwe@V8&p5uN}ccBl$d$qTe={D_~EVwUd#Uj99I0GaIAT1yd? zdi@0hJ^hp?BA%AepB~|Ho#U*PA>^tB5N_VKv$s4LE@T=$t)71tj&JM#)VTE^vM(|| zafab+U-Tn$g7z0-F+M;<`tycim<0~ro=qE*o|OMi^<_2{{1X257S@_#%qTS!Hh@f9 zQQfARecCa2Zt6K)h-!Rffx98t1kE)U(C1Z9x7X%KpPQX!N^e%yxU*C|rgyI+m~u93 zmw449eU@jr!IkEZ$FdYghsJlj4%Z~W8afbrOLG5IW8tTpkLKDR3=9S?v=)GiQM#=Z zQ-S38PyXg0p&etAhc`PpGtbirS_OfHjT$RJ!VG@iaQvkWbpSauUhF zxxu!R$5)DvOUQlUa&>op4yA(}cDD8HNaoRg6PMl?jHuB&9=QkF6hqGot(n(9Ylwd} zj35||>+{0TD=KoP*ezg4BhGSmN^AXKm^384&TBdsb~f`hTcJL{_2!$f2!Gu!Ysz8o z-3%cn$rqPV3>Y3D(sJQ{M+xD)b`fSb_~P zJgjvXuh1hy#{zY|cQE}CK2Vw#iG3?{_VFzKO&i&Vtlh18Wqx)1QnJ4aNZCcpgEr+X-WlRvr z{9&58Wp!S6QdOl~t{RiFH@8pVOldogMC4-(s9jMTA-q-Y%`O`XHAO|=9&L()^ zH-bQz4uA6ylx&BEZ1RyH^rd=o)iaoBlr9mv8uhnP{`Vn=vBK^9r5j zzk&G@h!|e@Z9sK|vxb8`&CN8#aN4l+yz)@=^*?Vk_m0%@WJGzpZVX2k;ljyjHA-F- zfuA9ViSyml93M22*(AAM<#OL18t?xzg83EIj!vw7E(qXK7^m+Q+vge1NOHJUT}Hz5 zQlDp2O18+tbA6<8PzWO7U#GdoKMfaPbpVH*V{0PGZx3o6^})dwTUtd8++>&utEE|A zrr#a1(t&KoTci*U*kCA{vYR2ku8yTc;@MPB51A}97%eEX@u`=wD6R`-9&LeQ>G&>< zXPEs9OE*AMGPAY)_GVdOGUMZQ9)}nkyR2gcN zH%yIKr2oC`2yt3;L^NWLQhrFBg*x-5SN}$L#w;RGBECLXH61Ik!InVqgB6@ZUW8DK zvtKiHjpxNsxsLBC@3UbA2nYko_^hYQ+HB>+qOoyeDn1!V^d{Ej&pR% zY$uI$gk-+|=DIngzG78gs-&|N`5=i{j49kN0%q7xYCb$K{W_=>x!!A^b2?i3rT?Xb zN)G4HmHVIj5!H!&DP;u_hhb$$>g-we`GcT9I*g**rNx&(*BK(5p}Dt_HfLFArdp@o z&!?}V5PMs0nhojktsP~mhxw6I&B`;+7&_vVLk&YQIxaK}?`LOcNB6=W&?_O8PRurO zLotY4%If=rKIxZqA%5Yuq%=IKd%1Ym2i@RA_QKQ%Mb03Tl{TsCS`@%7FN}6XYVje{8qHRlM3pn}3vM@E<0>n?E&NkZ#)zhQ^RgLd_|wg9x|LBuU+ zE<9~@Re7YYp*(#c&p$$e3l*|Qfo|R$w%X}^*wk3a?%>XOV?#FUrT1DiGWEMKI11!B#M^fvHqT{M zZ4CIpXMcOWsFb&!XEJ*tR0_grYkUnXDsm);SeQN7i1~yv`rt){IXEDQCimud8{*JSDw-C;s8rLJph0Tk$<>$scxhCH|8d z@zElb(ZKj4trTkPZ`76_cmG$$+DP`(T>Re#rXRK#zq6U?p^7{mABNu$MX21+r6fMg 
zjZ7=^B|WQK4;zd#&*yu~aZBy>UE?Qt_%@fX`VWrZ{u%qPi_44vH+ErDR0y-V=#MZD z@Qcu@)mAZ5qGdk2LwDlZz3yA_`3*f)H@?b(Zm4;DOx9AUic5q#B2V)-Gi6cV;@d{ z?yYp}`v*x4WTf{s-!%qU5)Kmzt}#Zg_MbCv9&5X$3jL$Dd?t`lz;M~;zC6i&A!3%| zT_s6Dm!q03)x5Lw#)6*9qTOuoNrr3pD1$aiys4Leh)-`#MutYBC4Asv=dt+Gh)np5 zdVeHk{jnzA6MEw!aAqF8n@k5|3r~|$-gI@Vqv%?BaDDh zWagqEAPc$qb))K3G+{l>Q6&mKvkNhtjY?A_<)===g}V3;PE|YcOQ?_%U=C9kxX28j zj{RwXed%QQp5Yt;d&VJia1u$GBrMTkY_DgD$pU(BXT2iLCZ#t!I00c>8u=LF@B@pX z>OlUBn!ZrLB*X_iY12?x#@s@X;hw;>Xp61+OW2UiF?ME!J#IQTR_Mb|1EXh8b+ADf z@CVJejK(!p%K96qx2Ow}KOMT8BfZ;0dolf_qGyAoqHU}KTGz&5XcS00iH`&Xxb&cfIwMR-BfffZx^TZNIt!vEhyxO#b3Px}DAED)_8>gQ%1flYF&AN#gAPY^x+R z5RQzOY-d;DS5z*6HzvDiZ5NQf0wp}w`@=rJ=A9<=^4>>1`=zxkAluDt!SHa)fE;SE zXepE~mM3N8{-H^0UfO)^JnkSt#8yn2{4NzS=$I$9@A{f%Z--QI1Ym={xYv7=4=>pn z(NuRpbO&v`WDy#u42}tBlHTy|cD@7Deo;SnXpQ+La+^!9es&Va@YnmH==$lBHwKTf zPWltHs1)o64J4@k>c5sP@zIV}fgUcb(P(AiUrUp~xf z(>4jTxWh@{-pgyr%PAIYFGgQHXzEfspG~2#=&v~Y!cpb~pWG(l>oyMr+D#C#Qu&UU zDy07PtFgkl z`eo`pq;d=GZ)tYTJriO^EB_PT)5YlSc+-^X2X6yOFHaFf?1mdBM($#1!!SO>8GOr| z&Id^uX!oO=On&^A?8fL*fsMR0>snXK+ngc%67#beWyV3e*gapyV_fE=Xm$S~)!r)i3Y$#C43_ZG>4E#g>P85PjGefmC( zM88-&3D7fYWnvE9aOGb9^f*g2m19!2iQl|mpDeV!+21LE>Px9kg6%017b;7bV|vs1 z`8EVlTvHvKWbn|zX&1aq6C-ynP4ZTGaY_Z&IG72qMRPgAHY`Lt83YTzIx~z6b08pG z%5&oI>gD6PIzh!#rd|`1H_Sl^MKxI*Xf#j-2;7Xq|7bW-J;Zv-c{NY^VVs4C_d}B{ z)Nrwlmp&peK`O99w{+!XI$`ohu8e&h55Eia@YnCBcR5dMkdlA^?g*P;@^=53PV}&_ zvao$BR~0&=`6zgdOD<%;L{0=3u{dtU-UcJZ*`pO*n^Ns-m_IZa50g==vHPir9u#n+ zp#*B6UD2e3{v4Olj_#`XY1tQ573Hv@b3q}Yu=VwkmrK?Vf*tIPtpkH{ipb`v8Vo9U zi3Yujjp|UCfmYqKfVXV#UfuvBdKZjn*4KyQzveHHIy0FcDSf1psLPm~hiP(w%$1oU z4HPCuhraAT^nyU`=Mgfr?AsclSsun7tBDSOmM5m4t3;My!u7lTcPf?v?&S#pboBGF zXkLe1V?N<(W6ta^!^e8+AwGo=_HkjIf7`zg)LF%fKF92>bkDy3@Sz(>G09aUb>Jvz zZ&M*&psvq?Ze;^5e-8a%)z3mvF<>+z@PXP-h=#_%_`$Nx!FvY9l!F0~jtT0dG8B*^ zr&X}>pOso4)x8YwG6YduR(5Gye1oHt!~29+d+5gRb9HdJ;_lu3G#mKKE3IE=Y<9Vq z2_o=o8ez;B!7%R;*qTr|{}#d)9sWO&2C8+{OqO2nA7few8>zvgF?u~e9f}qH>+uz! 
zfQB1gchVI0er1vds>Lh;;9Q#T6q-BeqhoNiqq>7$EH3!!@5Hb#JAM*SyF(iw0jGK= zj*_^O;u!2zM{kx2|25}S&{LWG<_A_`(igP{F?&(rKu=*|a_;8`oeKwSCGs9NNF(DK z8Um;=5C88PAlMWPn%d6}s}}~$L|Z|4&;I!Fiy;IGP3NU&DCnB~5A;_i!t_5bpN~6G zU|Y%)a@J1}bUPv`&}b|vd1-tb5as&;rc6cf`i%1bNQdpjU}ZF{*-L-XF)-k?^DYMT zE(TvG2X@x?BJTmva=x|oCI#evp?JxEWM4!M7%+`+1<1nV%LmWDs4qT+e)4U7NlgIo zl>n0XYb)BRR7r)If(B@>A<%Q59~-mRytp0H6#e=Q?9ELA{n&`lB5?E!ek?NN0y+*$ z!ml=)W^R~xz7dMhkNO)SC4#PrRZq5xw9a)lTE~r>x=A2S@%NjYKG?@X-6!YIDjVO6 zwiV*qZ~QRg+?7Dj0z6Vg4>O`NqYcL?K=WBV!}7Jp-g7+Y)uq2UA_Wt`sCZr+)X=dq z8HMWHujsMo%3WVyzuW||Y<*sdg<#}^ z%sfM@26_oX1*4BDr+dzGaR*lv6R+6M{_j3u9N%b0DZQ7Hl0q{QdF>Vh{^JX$NvgE9 zwVf*6y%bHm;Dq5CSTvFW*_LL!{~&l;|Jx}*I!r7tGGdOsMaB%7eeg)&HJPIk z$C_^e>t%# z=fTZWklx*vLti`D$No)dBfIO^VH*I(+9?2U8c`J{VrrTVgcaC>gDbRaYsr{_Z94?u z$AX0<|9(=+|NAB+_$KezD2I=q)+0D<2fA)UmRnza$!a$pfSEM#8394HVtO5N>Y=~B z0v-W2N0Xc^pS))#tA>p>8rw|~MK-hZzBLg=*Bx-cgf)~Gji#pnXUQ-;swGMvho%!x znKPuko!81{@Fd?32&85B9)ieOful>IyiRl9LSlSh1K^lH-E%Ue6*eIuq1`QYQW6p~ zJ<_SSE3o{)^gITS_xD(9H=e|C3rhETUG0l^8Pe~e$)1G=191&y4s-s*6{toul~0Z# z*&!+~4*e&hWA`=n)V2A;ahbP0_9^Oxvv?k>;-UIctGv`++yt1SFM+<;b18)fTWrf3 z>1YcClT0h175Sv~N?iNga4SPPoYd;W-KzuT>mm1@cT^5KHNyqguNuHNCnioU(o?^B zd{QT{aKHD_^)A)*iyoeZJNvOk?yuTTFMkj>JO?4-uTX0T+I>K5o0=NNqn3Nc^!S87P>3~w59n%gvwghAc;Sux zuiEvU0)V28M`)Z@ZBdSqkdW}?-Me>%*6FjB@g>uMn9ALFHeB?}RA~RcJ)mkjtJ5qj zEGWG>uNm$j&_1pit>>y|?Re)fkCII+E0MxbexsB3cn=!_e+#uD5|L}}9amY1MF0Rf zX5RwU^;XXm=&`|>|Blidq-S-dX-K1)P!+OrU^ z0{UM`b(`SU3y!!IQ{nUY@p4P;)2D~;n+KA&-Az|sC?31D@z1>YOG4;=)xEfAeE27K zlh#{JT=~a+q)IR0ROAh(<}y>%4LW2n*mkPG2nOlyN7{X0^e(JfV=|?DwUYR$vTliG z*{QNN*`-~aa^6mmo!#i{?2LvkV0MVH8Sq^_jaHg?j%Uz+>qZlLE9r*;xmw*`m3>9N zuW>(ln}h25q%Ac0meEEM37QMn)^5BFc;9B-^+dZ3$MwDFh4!Sfy4&idA`YkB9$+hv z8Ms}Yu4sq&T>y&4W2(zlX#?F^K=b_-?sZ{wT#2^(d>j*dpV|bnrsfdc-ozqLsOx_F zVyAydx<9FvWkA}Yc_t*VuG4F$;8@fqbo5PVf|;556LG_i|8kgfT}D4+ID3)h@{?@lcPFboU+i{8WxdrQT`th!K=Z8cQ$GTVMD`~o zKRK&60k`2-lCb(ei})N~<|Z|OW)Y)Ss;jG=7+NkXWs%K%uKmG=iz$ifO1xhjHp@qA 
zqQ-fdS$l#)6y?9lp7nZc*~@cReaD$PJ?hfhH7A=P8s&BaO!Mo@C?XZ2n>C8{{c_-^_ITH>CyU@#&^B_)Z)$R?aq~m4&L$ z4iY?ek5;(P&HdH8FYoT6S!2_KtPvYwdj0Aph70;h$2$0qM&7qOY1Na{TP6VR+ztMCod8HflV*M`A540# z4~x94wg68*p~&lUos~*!qe?XlEsPM#W$qwHZ7tSF6_~pcB9g&O`b9ND_9;hrk>_Dv zmwhS+^|PKE)~wXD;4UMD}dYX0)3`!ajiZaomzjilImJ|CTFjl;=+hhIZe05OWP<) zO?)`p(($j7pRZs@X2{1Gy5`mzZU8ZZtGxNAy8N!oF;pJXAe<9n@mYndj9S*n-J-{h zdCzvgP%x4N9YaF!BTNe_ZjSVuJ)HSCHN`!qXIFY}syQsBO3Kv6#-@9EBPW30WMw~Q zBS*g5Hr^*!z^O;&4|w}m-Mec6COvVVYFb#V=eDlfrG-k)w5(^t3}fqZXQ+S|&`v6i zMVJr43g@#%w`JN-UGwkZ>3tz{ZgjPEx(b(X>$%M3dG+d5ON+d_?kzY`TROi}N;Dy1 z=lbfZ*@24eMt&)_b1G19m?LT%nA+7J{B^@z9H{{fo_)qlajr>{Z2I{L4M8UCRX!!9(SG0QxNm>ALpFyX&iYZXGie_{*O4-X9u-%_Du_1akBQd5+A## zC~HL~W(Iq0Q`e<7ahP#X&K1I$ZP$abD8|d`epH(3h)cjBYZ$$B>F37(tQ{?TWv~aB z_xh)=AtvK4j>$S=7&)oMRf6@*xPi8|{V)d?3 z>#I~(J0GO13zita-%7O>T*1Dob4|6gV$mLR;^?|K_PVa*S1fEO_|5ceetogfxjxUH zU=FX{zdDJNF~?A|vGW>;XKd+5o$sfo+8J02Uh^^#+d0jp_O9~ETAv@R+bTa{`yUq9 zTLc?EMs-+mYf{9qz9xY|1Rm~fAm2KyJfVchmwpt7cm9<7!kp~f7c{YUOG*uo{3h-wR847pV>_r}z zr;FDre$cA^UNN)WsZUy9wC-Q<=b~Z{DqJbM{Rp6~{!80Vk$p$_NOO*DlX2H$yI>Wj zFaU3TCOo3wKdh-K{%<@Y_oaucmqSM^B_vTLCGFt4v_*$e=PXNIn!%w|I(*ijRw zVu>^2w-R~10M6*C&GW4kHXuRp3r&9=JIA|o=My)G|EzaMwmr7pmh3mCl1j+do&nW@ zWqR&v+0##46P(ZkK(=lgbBvvuPe+R=NG61CpmiXFC&3gvyl0T0o5pGVO%xO4=y*trjo3C$iZGt(tg4g6!ShydBNA@<3m-=mIy*4HiFbWwc*J_uL@g$^Tk z1Q|Xn0cnG%74bDQ@kH1_jNH<3d9Vr8Qc8bB$0y%X zFTt*=F?wQO8vOp_V87ImWJPk(g22Rso?HC78m#IMG5dhNLG_+TODF5{TGUXs!ms(M zz;gjf;Z1m0ZFoa(|L;*DS0MawFSKxsl9beLv~x<0a=qgAj9Vs8<6(i#^x8oM1z_jm zuyFsye?#DzQb>FVDqP2+J;zoC9xeihw!7V`9ZcRDdX$EC+k)BzeEpnW8>q7=SOBri zh(M2^TO~+hlHc-fmJKoufZ=!fw4Wg$w}5x`8?Q56{(P@hi-o}5mz|yc3mMSOS*lBR z-|L`bfq%uP3++1*s8qvvVD+r6rjfJW-sMPa3kVK;>NT8JZbar3hO;W=VdtMFc2x>| zZ@~=xZHAH_m4kB$UlHXxi5Xt_NB}2eJplil0n&MJk{jzg&&l#PR z+!vmP)^qrdTt&+C@9$o$2sh3#o7IzD>uX})ETGQ7m)<6wNX=F*&@@_!*T@{)IJFCS zQ{*hb^F9EXg6VIS{qQihS+thUnEgCWFaNfHbF$xDthV8O zXSvA*_iD`|&=YxX4}I9CCHJ?&unDlgK%89$iO&G9*j=C>Wn*S0nM{*$Ot0S)%RwP4 
zJ2EmddK^nC_U~@Q^;CCOq&HU^eN0?dR~9U{#!nP{7KXn8&jzVL9Gc9k)ihM^dS8<}B| z&iH_E6)1&bTViuoe+sb zK5$3_89vEtu{2zeAO9yJ{2kq%3Jnd72NEh|)s4wXNq6qa;Il8U1O4!^e^g@cFtQW? zq)^0wlyTDbfb{GKO~faR0lB^N<8AH{d|y3$A?7gr ziHN|^%EqPIzebw=CHeoqH^m&LiArF9p{?yK#<=+-`pOZOug%6y1+(oWg!sro-HtXM z17vVrDbu&&TRykJAq7s9e}0%rCOQ3+?);Yr6q< zf8Q+23JMESYD#?HS(~cx7G$d;0$|7hz8KV`-e94ayz6pf73MZ-P;+Vzp7(rOS80;V zZcilGfwmVyjLQ_k7|nyEZN3@-a!NmWq$&h;C&9F&qM}ts{CO{I%ku`1u8rn{G!W)$ zL+-c3!M&xJg3$GXSLg=pROiR>Tjf3vH`2bo`vnsbS&t0`q%S8O3`fy>|IeWgD)LBM zNV7oNgdCjurhO{H!biYQ0#t9%(hGhH&+R&Twv#EocFc>!85|dRm7p2LAW}3(caS^2 zsgj|Mm2_ ze3~&MZf^*Nb$55)KP(~mffT9tQ^i}P@ql_t+{JtIXB3n7|2>z2&PM|lQCUC*v>OxL z#Nh8R_Dg@{+_H-lf^1{>IBT3aisJrY00hy-uy$&|=aNMdK{7cxkG5yG1%3d^u*Pyg z{ZptNHR~LsH7TchuR~t9-KSKd;FNYK!SnuQs1~2(3^=@?ucF@;TpmGyep+V`|HiVV zzOSi~QHJ|xlFrwPUIhrr{6P(=E~)QWW+~~*ExGTcX4!!&R{42SeNxf@RQqM-W_Y%MWy-T zKrUA%+3N3aewiL7pJq=joTw}nEcI}A)hcS-KR+xgN|NkYjKIai>;9*i$!$H={uy&l zQ>Q?y`{KgA7l(?BjD2^sxmjZJ2`E_p8X5WcUr{aCs%QZQ3zB>8;W5q3!V(Wk#Tm2Y zKnQtN;rU8G1^fPv!k{FfIH8G`b>oB7zk-7;0^Fo?+qP-A6|ixf)C^r8^B* zx&cst?A0Pk80kl24h> zWl$#Y?X?!YcEVl<*hk0+ZXbQjzD~IVX?w?5h62GUOeKz^o!CM)gQri6wWtE~zke6C z-(T)7Z0zz!Fy8i=v}v49N=ka#Rpx1f0!6}ike7~*k2gtpBFif)P3ajJ&KX0CM34&564xFe zbo%)-bw;ByxHBjjgZB%PMruP)Nj=kkoD9>``qsW9)5FO{HFiru{REE+9Q$f+F^D9$ zzr>L+^-N-+gjC*C8d15Vm-%01{%*L)7PMV)^_3%%kO5Bh^@L3#fuG? 
zEj-7SN_aPaY}_yS7kh{xm71_2s7h8HawTQu0*se)9_|4>RF>f>Ko;Y0x!cBb|CG?c z)3ud7<%Rf#Ve=SfP7_X{e6jvL0m#&>FO?YIf3Y%RHOQM7D#PNag_AE*(58q#QHHhl zibfsh&YdwC8@tcz^t{^q`h>NgTR5o+!S^oqXKeoa-Wm8?TJK_LIVBGkB^DJHlFol6 z_UVwHVj`zgVp*Z+Y~+`MNk20E%FHTO2WVBJzmRVkTAC89oIXK_4J;xdglM(?2E$y2 z;>*0@uiY_+QggX2G97`0=X1VZ^KKXVZ~jydTD+UQtBHT6|7pXXcozcoAqQ;|kg(PU#rWlP$umg`mdAZk*#ta%S_3HnMqvw?7t?&!{Mj}< zHMr15fGw{1-Mbs5b#?m0&q7Cc?n9{w`)uU#$mr?m*FKI!oQqFVM{xQKztd#r6)?}E zsIqDbF<9^E=394Cqe3Sc(oGK`nNl6|)NyQr{`H=hXVtQemJX`3bnYDY?SDwmOQyD@ zYpOLgzpRasfN@vPC@LzBa}_JA^A9Y3P!Q2l(%~oa*-QV8q&|+W5J%!b%cI-XpBrW7 znqhs8j+=H374vkO9q9W{)W>Dr(R z4J}bDa2~?i4`~Q42X(E?NflX};>?ukaY~tr?P2@8A5pvNay=ZDbja{$Rsnyk?-txa zJk6EF#cHF6+W>GM#KeHd{*4b|!;Cm-htf2)$G${K78Z#_@IeZ!25s!D zth%M1-l-P`E6oM*)O{g+{O!7w`Y=3=9%T&d0ww;AE=C8D65$rmPfE*(;Ji&mJfQoc zU?Fn)teQ>Zedp+U$t%aN&wTD1V-7rRr(u|r^acIe1Gh+w0UOnkR1Pox&ri~)5v%W; zj5&kuy1Mr{{F!-uS7T8yaxOtB>hD<>rcgs{Py}=e zC7+sjE1Oa2UDFnV*J4O2D62PoZhC)CxuAs_oJY{}bbt^Z6X<;mm3cQeRpdbK3s=+N z#>9=lP?Hls8S`bol2-1VPN3xq`9&f$Oef`i&ht3tE(~J->QGMpHh1Gn*8MuZu|4v5 z^e)?@NBCh7ppKih+Ju6T)d8DZTNA%=5}rRWw=JJY_RC9T>3ioj#iPMQR9auZDH?1B zg}Kr`0>x^UuBT7j7Fvf9W|C>i7<_s{mX5rCPaLa5B0?|-Sh!C|zU7NeFfeYTzR?DU zPb_df+T{MKH{*_=oN%O-|JjBwSrf+*9^`W+|@RHW-qVHF2ftLOGwh0?XZ0F&( zdOYt%bYZwu!W;{DVcbN*90*A*{d~@)X!HO!5kp0>kfQc(1FKK9;5zU2KB=@%WK@`i zM#)b!Y0xMZ(J)lrlOg0k!erW$Auh79o=r6zy>cKBxl~j0CcfNM6TxSQLdaYWEV{WM z##|iQehCN+YH+$?a8gr)cKJ?-$Q!841&R;#n(dG1745?%!EQ+#l zaW^enouBuXkCPh)%T)REk!_t@2(+oN!Gb;TmQW|W6Bq_e4z@O-oen+lw}gp|Ik>TG z-)Tqha|f#WCxF8S!zWZ)0|V{p)Mj5{GSS{}Ti8`bzz!y352d#os;p}o9}q|?p{g5A z3)0qXlA0qZ%Xn0W*7)|25kx+n;zXtAW7g`9md?^{mi5oS{R;sa5O6IFUT*>h0-6#j zMsph0QJ(rmrF2>GO4(>c>;fztM9#7YIr@Q=iXxpU9QcENp)HHA8VN798$C-)W;&y2 z=H~39WP*cf$}5u}S=^PVzo_4jhgf01^B%c2qfjw=iI*lt;|^SsJ1wFP!%n1}Qks=j zT3VX5t3e*Y`;EX^)m~YD+IV3qZf0L}F7J>ckg}e=nnzg{+V9_DVn4M1`eYFgf(5s7 z{d(8##EBtjh>C*yszCiZ2{|y!94JTt8$k83LdmI%2Q!t%L9kB%yG`p&en+wM)%Sk+ zXusf#&FSLXBndO(0B1rKFt7DVi{| zT}*h+LGHxHgOwo6`L*4{ttkbo`e%j+80#?hB3$MYGO^;g;)WL0w2<`L{$%q=e$;o` 
z^>B$AuFc`aMg>_s&v{=)Ylv@HbtAT_&&W=*Bx4anQx{AE5h- zb%Vvy);h8ph-9errbm4k#QAx(lp_w+(p7 zhESQN&Hh2BK%}4nYetu9lel(H7JrKcowe`&gyOZe+ta-*)$JXw@87=e z4QKxwG9c(dD)wn%*(z^3-Ci-_E~y^j0|HnHO`0PcXdpUGJ3nM88m8+|%u}Ez$6;}E4Ubqxj{&nr8XE9G! zxB-n!qZNZ6imQC)_Gs0?xcZI1wNDiKn8rKOa^~V9)Q(F+w9Y6A6*3v)V?`vZ=h$|( zO^AobH&wVShO9S@-`S6jxW6xoM4!TU=S5uk*D838?ZQn<_D)w?)Ray2tI&{;u>hNZ zRKK|+m85)8yKwu&JQ_b&@3t?0hcyBlObP2)mu4>g*Mz&~(HhlV?F7xxL%Zo4vd3D- zF;g?fkO7lySB53&rULvRY-+(1q`Q-+QIuA9ZTMzF;AbrMw~mhrm>B;;7}mf?=h9|D zflRtB6-qKq^}?)F9pn+0`X5*<&tb28tayk^?tC`8nGUKE#lEJCcz@V2G(vy{m|-wARn z&f&&4qrN(gruF}`%V1@k@;$+{CZ@@0dYAZe6e0ZJhhp{W+ca1pRRPgU+Tobb*(!BY zvx_%JKUHi`bH(yM3>`$gPu$C*u)i>UA1|c_Tf2hyB40ma)k6zCN{FffwkEx#2 zt~-L4%dZYwNEg39Kj`=S*_P2=Kvh>>?YJ;%0RSBOtZWvwyX_eD)f{y$5m+C_I(M!r5oQt{V;vhG`CAg&h99G?G2F>*F{j~bg zcxTcIb_Z#To7mb&7q2n41Zs>-C{xTWR{NSFa#=?HrL@2K!I*3V z>dldP(2Vk)Cq){DapvjegM zjY~6B!QbrW26!-DaTNy{R0)>LRe%$Lpdm^{E9T*Z=cmwk1_{p;xQ?0Os8wzF`}=Ff zMa5D3IL#GtXLEN?jp@H<`OJh)VLK2a7<4}))d=X*0>kN>#q)z@1&M=Mv62JM5Y z*O*TN?e~Xs@=R=y(CgOL_n1w~xu-L?{2F`Wgv-Ig{R6K5@-tSA5SfE=#O#+Eyul-1QKWLA;TQsENKSvfkE zH;hR8;G*0)0_jB4sGv334%a;L#Uq%1j+Ny;ifm?@puBbIre<|jsR4ih9thk`W`BCY z3cQ^(v!l$=m4?}wq*_}R9=XSWoXhyu&tqN?-YUg5^>L3Xk)})}T{@Be;U6&kG2}j% z6Ak@0bK0^iDW6nF1zs)7>;O!kp8_~Zc5k)cNnS+RkMo;T9PxPl3*)WrDx=X_cx1h2`*V${lvi}Du79R)E27cj7*cQgwmObWoC9nPR$=E$)$M1!l$N_b|PyN zPvk?QkP(=WYMr&?f)7-7-(u(je#3PlvT@Nn-XjL7}X=N&3b~hyg#DXgjMK z*%|>zL1V<2L*z8dz3iU&{0B#&xD|Pxs9n~N&+fx{y40GZdxF+sPH zM=zO>xxVBVp+BF5c$q#ksYsoobW+7B1mjr^rPj`t7olm#l;5o&c)$`! 
zy4mJWkF<=)CuDV~XbvY7%O_kDI3VCxI+1}rc#v8C*5Ze(ueUPnKyTEQgKj!iB4FR# z!3-O?Yo7~Hh~A^Rp&RiSBKcr2dJf>ET)Trvvg2qzz96dcmlTuUl9>MzCl4TMT08`= zdhzF`rJ(^xDrtW%beypB4g=aAZktBLi|9-DP>f2d4Y3r9Z&Bgzr{&V+|C0R&7X8Ng z_I3lISqMV<)9YF3kfXnWEjVqPk*sHE{xL-HEIIZkOPW*nu-~lCdwoOv+2k+MzK>Fj z@$G#PT~Cwlw0%a1z#N6u%A-lC3v1h2V!?nl0fP_0qf}H*@otWg@HVs-rq&K-rrM(8 zE{=$vx;BQ`7rlK}<`wRjY8a5hb3aU+E^W5lv(~S}TG)4D!Sv$0t$KCo%jMSPUkCeH zlh1zLM^B{>@Uoaf-WHkVMi-KXH0s517GXC>EFgxlfNE zme>ujE0`c6mfqbbK!(cBOXSA3t0nJd_HIfjUGVATB#oJCnqKIJ0pTDhfhg+<7FiZ) z=~a7;j_aV>U@_hl*K>Xy0ly znsALJ!=q~}NLt+M={KU+HUsoNC&5&poVG5t~*z(@Z;R5ANh!PO!n0YX?T zC@uDqsMNK1f?zb%pX{~gMAV&lumM!j@Bbv~03H5dzzC>SZsHHI9vp(z+})$SjrtKO zh7PMw+OB4e{b6!>di&+kt~S|#hO5u<4PjgFj-G@T8O_hvyl={p7Q$FG6={p ze1L8(29X8NpF7|OI00A^$?98HT|5FS2bGlEvl6E4=u1e8kW|SIRj&LmYMcqf#QDoP`TyGc>bEGjw(VgYbOuC*5VjyNG)k#- z4-F!zg0u=qNk}(CNlBN2G!jZlhlGGiC?TaZ2vXAB-@3v5KF{+%eEWxe9Gg8e_qx}* z*45{EK_CGQrfX;9d<2SE1_k;O;!BsFV*Mj!yZZ)We-?N61-v1IxnF83c#Y!T-DQ ztAM48YAJ`u+fsrCl`%78QZzjLLlBkT{Dt`EP7cY%R8Tf-0vmbRD`*~by6aicu1{03 zTB4yblQ6^~yXa(dB%dwy*W!rw}=tt2KH42kpjMAxf|8{RV zzlviFT2gEx5QDyE)!EbafuWY9lDfLO6;!f;y|cW%t)GL+k9h}f9(^z`-RKwj8_lQ* zgV+8}Bt&ys)$qwj&ht zi^~?}iJsRm!Ru3b_YDp|gwK==%@r=Jmu0IKf%%~fqWr#ue|P}hDqd#wsg!}9;WB|c zVy^2gFC+xVzVGw!-Y~WZEx+-x#{$`cRA>=Q|AqK=eC+inI3R^RGgxA?DkWV zzOyC7gx{Bo$fP7Ty+TjdgjC(fCZX0eVXl5bJ7 z=pG!k?)P4QbYw0YAWDZUBNKb}k#RC}T&`8I!Cw3M2s>90T6!;x15loB_d%8m+^u3u zUr+%8e68&MZXYwmyvue}-#HjfKJO97f;1l%s~H9E6N^%WOhi-jq7QC`BBjGFHb5bs z3t!fp`8lBd&9@vkS)4pb1}>NI#w5q-reZOUaEJ*UOg|jRR7~3@T=?DQ{3| zTC^^E7Ex@61!wI`9UCnS&h6KyvrHMx=N@X#ygFsS#!=EPpr?g3ML3}DNpG*02f-8ULNe}UyKgp*giBGTo$pe1xPtczGVQo(jq~)(aM-tqsYPGj zpTRbX!j(Lx72y>+0}Y-l6?r2+hb;Qm35}&DND;h>37_OwxXgXICd12{NUBF_^DU<4 zX+v^(kCfG>74%RCMxn5U2JR610D8UrqD=#^jFUC!V^iDV;IOsfr|x%>nB-v zuc5Nth#PLQy*MwdUx(knc#!jb{WH~ z$9R9BPkRN*R`N=Je=m5-XJP2N_?42i2aP5Jn9K4;#P) zGX#7xS=YgR{FX*o<88dvQyb0Fo_Hgsi52afFoV}tuqFcw@HXr{_g9LOu_IJW4c^Me zmoJAPe4G%*Gf^}06n? 
z$W#H236Rc4OE}aW5fw&~xf9pM7Q55}MySLshH^*&E|KlLBEslytw$btV1W6Rl*Ggm zE*k=y0{1sxDSV`emhqFLCi=(WNBWU(K%aS$LtzOrjnJA@1uCfmc`aeNv9bE%r>jGb z!P1ZNo24L}0iJ07tfPDPZeK#=&(Uc1Wyi27c?xO0u@DiEv0_OO)PCM^-4~55SSDCM z_4QzIA(F_&;l6=;9Yf5%6o%d7O%BR-TmIuq6h&@6o~&cR_Q;KOl9tC0@gg zj8#uri`cohgB!qayI=A2BMBP2CgNlCcdx?{)9d`l6IFT1eL5A7ZJdg@+0{PWsBUQ+ zRaEtgV+vvN|G{=sCGX`IQVBcv7<}Z~-LE&7%e87KH_{Cd>gXLIIKBRZ+gGtRHtjVg#~g*B_cohF@uz`CVlnY_bP0a z>GKbsi4IX)dI@Jt8DD_!Coj?~kYqN~6_&#Lw-O1j$)Hqj1(O{W-8~7^W%L_&bLS@*c`_?Eof!V=K01S_V6OQQ(IS78yDtKe%WK8dcXRA{1kn|LiiR)xl&K z_Zcud0;4PYRvk2*@{{A1U(nb((qEuQ2YSOsX4LNT{($n%Yxmb2($C&SwnQR)Stf+J zbfR1TxpL+G!Tc`yeNIqanJzPP)~`S6sbeO|`lb=e0Itd2*xkM}U@bEfI(ku8pqVk$ z&p*-tT-~iAvl1rCM22AcYP`b2!hwT1C7jRN+B3^N$-}^69+bTCLez~slwtK01)z8T`%^cs3 zmm4ttKDp zb~`K+_+nw|QFr&lqeP^I)Jv1^WUkfqicj~aLJ~|)^4RI%%aS-mgMWh;WjZrL&5Tjb z{Vd9Gfd@v=B~~X3*>ARR(%%41{iPL&uO7|4AFu|-G#a&3`08UZ9H!>;El*ute?BKz zaZrw)9jT#XsApD;E2*K!d@NFkbbClL<%U7131oid`+LuUu_jU(NCRr)B|~-nmY?DP zecd!y&W;>mRC{0V6I1%{?-Ad-yVahkrZP7UX?st3cD8dWL~VnzAji3z9nD8vV>aO( zvRLAqo9=x!O*v*kqv3FPuC9jO->D@EK2Z7>n1#D+LVp;TpM!!oq9JCtTjpyp1L|^J zY_#2*#n1aj>@SYhMar~7hGI3N*(bk#eZgsm%|nGc2a0Y|lPUwA?)$`uytxtxF@oAKI9I zwLZ``$u57CwZFVgucND5>dsYeg=ED<-tyUXOQROBMi>Qy;qe|uYNBUOGpH<(nvu(r z%H1BS6EBL@d^rI~ec9nAqZ$p0=+-10E!SLSwlnGkO$ul8SI-DL+fXq0C-0GPs+bmd%eW&0o22dLS*%OD=RDgxB7`_miPQkT2Of8&kSyvyz1W8-+LuA^ zNfG&ugMOinU7Nl7a2^Mn=zW;5`Ac6h$iso&Qe#ul3`YT>RjeVMB3Uw9U;h*-! 
z7N1NO2H3lLtR~?~tC;%RI}ozA)G5zP>KU#*i6sM_i7HDa84^n=jJWSCclJ(yL5&JN z^uuB6VY!L7sXSw4dQAk9DCn+gOPAWHHdK>dSyQt(Cy_-hq zrkBKrhc6SktM0PzwY-x*6bfq(;f0b(K$oYZS{E1>RcjrN>dl(lTbaYBC44}{Rr>yY zl#BFC6N@vyGP+iAP0Sl)8o~x1L}Ea(tWw=ZV{!cZ95Au%xT?tfz@-!>ftRI?{Q1B<`7Z|dOxd5#VGm8oYdC+94Bbnw~AlT zm4fZ@GnAdBseBnB7e#%#Qq%k~+fcEcbwmHPby22?fg4%TD1rb$^C`=OQuV%j836 zgz}Q;xw2)`&2V*lmy<{6qqHb5659c~^7hJ{-(FD}eDvtmEiXA(fGwEMbtDb1`nhsZby&ZRm_{a2pyBl_W}>J913#?ta~a!>mN0xk~tM+X)cEcwyN|G9J; z5ne!xxVSh!Bvxc8qVspp9B+g)*3$br@%^uB^Uv-kY47y<`@f(?fgvqP&CSx> zEP-PD$sP3Fm+@HF%PW*ENB;M_vRF&pZ>Qg1myK$-oJz-4Y49(2l18qAHo(t01pZiG z0R3h|`|`_u6KTBwoH^%zj?R*#eMmuP!R-S@qGx#o0~xDfVMNk%iyB1k*F+Yj9?#ab zal>~ zF77>fiCA~?lZFMjj(1y}t^r7OMlH=%x|iSPm8mB%0oU0`;g0ZnnU=Q#LFbUhG8h{H zL4cY#r+j8hCj>wvDtWS*Wjnv!pP`qRCEZvhF`V$qaj)9Z9oDW-6XsiCcJ$rBxKb|* z!?Yv;8>brHID2H;yDZLKTd)1{=B`$&3$A}RGdZ)4_ zQbX+_V%-6(65pw`TkmX@@i>_X4vC?`ocmCklB53Nd!Rfp-2y&f9CvA1SYEtsgG>+8Os?j;CthA-cT`oAl9Q9~)PgZoMk=&g**9s5IT#nX$qPd}&(PKBjxSxJAmZZn z`QYn=!4Y`s_6VKsrWq5q+Yfa5btd8l#LSM)9h?{|r|f$$PK-#FUsAEQ)do(PW>(&r z+O}f_A07!z+a$6jY`Ad#{2!LwHy7Zz?_{k;G-!D4W+e>_T5+sjV`rwwli!oCDw3J! zBz1XTD?~za{|Zn>fRd-&7bawEa^v)LmJsmX-QCsuzRd}S`or3NZ#kYBtU^RtTOPki z|9+}waM}e>i%2-{6Bl>66tqmmCnSW0E27y%@T`1mfB~Dgf#coVt_?qiGzy!bkF_r? z=q>{NW0(mRcaYXTdl=50tr#%HkdxE@&vUF1#(O;}5(VEItj!qQr&gY4h=h{vZ#(I^2QS~2}oZuEOIuQxud)@$66$H^)mm}vUQ^LUha zT#I3Jy>o;nQWzB6RmN8u#(dK*7HWNk!N*9u_fyZ@*`8{CYZaTs>i5;aH^5w?#R=6F za};8Ttkum5Xo#Z+w&%}cYH)Z=a+nA+tvcRgh@;?zft|eD_>SkeL4AAqHIm48)6icl zuEs!szKVez_lBLtw@&&efJvfRhwfz7`}m7TYoJHap1p zTo7uBnTkLEPZdpwAzs7UEush6cTd?Uh)-6Dzg_TNy^z#!P}{%*YUXtm>I;j^*S(f! 
zy*dv+86F1gg%s@)Y`jX{dzJ8e?P&bu=r`a=dE>ekYWUR2*`|8+Z8xBYY0|58`WAn( z`zsLZluQRu-uS5cPsPch>ayZi4q-f^c&nts5H&6))=C#RT76p&-7qDzkon*Zeg(rixpmt>&6pL4vX?3A#20EI79O;vDL6rBCszYQr>y8 z^L66>04LM=m%gOT6Yua9RFA#?eDV4!x>Mc7SAz8Dob?FT=O(RI{8`*WDd{p;J7xlS zh6G+p9Q8_+53)!4awzl|4|qVEC=}|~zOdz{onfr5z6Xxc+cM?ka5hPGFIM&D_$tx( z!<4Oj)AFY&`kfr{^yLuS_CpP%8^H*kssHr`uAm@D!#)Av=ztol<6cYSrzhdf1}s zV7RqTL#Rek5+Qtg{XuU)$?l6fr$1lwWmG{VPvx;S@rpwVlvWfPv(;r<)Ll2pEgV?T*qzi_t9qZggaA2vGoxAs*Iw*Upq(#Vc`otLHMA_qac zZQ&iQ`hk+e<(1>*YH)%Ixj$A$yibm}7Do<>j)sc*I}UTa_jBxLGv>^;>uOGym}H>p_WBKv`uCmP z>*{q168H984~`U!Uzy#q5&vx@{#hoY{lj*7QF&Ci@uZqVY1i(HuH3=QbK0WhVVtDc z-#K0jIi1P9frckrhL!PGY2@L#wCc*KnG-M;QXc*g-D!&_5+@KdgX?CG{5RF(Y1PD& z-H4ML1Y(ff4a$-9>4oUx>i=}qfAu*0;{8FyWW4^!G0IZ*^SRm{j+8=3B3RaJkvTpDpa7f~@= z{#ll!Yw8+`P>oy$V~{1LDUm4_{j{!UjBmE{3nx}l#^4acCmZ4` z8=W(?#E^|K?OLoYIUf)?;+Dg<>BjOcE)`hO(;JVz=umy$|{3to>( zlc#mf5?2q;Cz?IaSmM;zb<|CK)Lm`Ak1mMGOLZ=^SEl@Q%dO79QP{wH+tizaeZDTR zVU@5!t${^{abt|EM_mW55GO-%S=R=TB50XL!`Vu1^`E5a@Mp#e%!Fe^(yhys4g=iMh14 zu=vGr>%O5$SBJ*sYMy}&Pl?v`9Us{Gk#*Xc(tLzQ>n!W1hbEle+` zt|{YXeUtqplivL6nj+rjFQDft!XXpBcD)m#&sn?^Pt!kccEyfTZZYVOt3H|RH~XJs z4e#pQOrnr}G*M^94L6RkVCCZhB>^Jm(YJ3honIs#ReMT3tn`&m5jgdThDnuI?QHIf zHsUCLTTXf#g~&wjC6DjXBpoS_Zz}%=1V^4nr1(rh3}ru8J9^w+yLr|685B8FA8{B@~v+?qV);GFox zxu^PT-euP;$Wfh7+S(Z{Bb3xJu#Jv}!;as*K-RzMj_+$G=zFU+!^sO2Z>hQu^-edN zG@E!YR(e_1)mvX8HxL*YGOMrkf`(eWC8nvftVF zL-gwZ@M#qph=p#`2s^i(BNlzwC2M`(G-^V`S%q$jBF%|9ecJ+0qq>vfaxj_aANC_) zX{xQpPq7Y>JfCl&K_!CShpW$995!i9xJPw zd!|PSWO25G0aPl&WF)JSFvVOM3Ve~^aQ@$aEcEn#m~ie79*c0*E!;0EnK6ql=W zZEo7LpSUkX!{(cK;PoL)c(91@b^XB`VEQZcmQ2Zr$7!q#ETjV$6GJu1D%#`I#S*6! 
z)mKuEH*<#Vy*K5($tC*Nf5`gXm0%?7`Pob?;X$F@yD%7%^ZAcF_}^Uy$Lk4$8ykY4 zhfOkB_$=IIhx^xVTL_xnkwSQ&u?-+@&`RC4%otS;fz$UjY(UP&R*@*6TQ&uL^p<_1 z8GFeH#E)0wIA;o!_d&fu%$DkYmg?ICZl#EfdZ9;|`wUv|c~BXe^R*yaSRC&!hbY6A ze#d3O&^{$fVYH;@YW9q9a2^ly!5m1&KdnbGGj6dyxxHjZ{|x z+7^Dp#X{p(wQRwm?HVHYQryjd4MiNbM&z&XWO)P(FK2%ZUHG%};n#EgTtieABqwGkRi6;#CL6Mn=Czm~C8|ssH@A*5b^qxa{Afle?dy zSg5>wG*oVB3K74JwWKEqbGcg$fCgh^&m4#oL44Cl2k&ow`)WMd<|n2}zp_Q@2?mi7 z{2`d7{0YF>&Q+NyhVG7US5#i9vsCwDihrtxb2|Dnvv1~WH3pn}DtvL5Ai*K!PD$6enao}$p>ohV*+KaFO;D1&C${UKPb8sU_&?E z03p_>32I(HJMx!$7M9vc?~6GrLT$+Z+8K!4jiviG;@Xt1)%(u|5@Nm6g!SlFx9C}= z2J;IDE{4c0n{#*$Jwdi-Wnepc+uhytSNST5y*Vl<=j0Bgn)!DG`GEFh^5yymAk%=I zpa98iYs~?51lsv*VyO%0_+}#rumXe!cV;?6Dwnbz0o*V4w|lZfFI%Ax?gFA@jFAv; z`eRxzV5EUk=zw!T6+`gt>7egLV&WuFJ0Wjq-bBOoNxPp{va+&nJExD!525g4`5(3a zE$D%-ts#`B1r^F8jE{Zv1u0(C6{(|&u4GmdA^Zq~n*x0|FGk5J+N(D}j$hZ%re7iv9J5Lx|vDxxn*HnUEwyTq-!7{ z4cY~jw~B|hEvi4Z(S##>ZlP_XCcw<97b#vb7&T3JCqtqMLh!vDEN&|t44VPlaErk< zYFGO#Nq94JK#d5o&N?w5%E8S|%?V?bOv^{;!TbXP@H4lb{a2m~i~2uERK;v*W@)xg zUWHf}3P~P?BRmiRs0xQWzF2Kk)s_Ufli3=1#?Qf&=PMT1p-@GQ3vTBcvz4PSby&tI z?bUob?fnz=nrc*^@WC znDf3PC>3P%xEOt^tA$F-%S{FbGWcvmLrr9`sY)u1OTm01)YVd2?_PZeA{LU=4v83f zQlspY=|lzIUETagd)KzDuq!o~G}QRv;fF^wYLvxQS&@jc-=@yagHNsH)UlZt;My!; zQhQk*G;bE`14_YjeU92w^42y(8-XD0N%oLN?dJ+?f8}p3l$rOxSIXuS+7eMy5T-&R zfnSF=r)h{75l+ize7M#BpdR6aKvgLOx#pj!3jpxzB^KG!mt*&N6=?buq!q1dyX11L zdTcXV5F=h#*L)wgn@%;$;N8!271b4^v9u!yUZ?>b+ltWK*7~~e(PEej<295qDbt3U zIIp!%oz^ni)qAA;g9OW5zc(igc|}af)*}eH7M>z#aou{d(9C(oeIV(S3&om54fzK& z;`&ivb(Yaa0=5p&9+au}!<7l4M%$N120iH`=;{5o^!3%~7pFxrqs(4mf=ih0cx*~K zZkRuW46P6a()t}d2vIoOC%kMkNTZlAVo1sy%S>3Fvpj97EZIsumI8I6s*hW@Uo1+q zMphmq?SgWCL#UYq-K33l_2ZGUm=ugzq%am5 zz;vdo+!%1wda6}bf)C-=8@-~V4p~9f3EZ{1RaT7x2gdfa}zgf_}8s=ihX#>r!a6bV6QVt@&<{fGvMq%bLbaNEh@ z{TBUSySm&T9g*Ax*nnk@eMrk-@F6XuyGF+CW-pxhT2_4wu)bhR5F<>_0Jm0QltP8W zZ9l9R0ljS`;rqJoC3joOWDw+B`~9I6x2McYq@fKV#TYZ{Z+C zNU=^x84?o_X_HJ#djoLf3%C`eYbUPws7%Q#C!eP0`%5)@uOj_b8v|Mfs89uQ7)gu+ 
zTxU3&&$|ABcMBOF`M$pF0dx*bcVKHVH`GG>u%Q3;`UA~00McW}ts8KRN%vNh!<7+w zwh=x)c$PT2D0n)l+Kq^U z0?w7DT1K2yU8WgSFa!OGdQ4t&EC28v*dOpy={4QIdlkT17}GvY>F^fD?mUlr|Ku6c zCk5HEVl>Xb3zRNztRB>!3n*;R5}&SWxn1VM+nIh{j-3pD^iy|v;CR=pO$HWsw48MU zc6T0S7C{$KYyIz%zB4xJq;3Atu-iv?=pX!Y3y0Y;xj@bPWw90;Z@{g)O(aFrtZNVjEw_z<{4191F!VdT8R*?IUyJ=P(PVrc)FMGX@jd)%saM zzJquw2O$90-%P;)Du&&M_%f@^FN$|2%pP(frfaS(MvRq5#1%anA;DIU(B3=6BT9e; zL)~b(`ZXZa&mpli5lQUx0tFFk0DPo}-kp@21aCK9;|RxY0dT%LOaK@z-O#z_40%*+YXe;<2Kyi@Q|&OW#OY)q`s9Z7Vl7mwyUq!uU70 zKpT$-&6OdjQiWlLmmDe}lEoc|?wy;;L7}cdgekgOk`JD8KlhU>!+=5D)&Z)>#eOuF z^L!)+bp2&nf$v1`6(YW{$!!eRxqFkJ{|OM`u%f7PsozS&-13dG9QP*9X?uGZzIzU< z8FAULg1+{Vv|9(G?F;nZtuupnewdQ#60oq{i6O6ET^a6Q9L#idi^4ESVr<}~q9w;{ zRyH=&1=<^8V7YYkLs07rMi$@#79;YL;@aZKEz^OX16>kBcTE7xW(Zxuh4mvvca5X$ zs}iWzTS9!7@%w{iy1}&ZC}uW)e;C1|STn}|(>6g!v!*;pEbxrvNGidVpM?92Ai-r% z0##x;dHDildjY+e<_p*Y$v+8yyXJtDi-P{n5T!xhxW$?>?D z67C?>2Am1E{*zZ%BDIZCt!^|Ii|nEgw6z(r+$Mx5w}JTs^> z6#&L^X*HL`B|;XUbYzH9qgCWv))i7o@C@0;fDe}(?F~SY%$eY{-J%tTYw0>%{!pNp z^r{eKDgl7yi5ky+u*uqFL4ltynd7+)yw2s;hI66i+31UOw`lSDLI_+Tfq_GAfWTed zyR$mtKyCi{CKr`~mYl6x#e>VzqlqQ2X0U6he@^E(vMo$luxU*PCd{PTXr2{ZEi=$H za&JohQ2{j>Lv9o@m0pblZcTQ3>-w~!`FpuHE_K(Uf5^cuuN-H|gDcyHbRd5oxWb+?R{Cl`Y><3E|^~YMla2nljxjE&ih`IpZZB`?T=ZoSrmJXh&+8(#pkk zHW&d8>)OQ%hL2I;G}h7G{`VQ?=H`$j_fj6IzR7vE#sm1nKB=lB_~2}Dy3eJc1!l-q3|uG34lrK(#afU>FP#t288<5=N+@n`IcG=lnxGMO7)V4B`av(V#SI z#b^4jv3kHazSk1ui%i=MvWGNjqYwKoB^PqrV>ymh2TPM3}tn7$`AcX$1aRI z2-W|Vk5A1Ap?eSd#qH&7i6HTNjrzJGw~P$zuP8Vx0UvBwHMF9@6IT&a3<|0n!I!jl z=~t@#Oh_es{a_1w6qAI|=9MtxcH80w$|ci+5CBsHNaD8UY<0-)$qZ&QgzFD^Mq}fL zlg2KuJ~TD`LT>dsGX>%F#-{fNxE}49%RdJl3TaVTQQ_B3S2{@jkrr9^ z3cEHIt0#v=3WKfD-PKrpxke&eGy=3925xr8$mMC(eLh6db8A8dkj)N~NTt^U;cRVGObJzOZHfo4ybqbu>O7|*xINS4bvz|uRT!Q_ z%7s)Y$}pA!WR_-R!vNsF;!_Yj8JGu|=%Pw4yx z?G=0>TavLpih`}E8I@ns7GMp1DMY5w8kkfj7}hcg0E)80(wDU8^(nLvwBH(K32OWB z_GlX4)YwLlzgIm>uB9d!tD!rGP}|9`A_Y4IaHniT!@GXJJ{Rnn-+u|GZzT#p4TrVh zcA>p32NM&G{2`JtGKFsUe@r5hx}pbVa!IRj=lQa-~$Wko%ja|ZhV0c 
zh-%7W1Lpi6xC)YnhUO=~Wy!KXda_i6II|@@qb3*(;Q$B?$EKW(N}*xEVxk6!0Z##n z#E2Ra3IEz%_SfBOD(-efm}9H)xiA|M-Kc4705C~1k|zu*7v#pBjsS}VN; zes0I7pTAZ0357bG1*vb!4CX zQXG>OR{P^7tt!C5hdqrT!Twy&2K}Vqr$H;`X!df0Q??oyYX3c2YUfZt9lHU#9_MIOswW^LN} zN8D+w?%dk1QMDcnuq4v(l0-qvU>Sc7wlGCYqs{0W(Ls?nzK;p+zn0ZFDBF8CG&{t; zmZ@bFjjy!S+fiP*MkQ_v69S1Bur4p5fz6QNu?ir;1BD-z5 zS;dF?zq5spIFjBTC1C2OfAs6SEW;Q5S(%q)qHe5viU938Th$F{OwwQoFpU|dGG)5D z@DR^U1_&c`6&q2pa)d92aePY@7SiA42=8&j*_XD}k3RuGEhWzs0rVtKvPh+?Q#m#s z9wd-}sRA?u+p(({bVIlZw@IA<;?SsKy3uIRf z|C2?7{@MfpTLd;=%|O5kY`K-KFz}x0?Vg^$|F3u0%?hM4bz+waxJu$@QHZ76_LCj7 z(4zrv$p!e(7eI!vR>4P1bn(^D9|3G?z)h7hpmRZURga5oj3Nl8>q_<%85-7IzBq{wt z6$>lspqU&_s(;_n7uqO`Dl-nSQM`D;bOAN-AbvVchl!t^Bt{hIvenNoD8SaDNU(=Y zi0HHKs{i2~P>9Ekj1?a;b;N%Ed;axc?R(-`f*zD60V|xyb!(He Date: Thu, 2 Apr 2020 14:15:02 +0800 Subject: [PATCH 048/104] Remove --output text in CLOUDTRAILBUCKET_LOGENABLED When adding `--output text`, aws cli will return `None` instead of `null`. It makes the following if check misjudge LoggingEnabled status. 
--- checks/check26 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check26 b/checks/check26 index 83395527..c05a9214 100644 --- a/checks/check26 +++ b/checks/check26 @@ -27,7 +27,7 @@ check26(){ if [[ $CLOUDTRAILBUCKET ]]; then bucket=$CLOUDTRAILBUCKET if [ "$CLOUDTRAIL_ACCOUNT_ID" == "$ACCOUNT_NUM" ]; then - CLOUDTRAILBUCKET_LOGENABLED=$($AWSCLI s3api get-bucket-logging --bucket $bucket $PROFILE_OPT --region $REGION --query 'LoggingEnabled.TargetBucket' --output text 2>&1) + CLOUDTRAILBUCKET_LOGENABLED=$($AWSCLI s3api get-bucket-logging --bucket $bucket $PROFILE_OPT --region $REGION --query 'LoggingEnabled.TargetBucket' 2>&1) if [[ $(echo "$CLOUDTRAILBUCKET_LOGENABLED" | grep AccessDenied) ]]; then textFail "Access Denied Trying to Get Bucket Logging for $bucket" continue From 5bab65c56d607be479544e8c56f425038ea8ff17 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sat, 11 Apr 2020 03:04:03 +0100 Subject: [PATCH 049/104] - Remove securityhub output mode and replace with '-S' flag to send findings to Security Hub - Move Security Hub related code to a dedicated include/securityhub_integration file - Check that Security Hub is enabled in the target region before beginning checks when -S is specified - Add error handling to the batch-import-findings call - Add CHECK_ASFF_TYPE variables to all CIS checks to override the default - Add support for CHECK_ASFF_RESOURCE_TYPE variables which override the default 'AwsAccount' value for the resource a finding relates to. 
- Add CHECK_ASFF_RESOURCE_TYPE variables to all checks where there is a suitable value in the schema - Remove json-asff output for info messages as they are not appropriate for possible submission to Security Hub - Update the README to cover Security Hub integration - Add an IAM policy JSON document that provides the necessary BatchImportFindings permission for Security Hub - Remove trailing whitespace and periods in pass/fail messages to be consistent with the majority of messages, to prevent future tidy-up from changing the finding IDs --- README.md | 31 ++++++++++++++++--------- checks/check11 | 1 + checks/check110 | 1 + checks/check111 | 1 + checks/check112 | 5 ++-- checks/check113 | 3 ++- checks/check114 | 5 ++-- checks/check115 | 1 + checks/check116 | 8 ++++--- checks/check117 | 1 + checks/check118 | 1 + checks/check119 | 6 +++-- checks/check12 | 2 ++ checks/check120 | 4 +++- checks/check121 | 2 ++ checks/check122 | 2 ++ checks/check13 | 2 ++ checks/check14 | 14 ++++++----- checks/check15 | 1 + checks/check16 | 1 + checks/check17 | 1 + checks/check18 | 1 + checks/check19 | 1 + checks/check21 | 4 +++- checks/check22 | 4 +++- checks/check23 | 2 ++ checks/check24 | 2 ++ checks/check25 | 1 + checks/check26 | 2 ++ checks/check27 | 2 ++ checks/check28 | 4 +++- checks/check29 | 6 +++-- checks/check31 | 2 ++ checks/check310 | 2 ++ checks/check311 | 2 ++ checks/check312 | 2 ++ checks/check313 | 2 ++ checks/check314 | 2 ++ checks/check32 | 2 ++ checks/check33 | 2 ++ checks/check34 | 2 ++ checks/check35 | 2 ++ checks/check36 | 2 ++ checks/check37 | 2 ++ checks/check38 | 2 ++ checks/check39 | 2 ++ checks/check41 | 2 ++ checks/check42 | 2 ++ checks/check43 | 2 ++ checks/check44 | 2 ++ checks/check_extra71 | 1 + checks/check_extra710 | 1 + checks/check_extra711 | 1 + checks/check_extra712 | 2 +- checks/check_extra714 | 1 + checks/check_extra715 | 1 + checks/check_extra716 | 41 +++++++++++++++++---------------- checks/check_extra717 | 1 + checks/check_extra718 | 1 + 
checks/check_extra72 | 1 + checks/check_extra720 | 1 + checks/check_extra721 | 1 + checks/check_extra722 | 1 + checks/check_extra723 | 1 + checks/check_extra724 | 1 + checks/check_extra725 | 1 + checks/check_extra727 | 1 + checks/check_extra728 | 1 + checks/check_extra729 | 1 + checks/check_extra73 | 1 + checks/check_extra730 | 1 + checks/check_extra731 | 1 + checks/check_extra732 | 1 + checks/check_extra734 | 1 + checks/check_extra735 | 1 + checks/check_extra736 | 1 + checks/check_extra737 | 1 + checks/check_extra738 | 1 + checks/check_extra739 | 5 ++-- checks/check_extra74 | 1 + checks/check_extra740 | 3 ++- checks/check_extra741 | 7 +++--- checks/check_extra742 | 3 ++- checks/check_extra743 | 9 ++++---- checks/check_extra744 | 9 ++++---- checks/check_extra745 | 3 ++- checks/check_extra746 | 3 ++- checks/check_extra747 | 3 ++- checks/check_extra748 | 1 + checks/check_extra749 | 1 + checks/check_extra75 | 1 + checks/check_extra750 | 1 + checks/check_extra751 | 1 + checks/check_extra752 | 1 + checks/check_extra753 | 1 + checks/check_extra754 | 1 + checks/check_extra755 | 1 + checks/check_extra756 | 7 +++--- checks/check_extra757 | 1 + checks/check_extra758 | 1 + checks/check_extra759 | 5 ++-- checks/check_extra760 | 3 ++- checks/check_extra762 | 1 + checks/check_extra763 | 3 ++- checks/check_extra764 | 1 + checks/check_extra767 | 3 ++- checks/check_extra768 | 1 + checks/check_extra770 | 1 + checks/check_extra771 | 1 + checks/check_extra772 | 1 + checks/check_extra773 | 1 + checks/check_extra774 | 1 + checks/check_extra777 | 9 ++++---- checks/check_extra778 | 9 ++++---- checks/check_extra779 | 37 ++++++++++++++--------------- checks/check_extra78 | 1 + checks/check_extra780 | 3 ++- checks/check_extra781 | 3 ++- checks/check_extra782 | 3 ++- checks/check_extra783 | 3 ++- checks/check_extra784 | 3 ++- checks/check_extra785 | 9 ++++---- checks/check_extra79 | 1 + checks/check_sample | 3 ++- iam/prowler-security-hub.json | 12 ++++++++++ include/colors | 4 ++-- 
include/outputs | 35 ++++++++++++++-------------- include/securityhub_integration | 37 +++++++++++++++++++++++++++++ prowler | 24 ++++++++++++++++--- 129 files changed, 370 insertions(+), 139 deletions(-) create mode 100644 iam/prowler-security-hub.json create mode 100644 include/securityhub_integration diff --git a/README.md b/README.md index e0c5d0e5..7468c8e4 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX pip install awscli ansi2html detect-secrets ``` - AWS-CLI can be also installed it using "brew", "apt", "yum" or manually from , but `ansi2html` and `detect-secrets` has to be installed using `pip`. You will need to install `jq` to get more accuracy in some checks. + AWS-CLI can be also installed it using "brew", "apt", "yum" or manually from , but `ansi2html` and `detect-secrets` has to be installed using `pip`. You will need to install `jq` to get more accuracy in some checks. - Make sure jq is installed (example below with "apt" but use a valid package manager for your OS): ```sh @@ -81,7 +81,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh aws configure ``` - or + or ```sh export AWS_ACCESS_KEY_ID="ASXXXXXXX" export AWS_SECRET_ACCESS_KEY="XXXXXXXXX" @@ -94,7 +94,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX arn:aws:iam::aws:policy/SecurityAudit ``` - > Additional permissions needed: to make sure Prowler can scan all services included in the group *Extras*, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-additions-policy.json) to the role you are using. 
+ > Additional permissions needed: to make sure Prowler can scan all services included in the group *Extras*, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-security-hub.json). ## Usage @@ -107,7 +107,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX Use `-l` to list all available checks and group of checks (sections) If you want to avoid installing dependences run it using Docker: - + ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest ``` @@ -127,7 +127,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest "-c check310" ``` - + or multiple checks separated by comma: ```sh ./prowler -c check310,check722 @@ -185,6 +185,14 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler -M mono | aws s3 cp - s3://bucket-name/prowler-report.txt ``` +1. If you want Prowler to submit findings to [AWS Security Hub](https://aws.amazon.com/security-hub): + + ```sh + ./prowler -M json-asff -S + ``` + + > Note that Security Hub must be enabled for the active region. It can be enabled by calling `aws securityhub enable-security-hub` + 1. 
To perform an assessment based on CIS Profile Definitions you can use cislevel1 or cislevel2 with `-g` flag, more information about this [here, page 8](https://d0.awsstatic.com/whitepapers/compliance/AWS_CIS_Foundations_Benchmark.pdf): ```sh @@ -228,6 +236,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -b do not print Prowler banner -V show version number & exit -s show scoring report + -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") -x specify external directory with custom checks (i.e. /my/own/checks, files must start by check) -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T @@ -241,17 +250,17 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ## Advanced Usage -### Assume Role: +### Assume Role: Prowler uses the AWS CLI underneath so it uses the same authentication methods. However, there are few ways to run Prowler against multiple accounts using IAM Assume Role feature depending on eachg use case. You can just set up your custom profile inside `~/.aws/config` with all needed information about the role to assume then call it with `./prowler -p your-custom-profile`. Additionally you can use `-A 123456789012` and `-R RemoteRoleToAssume` and Prowler will get those temporary credentials using `aws sts assume-role`, set them up as environment variables and run against that given account. ``` -./prowler -A 123456789012 -R ProwlerRole +./prowler -A 123456789012 -R ProwlerRole ``` > *NOTE 1 about Session Duration*: By default it gets credentials valid for 1 hour (3600 seconds). Depending on the mount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T ` to allow up to 12h (43200 seconds). 
To allow more than 1h you need to modify *"Maximum CLI/API session duration"* for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session). -> *NOTE 2 about Session Duration*: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html). +> *NOTE 2 about Session Duration*: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html). For example, if you want to get only the fails in CSV format from all checks regarding RDS without banner from the AWS Account 123456789012 assuming the role RemoteRoleToAssume and set a fixed session duration of 1h: @@ -322,7 +331,7 @@ Some new and specific checks require Prowler to inherit more permissions than Se [iam/prowler-additions-policy.json](iam/prowler-additions-policy.json) -> Note: Action `ec2:get*` is included in "ProwlerReadOnlyPolicy" policy above, that includes `get-password-data`, type `aws ec2 get-password-data help` to better understand its implications. +> Note: Action `ec2:get*` is included in "ProwlerReadOnlyPolicy" policy above, that includes `get-password-data`, type `aws ec2 get-password-data help` to better understand its implications. 
### Bootstrap Script @@ -371,7 +380,7 @@ or to run just one of the checks: or to run multiple extras in one go: -```sh +```sh ./prowler -c extraNumber,extraNumber ``` @@ -441,7 +450,7 @@ In order to add any new check feel free to create a new extra check in the extra ## Third Party Integrations -### AWS Security Hub +### AWS Security Hub There is a blog post about that integration in the AWS Security blog here diff --git a/checks/check11 b/checks/check11 index f34ebed5..09bdcab1 100644 --- a/checks/check11 +++ b/checks/check11 @@ -12,6 +12,7 @@ CHECK_ID_check11="1.1,1.01" CHECK_TITLE_check11="[check11] Avoid the use of the root account (Scored)" CHECK_SCORED_check11="SCORED" CHECK_TYPE_check11="LEVEL1" +CHECK_ASFF_TYPE_check11="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check101="check11" check11(){ diff --git a/checks/check110 b/checks/check110 index 6f34b0f2..dabb80dc 100644 --- a/checks/check110 +++ b/checks/check110 @@ -12,6 +12,7 @@ CHECK_ID_check110="1.10" CHECK_TITLE_check110="[check110] Ensure IAM password policy prevents password reuse: 24 or greater (Scored)" CHECK_SCORED_check110="SCORED" CHECK_TYPE_check110="LEVEL1" +CHECK_ASFF_TYPE_check110="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check110="check110" check110(){ diff --git a/checks/check111 b/checks/check111 index ea652b63..83575d29 100644 --- a/checks/check111 +++ b/checks/check111 @@ -12,6 +12,7 @@ CHECK_ID_check111="1.11" CHECK_TITLE_check111="[check111] Ensure IAM password policy expires passwords within 90 days or less (Scored)" CHECK_SCORED_check111="SCORED" CHECK_TYPE_check111="LEVEL1" +CHECK_ASFF_TYPE_check111="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check111="check111" check111(){ diff --git a/checks/check112 b/checks/check112 index f6fa9481..a9825ee1 100644 --- 
a/checks/check112 +++ b/checks/check112 @@ -12,6 +12,7 @@ CHECK_ID_check112="1.12" CHECK_TITLE_check112="[check112] Ensure no root account access key exists (Scored)" CHECK_SCORED_check112="SCORED" CHECK_TYPE_check112="LEVEL1" +CHECK_ASFF_TYPE_check112="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check112="check112" check112(){ @@ -22,11 +23,11 @@ check112(){ if [ "$ROOTKEY1" == "false" ];then textPass "No access key 1 found for root" else - textFail "Found access key 1 for root " + textFail "Found access key 1 for root" fi if [ "$ROOTKEY2" == "false" ];then textPass "No access key 2 found for root" else - textFail "Found access key 2 for root " + textFail "Found access key 2 for root" fi } diff --git a/checks/check113 b/checks/check113 index 481daeef..1e034c8d 100644 --- a/checks/check113 +++ b/checks/check113 @@ -12,6 +12,7 @@ CHECK_ID_check113="1.13" CHECK_TITLE_check113="[check113] Ensure MFA is enabled for the root account (Scored)" CHECK_SCORED_check113="SCORED" CHECK_TYPE_check113="LEVEL1" +CHECK_ASFF_TYPE_check113="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check113="check113" check113(){ @@ -20,6 +21,6 @@ check113(){ if [ "$COMMAND113" == "1" ]; then textPass "Virtual MFA is enabled for root" else - textFail "MFA is not ENABLED for root account " + textFail "MFA is not ENABLED for root account" fi } diff --git a/checks/check114 b/checks/check114 index 99391d33..fff68b72 100644 --- a/checks/check114 +++ b/checks/check114 @@ -12,6 +12,7 @@ CHECK_ID_check114="1.14" CHECK_TITLE_check114="[check114] Ensure hardware MFA is enabled for the root account (Scored)" CHECK_SCORED_check114="SCORED" CHECK_TYPE_check114="LEVEL2" +CHECK_ASFF_TYPE_check114="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check114="check114" check114(){ @@ -22,9 +23,9 @@ check114(){ 
if [[ "$COMMAND114" ]]; then textFail "Only Virtual MFA is enabled for root" else - textPass "Hardware MFA is enabled for root " + textPass "Hardware MFA is enabled for root" fi else - textFail "MFA is not ENABLED for root account " + textFail "MFA is not ENABLED for root account" fi } diff --git a/checks/check115 b/checks/check115 index 848bd440..c52db37e 100644 --- a/checks/check115 +++ b/checks/check115 @@ -12,6 +12,7 @@ CHECK_ID_check115="1.15" CHECK_TITLE_check115="[check115] Ensure security questions are registered in the AWS account (Not Scored)" CHECK_SCORED_check115="NOT_SCORED" CHECK_TYPE_check115="LEVEL1" +CHECK_ASFF_TYPE_check115="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check115="check115" check115(){ diff --git a/checks/check116 b/checks/check116 index ad51e34c..c20abbf7 100644 --- a/checks/check116 +++ b/checks/check116 @@ -12,6 +12,8 @@ CHECK_ID_check116="1.16" CHECK_TITLE_check116="[check116] Ensure IAM policies are attached only to groups or roles (Scored)" CHECK_SCORED_check116="SCORED" CHECK_TYPE_check116="LEVEL1" +CHECK_ASFF_TYPE_check116="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check116="AwsIamUser" CHECK_ALTERNATE_check116="check116" check116(){ @@ -21,16 +23,16 @@ check116(){ for user in $LIST_USERS;do USER_POLICY=$($AWSCLI iam list-attached-user-policies --output text $PROFILE_OPT --region $REGION --user-name $user) if [[ $USER_POLICY ]]; then - textFail "$user has managed policy directly attached " + textFail "$user has managed policy directly attached" C116_NUM_USERS=$(expr $C116_NUM_USERS + 1) fi USER_POLICY=$($AWSCLI iam list-user-policies --output text $PROFILE_OPT --region $REGION --user-name $user) if [[ $USER_POLICY ]]; then - textFail "$user has inline policy directly attached " + textFail "$user has inline policy directly attached" C116_NUM_USERS=$(expr $C116_NUM_USERS + 1) 
fi done if [[ $C116_NUM_USERS -eq 0 ]]; then - textPass "No policies attached to users." + textPass "No policies attached to users" fi } diff --git a/checks/check117 b/checks/check117 index 69f897c9..1264c99e 100644 --- a/checks/check117 +++ b/checks/check117 @@ -12,6 +12,7 @@ CHECK_ID_check117="1.17" CHECK_TITLE_check117="[check117] Maintain current contact details (Not Scored)" CHECK_SCORED_check117="NOT_SCORED" CHECK_TYPE_check117="LEVEL1" +CHECK_ASFF_TYPE_check117="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check117="check117" check117(){ diff --git a/checks/check118 b/checks/check118 index 4217fbf1..abd76906 100644 --- a/checks/check118 +++ b/checks/check118 @@ -12,6 +12,7 @@ CHECK_ID_check118="1.18" CHECK_TITLE_check118="[check118] Ensure security contact information is registered (Not Scored)" CHECK_SCORED_check118="NOT_SCORED" CHECK_TYPE_check118="LEVEL1" +CHECK_ASFF_TYPE_check118="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check118="check118" check118(){ diff --git a/checks/check119 b/checks/check119 index 4dbc7fd0..5a0a90d7 100644 --- a/checks/check119 +++ b/checks/check119 @@ -12,6 +12,8 @@ CHECK_ID_check119="1.19" CHECK_TITLE_check119="[check119] Ensure IAM instance roles are used for AWS resource access from instances (Not Scored)" CHECK_SCORED_check119="NOT_SCORED" CHECK_TYPE_check119="LEVEL2" +CHECK_ASFF_TYPE_check119="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check119="AwsEc2Instance" CHECK_ALTERNATE_check119="check119" check119(){ @@ -25,9 +27,9 @@ check119(){ if [[ $STATE_NAME != "terminated" ]]; then PROFILEARN=$(echo $EC2_DATA | jq -r --arg i "$instance" 'select(.InstanceId==$i)|.ProfileArn') if [[ $PROFILEARN == "null" ]]; then - textFail "$regx: Instance $instance not associated with an instance role." 
$regx + textFail "$regx: Instance $instance not associated with an instance role" $regx else - textPass "$regx: Instance $instance associated with role ${PROFILEARN##*/}." $regx + textPass "$regx: Instance $instance associated with role ${PROFILEARN##*/}" $regx fi fi done diff --git a/checks/check12 b/checks/check12 index 6e82f10c..15bc50f1 100644 --- a/checks/check12 +++ b/checks/check12 @@ -12,6 +12,8 @@ CHECK_ID_check12="1.2,1.02" CHECK_TITLE_check12="[check12] Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Scored)" CHECK_SCORED_check12="SCORED" CHECK_TYPE_check12="LEVEL1" +CHECK_ASFF_TYPE_check12="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check12="AwsIamUser" CHECK_ALTERNATE_check102="check12" check12(){ diff --git a/checks/check120 b/checks/check120 index 2983642f..dd1d4fc5 100644 --- a/checks/check120 +++ b/checks/check120 @@ -12,6 +12,8 @@ CHECK_ID_check120="1.20" CHECK_TITLE_check120="[check120] Ensure a support role has been created to manage incidents with AWS Support (Scored)" CHECK_SCORED_check120="SCORED" CHECK_TYPE_check120="LEVEL1" +CHECK_ASFF_TYPE_check120="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check120="AwsIamRole" CHECK_ALTERNATE_check120="check120" check120(){ @@ -28,7 +30,7 @@ check120(){ # textInfo "User $user has support access via $policyarn" # done else - textFail "Support Policy not applied to any Role " + textFail "Support Policy not applied to any Role" fi done else diff --git a/checks/check121 b/checks/check121 index 64032c05..b909eb7b 100644 --- a/checks/check121 +++ b/checks/check121 @@ -12,6 +12,8 @@ CHECK_ID_check121="1.21" CHECK_TITLE_check121="[check121] Do not setup access keys during initial user setup for all IAM users that have a console password (Not Scored)" CHECK_SCORED_check121="NOT_SCORED" 
CHECK_TYPE_check121="LEVEL1" +CHECK_ASFF_TYPE_check121="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check121="AwsIamUser" CHECK_ALTERNATE_check121="check121" check121(){ diff --git a/checks/check122 b/checks/check122 index 0ae9e6c3..1c4fdec2 100644 --- a/checks/check122 +++ b/checks/check122 @@ -12,6 +12,8 @@ CHECK_ID_check122="1.22" CHECK_TITLE_check122="[check122] Ensure IAM policies that allow full \"*:*\" administrative privileges are not created (Scored)" CHECK_SCORED_check122="SCORED" CHECK_TYPE_check122="LEVEL1" +CHECK_ASFF_TYPE_check122="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check122="AwsIamPolicy" CHECK_ALTERNATE_check122="check122" check122(){ diff --git a/checks/check13 b/checks/check13 index 10289768..9e0a4616 100644 --- a/checks/check13 +++ b/checks/check13 @@ -12,6 +12,8 @@ CHECK_ID_check13="1.3,1.03" CHECK_TITLE_check13="[check13] Ensure credentials unused for 90 days or greater are disabled (Scored)" CHECK_SCORED_check13="SCORED" CHECK_TYPE_check13="LEVEL1" +CHECK_ASFF_TYPE_check13="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check13="AwsIamUser" CHECK_ALTERNATE_check103="check13" check13(){ diff --git a/checks/check14 b/checks/check14 index 88d14407..86925def 100644 --- a/checks/check14 +++ b/checks/check14 @@ -12,6 +12,8 @@ CHECK_ID_check14="1.4,1.04" CHECK_TITLE_check14="[check14] Ensure access keys are rotated every 90 days or less (Scored)" CHECK_SCORED_check14="SCORED" CHECK_TYPE_check14="LEVEL1" +CHECK_ASFF_TYPE_check14="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check14="AwsIamUser" CHECK_ALTERNATE_check104="check14" check14(){ @@ -28,15 +30,15 @@ check14(){ HOWOLDER=$(how_older_from_today $DATEROTATED1) if [ 
$HOWOLDER -gt "90" ];then - textFail " $user has not rotated access key1 in over 90 days." + textFail " $user has not rotated access key1 in over 90 days" C14_NUM_USERS1=$(expr $C14_NUM_USERS1 + 1) fi done if [[ $C14_NUM_USERS1 -eq 0 ]]; then - textPass "No users with access key 1 older than 90 days." + textPass "No users with access key 1 older than 90 days" fi else - textPass "No users with access key 1." + textPass "No users with access key 1" fi if [[ $LIST_OF_USERS_WITH_ACCESS_KEY2 ]]; then @@ -46,14 +48,14 @@ check14(){ DATEROTATED2=$(cat $TEMP_REPORT_FILE | grep -v user_creation_time | grep "^${user},"| awk -F, '{ print $15 }' | grep -v "N/A" | awk -F"T" '{ print $1 }') HOWOLDER=$(how_older_from_today $DATEROTATED2) if [ $HOWOLDER -gt "90" ];then - textFail " $user has not rotated access key2 in over 90 days. " + textFail " $user has not rotated access key2 in over 90 days" C14_NUM_USERS2=$(expr $C14_NUM_USERS2 + 1) fi done if [[ $C14_NUM_USERS2 -eq 0 ]]; then - textPass "No users with access key 2 older than 90 days." + textPass "No users with access key 2 older than 90 days" fi else - textPass "No users with access key 2." 
+ textPass "No users with access key 2" fi } diff --git a/checks/check15 b/checks/check15 index aedcba17..0aa9c732 100644 --- a/checks/check15 +++ b/checks/check15 @@ -12,6 +12,7 @@ CHECK_ID_check15="1.5,1.05" CHECK_TITLE_check15="[check15] Ensure IAM password policy requires at least one uppercase letter (Scored)" CHECK_SCORED_check15="SCORED" CHECK_TYPE_check15="LEVEL1" +CHECK_ASFF_TYPE_check15="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check105="check15" check15(){ diff --git a/checks/check16 b/checks/check16 index de224521..9ee4965b 100644 --- a/checks/check16 +++ b/checks/check16 @@ -12,6 +12,7 @@ CHECK_ID_check16="1.6,1.06" CHECK_TITLE_check16="[check16] Ensure IAM password policy require at least one lowercase letter (Scored)" CHECK_SCORED_check16="SCORED" CHECK_TYPE_check16="LEVEL1" +CHECK_ASFF_TYPE_check16="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check106="check16" check16(){ diff --git a/checks/check17 b/checks/check17 index f344c759..fc56ffe7 100644 --- a/checks/check17 +++ b/checks/check17 @@ -12,6 +12,7 @@ CHECK_ID_check17="1.7,1.07" CHECK_TITLE_check17="[check17] Ensure IAM password policy require at least one symbol (Scored)" CHECK_SCORED_check17="SCORED" CHECK_TYPE_check17="LEVEL1" +CHECK_ASFF_TYPE_check17="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check107="check17" check17(){ diff --git a/checks/check18 b/checks/check18 index 412de291..732b87bd 100644 --- a/checks/check18 +++ b/checks/check18 @@ -12,6 +12,7 @@ CHECK_ID_check18="1.8,1.08" CHECK_TITLE_check18="[check18] Ensure IAM password policy require at least one number (Scored)" CHECK_SCORED_check18="SCORED" CHECK_TYPE_check18="LEVEL1" +CHECK_ASFF_TYPE_check18="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" 
CHECK_ALTERNATE_check108="check18" check18(){ diff --git a/checks/check19 b/checks/check19 index 6e924ae8..60aad8ed 100644 --- a/checks/check19 +++ b/checks/check19 @@ -12,6 +12,7 @@ CHECK_ID_check19="1.9,1.09" CHECK_TITLE_check19="[check19] Ensure IAM password policy requires minimum length of 14 or greater (Scored)" CHECK_SCORED_check19="SCORED" CHECK_TYPE_check19="LEVEL1" +CHECK_ASFF_TYPE_check19="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check109="check19" check19(){ diff --git a/checks/check21 b/checks/check21 index 591b911c..d1a23103 100644 --- a/checks/check21 +++ b/checks/check21 @@ -12,6 +12,8 @@ CHECK_ID_check21="2.1,2.01" CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" +CHECK_ASFF_TYPE_check21="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check21="AwsCloudTrailTrail" CHECK_ALTERNATE_check201="check21" check21(){ @@ -27,7 +29,7 @@ check21(){ MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status $PROFILE_OPT --region $regx --name $trail --query ['IsLogging'] --output text) INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) - READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) + READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) if [[ "$MULTIREGION_TRAIL_STATUS" == "True" ]] && [[ 
"$ISLOGGING_STATUS" == "True" ]] && [[ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ]] && [[ "$READWRITETYPE_STATUS" == *"All"* ]];then textPass "$trail trail in $regx is enabled for all regions" result='True' diff --git a/checks/check22 b/checks/check22 index 18b13742..1646a94f 100644 --- a/checks/check22 +++ b/checks/check22 @@ -12,11 +12,13 @@ CHECK_ID_check22="2.2,2.02" CHECK_TITLE_check22="[check22] Ensure CloudTrail log file validation is enabled (Scored)" CHECK_SCORED_check22="SCORED" CHECK_TYPE_check22="LEVEL2" +CHECK_ASFF_TYPE_check22="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check22="AwsCloudTrailTrail" CHECK_ALTERNATE_check202="check22" check22(){ # "Ensure CloudTrail log file validation is enabled (Scored)" - + for regx in $REGIONS; do LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text --no-include-shadow-trails) if [[ $LIST_OF_TRAILS ]];then diff --git a/checks/check23 b/checks/check23 index 53d1b6f6..feab2c6b 100644 --- a/checks/check23 +++ b/checks/check23 @@ -12,6 +12,8 @@ CHECK_ID_check23="2.3,2.03" CHECK_TITLE_check23="[check23] Ensure the S3 bucket CloudTrail logs to is not publicly accessible (Scored)" CHECK_SCORED_check23="SCORED" CHECK_TYPE_check23="LEVEL1" +CHECK_ASFF_TYPE_check23="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check23="AwsS3Bucket" CHECK_ALTERNATE_check203="check23" check23(){ diff --git a/checks/check24 b/checks/check24 index 35185035..581878bc 100644 --- a/checks/check24 +++ b/checks/check24 @@ -12,6 +12,8 @@ CHECK_ID_check24="2.4,2.04" CHECK_TITLE_check24="[check24] Ensure CloudTrail trails are integrated with CloudWatch Logs (Scored)" CHECK_SCORED_check24="SCORED" CHECK_TYPE_check24="LEVEL1" +CHECK_ASFF_TYPE_check24="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS 
Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check24="AwsCloudTrailTrail" CHECK_ALTERNATE_check204="check24" check24(){ diff --git a/checks/check25 b/checks/check25 index d8d81732..6bb12698 100644 --- a/checks/check25 +++ b/checks/check25 @@ -12,6 +12,7 @@ CHECK_ID_check25="2.5,2.05" CHECK_TITLE_check25="[check25] Ensure AWS Config is enabled in all regions (Scored)" CHECK_SCORED_check25="SCORED" CHECK_TYPE_check25="LEVEL1" +CHECK_ASFF_TYPE_check25="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" CHECK_ALTERNATE_check205="check25" check25(){ diff --git a/checks/check26 b/checks/check26 index 83395527..8647fd88 100644 --- a/checks/check26 +++ b/checks/check26 @@ -12,6 +12,8 @@ CHECK_ID_check26="2.6,2.06" CHECK_TITLE_check26="[check26] Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket (Scored)" CHECK_SCORED_check26="SCORED" CHECK_TYPE_check26="LEVEL1" +CHECK_ASFF_TYPE_check26="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check26="AwsS3Bucket" CHECK_ALTERNATE_check206="check26" check26(){ diff --git a/checks/check27 b/checks/check27 index f32b7bfe..56fd7392 100644 --- a/checks/check27 +++ b/checks/check27 @@ -12,6 +12,8 @@ CHECK_ID_check27="2.7,2.07" CHECK_TITLE_check27="[check27] Ensure CloudTrail logs are encrypted at rest using KMS CMKs (Scored)" CHECK_SCORED_check27="SCORED" CHECK_TYPE_check27="LEVEL2" +CHECK_ASFF_TYPE_check27="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check27="AwsCloudTrailTrail" CHECK_ALTERNATE_check207="check27" check27(){ diff --git a/checks/check28 b/checks/check28 index bec0e234..adfbca41 100644 --- a/checks/check28 +++ b/checks/check28 @@ -12,6 +12,8 @@ CHECK_ID_check28="2.8,2.08" CHECK_TITLE_check28="[check28] Ensure rotation for customer created CMKs is enabled (Scored)" CHECK_SCORED_check28="SCORED" 
CHECK_TYPE_check28="LEVEL2" +CHECK_ASFF_TYPE_check28="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check28="AwsKmsKey" CHECK_ALTERNATE_check208="check28" check28(){ @@ -27,7 +29,7 @@ check28(){ for key in $CHECK_KMS_KEYLIST_NO_DEFAULT; do CHECK_KMS_KEY_TYPE=$($AWSCLI kms describe-key --key-id $key $PROFILE_OPT --region $regx --query 'KeyMetadata.Origin' | sed 's/["]//g') if [[ "$CHECK_KMS_KEY_TYPE" == "EXTERNAL" ]];then - textPass "$regx: Key $key in Region $regx Customer Uploaded Key Material." "$regx" + textPass "$regx: Key $key in Region $regx Customer Uploaded Key Material" "$regx" else CHECK_KMS_KEY_ROTATION=$($AWSCLI kms get-key-rotation-status --key-id $key $PROFILE_OPT --region $regx --output text) if [[ "$CHECK_KMS_KEY_ROTATION" == "True" ]];then diff --git a/checks/check29 b/checks/check29 index d1f23dc8..c49efb2d 100644 --- a/checks/check29 +++ b/checks/check29 @@ -12,10 +12,12 @@ CHECK_ID_check29="2.9,2.09" CHECK_TITLE_check29="[check29] Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" CHECK_SCORED_check29="SCORED" CHECK_TYPE_check29="LEVEL2" +CHECK_ASFF_TYPE_check29="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check29="AwsEc2Vpc" CHECK_ALTERNATE_check209="check29" check29(){ - # "Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" + # "Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" for regx in $REGIONS; do AVAILABLE_VPC=$($AWSCLI ec2 describe-vpcs $PROFILE_OPT --region $regx --query 'Vpcs[?State==`available`].VpcId' --output text) for vpcx in $AVAILABLE_VPC; do @@ -26,7 +28,7 @@ check29(){ done else textFail "VPC $vpcx: No VPCFlowLog has been found in Region $regx" "$regx" - fi + fi done done } diff --git a/checks/check31 b/checks/check31 index 8a8d1329..21768a15 100644 --- a/checks/check31 +++ b/checks/check31 @@ -37,6 +37,8 @@ CHECK_ID_check31="3.1,3.01" 
CHECK_TITLE_check31="[check31] Ensure a log metric filter and alarm exist for unauthorized API calls (Scored)" CHECK_SCORED_check31="SCORED" CHECK_TYPE_check31="LEVEL1" +CHECK_ASFF_TYPE_check31="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check31="AwsCloudTrailTrail" CHECK_ALTERNATE_check301="check31" check31(){ diff --git a/checks/check310 b/checks/check310 index 77f4b5a3..65d50773 100644 --- a/checks/check310 +++ b/checks/check310 @@ -37,6 +37,8 @@ CHECK_ID_check310="3.10" CHECK_TITLE_check310="[check310] Ensure a log metric filter and alarm exist for security group changes (Scored)" CHECK_SCORED_check310="SCORED" CHECK_TYPE_check310="LEVEL2" +CHECK_ASFF_TYPE_check310="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check310="AwsCloudTrailTrail" CHECK_ALTERNATE_check310="check310" check310(){ diff --git a/checks/check311 b/checks/check311 index b69fc75a..e38af0dc 100644 --- a/checks/check311 +++ b/checks/check311 @@ -37,6 +37,8 @@ CHECK_ID_check311="3.11" CHECK_TITLE_check311="[check311] Ensure a log metric filter and alarm exist for changes to Network Access Control Lists (NACL) (Scored)" CHECK_SCORED_check311="SCORED" CHECK_TYPE_check311="LEVEL2" +CHECK_ASFF_TYPE_check311="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check311="AwsCloudTrailTrail" CHECK_ALTERNATE_check311="check311" check311(){ diff --git a/checks/check312 b/checks/check312 index 49e1462e..b5abde10 100644 --- a/checks/check312 +++ b/checks/check312 @@ -37,6 +37,8 @@ CHECK_ID_check312="3.12" CHECK_TITLE_check312="[check312] Ensure a log metric filter and alarm exist for changes to network gateways (Scored)" CHECK_SCORED_check312="SCORED" CHECK_TYPE_check312="LEVEL1" +CHECK_ASFF_TYPE_check312="Software and Configuration Checks/Industry and Regulatory Standards/CIS 
AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check312="AwsCloudTrailTrail" CHECK_ALTERNATE_check312="check312" check312(){ diff --git a/checks/check313 b/checks/check313 index a6a81edc..0514045c 100644 --- a/checks/check313 +++ b/checks/check313 @@ -37,6 +37,8 @@ CHECK_ID_check313="3.13" CHECK_TITLE_check313="[check313] Ensure a log metric filter and alarm exist for route table changes (Scored)" CHECK_SCORED_check313="SCORED" CHECK_TYPE_check313="LEVEL1" +CHECK_ASFF_TYPE_check313="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check313="AwsCloudTrailTrail" CHECK_ALTERNATE_check313="check313" check313(){ diff --git a/checks/check314 b/checks/check314 index 3a6c9d7d..de9c3875 100644 --- a/checks/check314 +++ b/checks/check314 @@ -37,6 +37,8 @@ CHECK_ID_check314="3.14" CHECK_TITLE_check314="[check314] Ensure a log metric filter and alarm exist for VPC changes (Scored)" CHECK_SCORED_check314="SCORED" CHECK_TYPE_check314="LEVEL1" +CHECK_ASFF_TYPE_check314="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check314="AwsCloudTrailTrail" CHECK_ALTERNATE_check314="check314" check314(){ diff --git a/checks/check32 b/checks/check32 index 83779842..745d38d5 100644 --- a/checks/check32 +++ b/checks/check32 @@ -37,6 +37,8 @@ CHECK_ID_check32="3.2,3.02" CHECK_TITLE_check32="[check32] Ensure a log metric filter and alarm exist for Management Console sign-in without MFA (Scored)" CHECK_SCORED_check32="SCORED" CHECK_TYPE_check32="LEVEL1" +CHECK_ASFF_TYPE_check32="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check32="AwsCloudTrailTrail" CHECK_ALTERNATE_check302="check32" check32(){ diff --git a/checks/check33 b/checks/check33 index 00c6d7dd..dd2b94db 100644 --- a/checks/check33 +++ b/checks/check33 @@ -37,6 +37,8 @@ CHECK_ID_check33="3.3,3.03" 
CHECK_TITLE_check33="[check33] Ensure a log metric filter and alarm exist for usage of root account (Scored)" CHECK_SCORED_check33="SCORED" CHECK_TYPE_check33="LEVEL1" +CHECK_ASFF_TYPE_check33="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check33="AwsCloudTrailTrail" CHECK_ALTERNATE_check303="check33" check33(){ diff --git a/checks/check34 b/checks/check34 index e4751c79..86a55b25 100644 --- a/checks/check34 +++ b/checks/check34 @@ -37,6 +37,8 @@ CHECK_ID_check34="3.4,3.04" CHECK_TITLE_check34="[check34] Ensure a log metric filter and alarm exist for IAM policy changes (Scored)" CHECK_SCORED_check34="SCORED" CHECK_TYPE_check34="LEVEL1" +CHECK_ASFF_TYPE_check34="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check34="AwsCloudTrailTrail" CHECK_ALTERNATE_check304="check34" check34(){ diff --git a/checks/check35 b/checks/check35 index b8b4c6cf..929be5b8 100644 --- a/checks/check35 +++ b/checks/check35 @@ -37,6 +37,8 @@ CHECK_ID_check35="3.5,3.05" CHECK_TITLE_check35="[check35] Ensure a log metric filter and alarm exist for CloudTrail configuration changes (Scored)" CHECK_SCORED_check35="SCORED" CHECK_TYPE_check35="LEVEL1" +CHECK_ASFF_TYPE_check35="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check35="AwsCloudTrailTrail" CHECK_ALTERNATE_check305="check35" check35(){ diff --git a/checks/check36 b/checks/check36 index 699739d8..06e0d557 100644 --- a/checks/check36 +++ b/checks/check36 @@ -37,6 +37,8 @@ CHECK_ID_check36="3.6,3.06" CHECK_TITLE_check36="[check36] Ensure a log metric filter and alarm exist for AWS Management Console authentication failures (Scored)" CHECK_SCORED_check36="SCORED" CHECK_TYPE_check36="LEVEL2" +CHECK_ASFF_TYPE_check36="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations 
Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check36="AwsCloudTrailTrail" CHECK_ALTERNATE_check306="check36" check36(){ diff --git a/checks/check37 b/checks/check37 index c0390bc9..4c6dd4d3 100644 --- a/checks/check37 +++ b/checks/check37 @@ -37,6 +37,8 @@ CHECK_ID_check37="3.7,3.07" CHECK_TITLE_check37="[check37] Ensure a log metric filter and alarm exist for disabling or scheduled deletion of customer created CMKs (Scored)" CHECK_SCORED_check37="SCORED" CHECK_TYPE_check37="LEVEL2" +CHECK_ASFF_TYPE_check37="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check37="AwsCloudTrailTrail" CHECK_ALTERNATE_check307="check37" check37(){ diff --git a/checks/check38 b/checks/check38 index ddc69d36..c112620c 100644 --- a/checks/check38 +++ b/checks/check38 @@ -37,6 +37,8 @@ CHECK_ID_check38="3.8,3.08" CHECK_TITLE_check38="[check38] Ensure a log metric filter and alarm exist for S3 bucket policy changes (Scored)" CHECK_SCORED_check38="SCORED" CHECK_TYPE_check38="LEVEL1" +CHECK_ASFF_TYPE_check38="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check38="AwsCloudTrailTrail" CHECK_ALTERNATE_check308="check38" check38(){ diff --git a/checks/check39 b/checks/check39 index 3a812a4a..a649f157 100644 --- a/checks/check39 +++ b/checks/check39 @@ -37,6 +37,8 @@ CHECK_ID_check39="3.9,3.09" CHECK_TITLE_check39="[check39] Ensure a log metric filter and alarm exist for AWS Config configuration changes (Scored)" CHECK_SCORED_check39="SCORED" CHECK_TYPE_check39="LEVEL2" +CHECK_ASFF_TYPE_check39="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check39="AwsCloudTrailTrail" CHECK_ALTERNATE_check309="check39" check39(){ diff --git a/checks/check41 b/checks/check41 index 30fd9131..00260ebb 100644 --- a/checks/check41 +++ b/checks/check41 @@ -12,6 +12,8 @@ 
CHECK_ID_check41="4.1,4.01" CHECK_TITLE_check41="[check41] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 22 (Scored)" CHECK_SCORED_check41="SCORED" CHECK_TYPE_check41="LEVEL2" +CHECK_ASFF_TYPE_check41="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check41="AwsEc2SecurityGroup" CHECK_ALTERNATE_check401="check41" check41(){ diff --git a/checks/check42 b/checks/check42 index 92187620..a362b0cb 100644 --- a/checks/check42 +++ b/checks/check42 @@ -12,6 +12,8 @@ CHECK_ID_check42="4.2,4.02" CHECK_TITLE_check42="[check42] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 3389 (Scored)" CHECK_SCORED_check42="SCORED" CHECK_TYPE_check42="LEVEL2" +CHECK_ASFF_TYPE_check42="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check42="AwsEc2SecurityGroup" CHECK_ALTERNATE_check402="check42" check42(){ diff --git a/checks/check43 b/checks/check43 index 4599934d..6c0122ff 100644 --- a/checks/check43 +++ b/checks/check43 @@ -12,6 +12,8 @@ CHECK_ID_check43="4.3,4.03" CHECK_TITLE_check43="[check43] Ensure the default security group of every VPC restricts all traffic (Scored)" CHECK_SCORED_check43="SCORED" CHECK_TYPE_check43="LEVEL2" +CHECK_ASFF_TYPE_check43="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check43="AwsEc2SecurityGroup" CHECK_ALTERNATE_check403="check43" check43(){ diff --git a/checks/check44 b/checks/check44 index a45c116b..55ceb977 100644 --- a/checks/check44 +++ b/checks/check44 @@ -12,6 +12,8 @@ CHECK_ID_check44="4.4,4.04" CHECK_TITLE_check44="[check44] Ensure routing tables for VPC peering are \"least access\" (Not Scored)" CHECK_SCORED_check44="NOT_SCORED" CHECK_TYPE_check44="LEVEL2" +CHECK_ASFF_TYPE_check44="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations 
Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check44="AwsEc2Vpc" CHECK_ALTERNATE_check404="check44" check44(){ diff --git a/checks/check_extra71 b/checks/check_extra71 index 8667666b..368ad9d4 100644 --- a/checks/check_extra71 +++ b/checks/check_extra71 @@ -14,6 +14,7 @@ CHECK_ID_extra71="7.1,7.01" CHECK_TITLE_extra71="[extra71] Ensure users of groups with AdministratorAccess policy have MFA tokens enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra71="NOT_SCORED" CHECK_TYPE_extra71="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra71="AwsIamUser" CHECK_ALTERNATE_extra701="extra71" CHECK_ALTERNATE_check71="extra71" CHECK_ALTERNATE_check701="extra71" diff --git a/checks/check_extra710 b/checks/check_extra710 index c259695a..55216b3d 100644 --- a/checks/check_extra710 +++ b/checks/check_extra710 @@ -14,6 +14,7 @@ CHECK_ID_extra710="7.10" CHECK_TITLE_extra710="[extra710] Check for internet facing EC2 Instances (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra710="NOT_SCORED" CHECK_TYPE_extra710="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra710="AwsEc2Instance" CHECK_ALTERNATE_check710="extra710" extra710(){ diff --git a/checks/check_extra711 b/checks/check_extra711 index 717c8680..3e9b29ee 100644 --- a/checks/check_extra711 +++ b/checks/check_extra711 @@ -14,6 +14,7 @@ CHECK_ID_extra711="7.11" CHECK_TITLE_extra711="[extra711] Check for Publicly Accessible Redshift Clusters (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra711="NOT_SCORED" CHECK_TYPE_extra711="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra711="AwsRedshiftCluster" CHECK_ALTERNATE_check711="extra711" extra711(){ diff --git a/checks/check_extra712 b/checks/check_extra712 index 60e71566..251d93a5 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -22,7 +22,7 @@ extra712(){ textInfo "just looking if IAM Macie related permissions exist. 
" MACIE_IAM_ROLES_CREATED=$($AWSCLI iam list-roles $PROFILE_OPT --query 'Roles[*].Arn'|grep AWSMacieServiceCustomer|wc -l) if [[ $MACIE_IAM_ROLES_CREATED -eq 2 ]];then - textPass "Macie related IAM roles exist so it might be enabled. Check it out manually." + textPass "Macie related IAM roles exist so it might be enabled. Check it out manually" else textFail "No Macie related IAM roles found. It is most likely not to be enabled" fi diff --git a/checks/check_extra714 b/checks/check_extra714 index a47ecffe..cb57de85 100644 --- a/checks/check_extra714 +++ b/checks/check_extra714 @@ -14,6 +14,7 @@ CHECK_ID_extra714="7.14" CHECK_TITLE_extra714="[extra714] Check if CloudFront distributions have logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra714="NOT_SCORED" CHECK_TYPE_extra714="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra714="AwsCloudFrontDistribution" CHECK_ALTERNATE_check714="extra714" extra714(){ diff --git a/checks/check_extra715 b/checks/check_extra715 index 0acde205..2268b719 100644 --- a/checks/check_extra715 +++ b/checks/check_extra715 @@ -14,6 +14,7 @@ CHECK_ID_extra715="7.15" CHECK_TITLE_extra715="[extra715] Check if Amazon Elasticsearch Service (ES) domains have logging enabled" CHECK_SCORED_extra715="NOT_SCORED" CHECK_TYPE_extra715="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra715="AwsElasticsearchDomain" CHECK_ALTERNATE_check715="extra715" extra715(){ diff --git a/checks/check_extra716 b/checks/check_extra716 index afdb8dd4..db3aba1d 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -14,6 +14,7 @@ CHECK_ID_extra716="7.16" CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public and have cross account access " CHECK_SCORED_extra716="NOT_SCORED" CHECK_TYPE_extra716="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra716="AwsElasticsearchDomain" CHECK_ALTERNATE_check716="extra716" extra716(){ @@ -32,8 +33,8 @@ extra716(){ # If the endpoint starts with "vpc-" it is in a VPC then it is fine. 
if [[ "$ES_DOMAIN_ENDPOINT" =~ ^vpc-* ]];then ES_DOMAIN_VPC=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.VPCOptions.VPCId' --output text) - textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" - else + textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" + else $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) @@ -44,30 +45,30 @@ extra716(){ CONDITION_HAS_PRIVATE_IP=$(echo $condition_ip | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') if [[ $CONDITION_HAS_PRIVATE_IP ]];then CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) - fi + fi CONDITION_HAS_PUBLIC_IP=$(echo $condition_ip | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') if [[ $CONDITION_HAS_PUBLIC_IP ]];then CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) fi CONDITION_HAS_ZERO_NET=$(echo $condition_ip | grep -E '^(0\.0\.0\.0|\*)') CONDITION_HAS_STAR=$(echo $condition_ip | grep -E '^\*') - done + done CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$CONDITION_HAS_ZERO_NET CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR - fi + fi if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then #Prowler will check to read indices or kibaba status if no conditions, condition IP is *, 0.0.0.0/0, 0.0.0.0/8 or any public IP. 
if [[ $TEST_ES_AUTHENTICATION ]];then # check for REST API on port 443 CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") httpStatus $CHECH_ES_HTTPS - if [[ $CHECH_ES_HTTPS -eq "200" ]];then + if [[ $CHECH_ES_HTTPS -eq "200" ]];then textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" else textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - fi + fi # check for Kibana on port 443 CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") httpStatus $CHECH_KIBANA_HTTPS @@ -75,27 +76,27 @@ extra716(){ textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" else textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - fi - else - if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then + fi + else + if [[ $CHECK_ES_DOMAIN_POLICY_OPEN ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\") AUTH NOT TESTED" "$regx" - fi - if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network 0.0.0.0) AUTH NOT TESTED" "$regx" - fi - if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then + fi + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and 
network \"*\") AUTH NOT TESTED" "$regx" fi - if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then + if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) AUTH NOT TESTED" "$regx" fi - fi + fi else if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" - else - textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" - fi + else + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + fi fi rm -f $TEMP_POLICY_FILE fi @@ -104,4 +105,4 @@ extra716(){ textInfo "$regx: No Amazon ES domain found" "$regx" fi done -} +} diff --git a/checks/check_extra717 b/checks/check_extra717 index 0bb04741..74a18937 100644 --- a/checks/check_extra717 +++ b/checks/check_extra717 @@ -14,6 +14,7 @@ CHECK_ID_extra717="7.17" CHECK_TITLE_extra717="[extra717] Check if Elastic Load Balancers have logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra717="NOT_SCORED" CHECK_TYPE_extra717="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra717="AwsElbLoadBalancer" CHECK_ALTERNATE_check717="extra717" extra717(){ diff --git a/checks/check_extra718 b/checks/check_extra718 index 73319113..0d361c3c 100644 --- a/checks/check_extra718 +++ b/checks/check_extra718 @@ -14,6 +14,7 @@ CHECK_ID_extra718="7.18" CHECK_TITLE_extra718="[extra718] Check if S3 buckets have server access logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra718="NOT_SCORED" CHECK_TYPE_extra718="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra718="AwsS3Bucket" 
CHECK_ALTERNATE_check718="extra718" extra718(){ diff --git a/checks/check_extra72 b/checks/check_extra72 index f9fa11b9..52bcd93d 100644 --- a/checks/check_extra72 +++ b/checks/check_extra72 @@ -14,6 +14,7 @@ CHECK_ID_extra72="7.2,7.02" CHECK_TITLE_extra72="[extra72] Ensure there are no EBS Snapshots set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra72="NOT_SCORED" CHECK_TYPE_extra72="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra72="AwsEc2Snapshot" CHECK_ALTERNATE_extra702="extra72" CHECK_ALTERNATE_check72="extra72" CHECK_ALTERNATE_check702="extra72" diff --git a/checks/check_extra720 b/checks/check_extra720 index 1bf63cc5..64fbf043 100644 --- a/checks/check_extra720 +++ b/checks/check_extra720 @@ -14,6 +14,7 @@ CHECK_ID_extra720="7.20" CHECK_TITLE_extra720="[extra720] Check if Lambda functions invoke API operations are being recorded by CloudTrail (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra720="NOT_SCORED" CHECK_TYPE_extra720="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra720="AwsLambdaFunction" CHECK_ALTERNATE_check720="extra720" extra720(){ diff --git a/checks/check_extra721 b/checks/check_extra721 index ac6ca054..d464786a 100644 --- a/checks/check_extra721 +++ b/checks/check_extra721 @@ -14,6 +14,7 @@ CHECK_ID_extra721="7.21" CHECK_TITLE_extra721="[extra721] Check if Redshift cluster has audit logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra721="NOT_SCORED" CHECK_TYPE_extra721="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra721="AwsRedshiftCluster" CHECK_ALTERNATE_check721="extra721" extra721(){ diff --git a/checks/check_extra722 b/checks/check_extra722 index 605f7f19..e90596b7 100644 --- a/checks/check_extra722 +++ b/checks/check_extra722 @@ -14,6 +14,7 @@ CHECK_ID_extra722="7.22" CHECK_TITLE_extra722="[extra722] Check if API Gateway has logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra722="NOT_SCORED" CHECK_TYPE_extra722="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra722="AwsApiGatewayRestApi" 
CHECK_ALTERNATE_check722="extra722" extra722(){ diff --git a/checks/check_extra723 b/checks/check_extra723 index 96039eb9..c527c3c8 100644 --- a/checks/check_extra723 +++ b/checks/check_extra723 @@ -14,6 +14,7 @@ CHECK_ID_extra723="7.23" CHECK_TITLE_extra723="[extra723] Check if RDS Snapshots and Cluster Snapshots are public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra723="NOT_SCORED" CHECK_TYPE_extra723="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra723="AwsRdsDbSnapshot" CHECK_ALTERNATE_check723="extra723" extra723(){ diff --git a/checks/check_extra724 b/checks/check_extra724 index bb750855..2aa08b0f 100644 --- a/checks/check_extra724 +++ b/checks/check_extra724 @@ -14,6 +14,7 @@ CHECK_ID_extra724="7.24" CHECK_TITLE_extra724="[extra724] Check if ACM certificates have Certificate Transparency logging enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra724="NOT_SCORED" CHECK_TYPE_extra724="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra724="AwsCertificateManagerCertificate" CHECK_ALTERNATE_check724="extra724" extra724(){ diff --git a/checks/check_extra725 b/checks/check_extra725 index 257a3d8e..5f4fc99e 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -15,6 +15,7 @@ CHECK_ID_extra725="7.25" CHECK_TITLE_extra725="[extra725] Check if S3 buckets have Object-level logging enabled in CloudTrail (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra725="NOT_SCORED" CHECK_TYPE_extra725="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra725="AwsS3Bucket" CHECK_ALTERNATE_check725="extra725" # per Object-level logging is not configured at Bucket level but at CloudTrail trail level diff --git a/checks/check_extra727 b/checks/check_extra727 index 5e14e2b5..47c41fcb 100644 --- a/checks/check_extra727 +++ b/checks/check_extra727 @@ -15,6 +15,7 @@ CHECK_ID_extra727="7.27" CHECK_TITLE_extra727="[extra727] Check if SQS queues have policy set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra727="NOT_SCORED" CHECK_TYPE_extra727="EXTRA" 
+CHECK_ASFF_RESOURCE_TYPE_extra727="AwsSqsQueue" CHECK_ALTERNATE_check727="extra727" extra727(){ diff --git a/checks/check_extra728 b/checks/check_extra728 index eb27a9de..5399822d 100644 --- a/checks/check_extra728 +++ b/checks/check_extra728 @@ -15,6 +15,7 @@ CHECK_ID_extra728="7.28" CHECK_TITLE_extra728="[extra728] Check if SQS queues have Server Side Encryption enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra728="NOT_SCORED" CHECK_TYPE_extra728="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra728="AwsSqsQueue" CHECK_ALTERNATE_check728="extra728" extra728(){ diff --git a/checks/check_extra729 b/checks/check_extra729 index 603acbb0..756aa09c 100644 --- a/checks/check_extra729 +++ b/checks/check_extra729 @@ -15,6 +15,7 @@ CHECK_ID_extra729="7.29" CHECK_TITLE_extra729="[extra729] Ensure there are no EBS Volumes unencrypted (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra729="NOT_SCORED" CHECK_TYPE_extra729="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra729="AwsEc2Volume" CHECK_ALTERNATE_check729="extra729" extra729(){ diff --git a/checks/check_extra73 b/checks/check_extra73 index a587a60f..86b07197 100644 --- a/checks/check_extra73 +++ b/checks/check_extra73 @@ -15,6 +15,7 @@ CHECK_ID_extra73="7.3,7.03" CHECK_TITLE_extra73="[extra73] Ensure there are no S3 buckets open to the Everyone or Any AWS user (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra73="NOT_SCORED" CHECK_TYPE_extra73="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra73="AwsS3Bucket" CHECK_ALTERNATE_extra703="extra73" CHECK_ALTERNATE_check73="extra73" CHECK_ALTERNATE_check703="extra73" diff --git a/checks/check_extra730 b/checks/check_extra730 index 53be1c58..06266cd3 100644 --- a/checks/check_extra730 +++ b/checks/check_extra730 @@ -17,6 +17,7 @@ CHECK_ID_extra730="7.30" CHECK_TITLE_extra730="[extra730] Check if ACM Certificates are about to expire in $DAYS_TO_EXPIRE_THRESHOLD days or less (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra730="NOT_SCORED" 
CHECK_TYPE_extra730="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra730="AwsCertificateManagerCertificate" CHECK_ALTERNATE_check730="extra730" extra730(){ diff --git a/checks/check_extra731 b/checks/check_extra731 index 0baa1b6e..744d28a1 100644 --- a/checks/check_extra731 +++ b/checks/check_extra731 @@ -15,6 +15,7 @@ CHECK_ID_extra731="7.31" CHECK_TITLE_extra731="[extra731] Check if SNS topics have policy set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra731="NOT_SCORED" CHECK_TYPE_extra731="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra731="AwsSnsTopic" CHECK_ALTERNATE_check731="extra731" extra731(){ diff --git a/checks/check_extra732 b/checks/check_extra732 index 2fc63a22..30c6ec7f 100644 --- a/checks/check_extra732 +++ b/checks/check_extra732 @@ -15,6 +15,7 @@ CHECK_ID_extra732="7.32" CHECK_TITLE_extra732="[extra732] Check if Geo restrictions are enabled in CloudFront distributions (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra732="NOT_SCORED" CHECK_TYPE_extra732="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra732="AwsCloudFrontDistribution" CHECK_ALTERNATE_check732="extra732" extra732(){ diff --git a/checks/check_extra734 b/checks/check_extra734 index bebd2bfc..02d7f4ec 100644 --- a/checks/check_extra734 +++ b/checks/check_extra734 @@ -14,6 +14,7 @@ CHECK_ID_extra734="7.34" CHECK_TITLE_extra734="[extra734] Check if S3 buckets have default encryption (SSE) enabled or use a bucket policy to enforce it (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra734="NOT_SCORED" CHECK_TYPE_extra734="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra734="AwsS3Bucket" CHECK_ALTERNATE_check734="extra734" extra734(){ diff --git a/checks/check_extra735 b/checks/check_extra735 index cc6ad238..0e49b698 100644 --- a/checks/check_extra735 +++ b/checks/check_extra735 @@ -14,6 +14,7 @@ CHECK_ID_extra735="7.35" CHECK_TITLE_extra735="[extra735] Check if RDS instances storage is encrypted (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra735="NOT_SCORED" 
CHECK_TYPE_extra735="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra735="AwsRdsDbInstance" CHECK_ALTERNATE_check735="extra735" extra735(){ diff --git a/checks/check_extra736 b/checks/check_extra736 index 0b5993a7..19082827 100644 --- a/checks/check_extra736 +++ b/checks/check_extra736 @@ -14,6 +14,7 @@ CHECK_ID_extra736="7.36" CHECK_TITLE_extra736="[extra736] Check exposed KMS keys (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra736="NOT_SCORED" CHECK_TYPE_extra736="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra736="AwsKmsKey" CHECK_ALTERNATE_check736="extra736" extra736(){ diff --git a/checks/check_extra737 b/checks/check_extra737 index d10a301f..3e1b6bf2 100644 --- a/checks/check_extra737 +++ b/checks/check_extra737 @@ -14,6 +14,7 @@ CHECK_ID_extra737="7.37" CHECK_TITLE_extra737="[extra737] Check KMS keys with key rotation disabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra737="NOT_SCORED" CHECK_TYPE_extra737="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra737="AwsKmsKey" CHECK_ALTERNATE_check737="extra737" extra737(){ diff --git a/checks/check_extra738 b/checks/check_extra738 index bc2b9dc5..c5b59eec 100644 --- a/checks/check_extra738 +++ b/checks/check_extra738 @@ -14,6 +14,7 @@ CHECK_ID_extra738="7.38" CHECK_TITLE_extra738="[extra738] Check if CloudFront distributions are set to HTTPS (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra738="NOT_SCORED" CHECK_TYPE_extra738="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra738="AwsCloudFrontDistribution" CHECK_ALTERNATE_check738="extra738" extra738(){ diff --git a/checks/check_extra739 b/checks/check_extra739 index 33ff5ecb..b1280683 100644 --- a/checks/check_extra739 +++ b/checks/check_extra739 @@ -14,6 +14,7 @@ CHECK_ID_extra739="7.39" CHECK_TITLE_extra739="[extra739] Check if RDS instances have backup enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra739="NOT_SCORED" CHECK_TYPE_extra739="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra739="AwsRdsDbInstance" CHECK_ALTERNATE_check739="extra739" 
extra739(){ @@ -21,12 +22,12 @@ extra739(){ LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) if [[ $LIST_OF_RDS_INSTANCES ]];then for rdsinstance in $LIST_OF_RDS_INSTANCES; do - # if retention is 0 then is disabled + # if retention is 0 then is disabled BACKUP_RETENTION=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].BackupRetentionPeriod' --output text) if [[ $BACKUP_RETENTION == "0" ]]; then textFail "$regx: RDS instance $rdsinstance has not backup enabled!" "$regx" else - textPass "$regx: RDS instance $rdsinstance has backup enabled with retention period $BACKUP_RETENTION days " "$regx" + textPass "$regx: RDS instance $rdsinstance has backup enabled with retention period $BACKUP_RETENTION days" "$regx" fi done else diff --git a/checks/check_extra74 b/checks/check_extra74 index 68dbfa92..f700f1c4 100644 --- a/checks/check_extra74 +++ b/checks/check_extra74 @@ -14,6 +14,7 @@ CHECK_ID_extra74="7.4,7.04" CHECK_TITLE_extra74="[extra74] Ensure there are no Security Groups without ingress filtering being used (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra74="NOT_SCORED" CHECK_TYPE_extra74="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra74="AwsEc2SecurityGroup" CHECK_ALTERNATE_extra704="extra74" CHECK_ALTERNATE_check74="extra74" CHECK_ALTERNATE_check704="extra74" diff --git a/checks/check_extra740 b/checks/check_extra740 index d74c7600..5fa00518 100644 --- a/checks/check_extra740 +++ b/checks/check_extra740 @@ -14,6 +14,7 @@ CHECK_ID_extra740="7.40" CHECK_TITLE_extra740="[extra740] Check if EBS snapshots are encrypted (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra740="NOT_SCORED" CHECK_TYPE_extra740="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra740="AwsEc2Snapshot" CHECK_ALTERNATE_check740="extra740" extra740(){ @@ -30,7 +31,7 @@ extra740(){ fi done else - textInfo "$regx: No EBS 
Snapshots found" "$regx" + textInfo "$regx: No EBS Snapshots found" "$regx" fi done } diff --git a/checks/check_extra741 b/checks/check_extra741 index f4e54d2c..20b7b94a 100644 --- a/checks/check_extra741 +++ b/checks/check_extra741 @@ -14,14 +14,15 @@ CHECK_ID_extra741="7.41" CHECK_TITLE_extra741="[extra741] Find secrets in EC2 User Data (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra741="NOT_SCORED" CHECK_TYPE_extra741="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra741="AwsEc2Instance" CHECK_ALTERNATE_check741="extra741" extra741(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER - fi + fi textInfo "Looking for secrets in EC2 User Data in instances across all regions... (max 100 instances per region use -m to increase it) " for regx in $REGIONS; do @@ -56,7 +57,7 @@ extra741(){ textFail "$regx: Potential secret found in $instance" "$regx" fi fi - else + else textPass "$regx: No secrets found in $instance User Data or it is empty" "$regx" fi done diff --git a/checks/check_extra742 b/checks/check_extra742 index 8d78ab22..309f75d5 100644 --- a/checks/check_extra742 +++ b/checks/check_extra742 @@ -14,11 +14,12 @@ CHECK_ID_extra742="7.42" CHECK_TITLE_extra742="[extra742] Find secrets in CloudFormation outputs (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra742="NOT_SCORED" CHECK_TYPE_extra742="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra742="AwsCloudFormationStack" CHECK_ALTERNATE_check742="extra742" extra742(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! 
-d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi diff --git a/checks/check_extra743 b/checks/check_extra743 index e24326f9..4cd22003 100644 --- a/checks/check_extra743 +++ b/checks/check_extra743 @@ -14,6 +14,7 @@ CHECK_ID_extra743="7.43" CHECK_TITLE_extra743="[extra743] Check if API Gateway has client certificate enabled to access your backend endpoint (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra743="NOT_SCORED" CHECK_TYPE_extra743="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra743="AwsApiGatewayRestApi" CHECK_ALTERNATE_check743="extra743" extra743(){ @@ -23,15 +24,15 @@ extra743(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) LIST_OF_STAGES=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query 'item[*].stageName' --output text) - if [[ $LIST_OF_STAGES ]]; then + if [[ $LIST_OF_STAGES ]]; then for stage in $LIST_OF_STAGES; do CHECK_CERTIFICATE=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query "item[?stageName==\`$stage\`].clientCertificateId" --output text) if [[ $CHECK_CERTIFICATE ]]; then textPass "$regx: API Gateway $API_GW_NAME ID $api in $stage has client certificate enabled" "$regx" - else + else textFail "$regx: API Gateway $API_GW_NAME ID $api in $stage has not client certificate enabled" "$regx" - fi - done + fi + done fi done else diff --git a/checks/check_extra744 b/checks/check_extra744 index 6b7bfb3a..4bc9edd6 100644 --- a/checks/check_extra744 +++ b/checks/check_extra744 @@ -14,6 +14,7 @@ CHECK_ID_extra744="7.44" CHECK_TITLE_extra744="[extra744] Check if API Gateway has a WAF ACL attached (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra744="NOT_SCORED" CHECK_TYPE_extra744="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra744="AwsApiGatewayRestApi" CHECK_ALTERNATE_check744="extra744" extra744(){ 
@@ -23,15 +24,15 @@ extra744(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) LIST_OF_STAGES=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query 'item[*].stageName' --output text) - if [[ $LIST_OF_STAGES ]]; then + if [[ $LIST_OF_STAGES ]]; then for stage in $LIST_OF_STAGES; do CHECK_WAFACL=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-stages --rest-api-id $api --query "item[?stageName==\`$stage\`].webAclArn" --output text) if [[ $CHECK_WAFACL ]]; then textPass "$regx: API Gateway $API_GW_NAME ID $api in $stage has $CHECK_WAFACL WAF ACL attached" "$regx" - else + else textFail "$regx: API Gateway $API_GW_NAME ID $api in $stage has not WAF ACL attached" "$regx" - fi - done + fi + done fi done else diff --git a/checks/check_extra745 b/checks/check_extra745 index 98a98e63..b8674e5c 100644 --- a/checks/check_extra745 +++ b/checks/check_extra745 @@ -14,6 +14,7 @@ CHECK_ID_extra745="7.45" CHECK_TITLE_extra745="[extra745] Check if API Gateway endpoint is public or private (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra745="NOT_SCORED" CHECK_TYPE_extra745="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra745="AwsApiGatewayRestApi" CHECK_ALTERNATE_check745="extra745" extra745(){ @@ -23,7 +24,7 @@ extra745(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) ENDPOINT_CONFIG_TYPE=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-rest-api --rest-api-id $api --query endpointConfiguration.types --output text) - if [[ $ENDPOINT_CONFIG_TYPE ]]; then + if [[ $ENDPOINT_CONFIG_TYPE ]]; then case $ENDPOINT_CONFIG_TYPE in PRIVATE ) textPass "$regx: API Gateway $API_GW_NAME ID $api is set as $ENDPOINT_CONFIG_TYPE" "$regx" diff --git a/checks/check_extra746 b/checks/check_extra746 index 073ca55f..79de26e7 100644 --- 
a/checks/check_extra746 +++ b/checks/check_extra746 @@ -14,6 +14,7 @@ CHECK_ID_extra746="7.46" CHECK_TITLE_extra746="[extra746] Check if API Gateway has configured authorizers (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra746="NOT_SCORED" CHECK_TYPE_extra746="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra746="AwsApiGatewayRestApi" CHECK_ALTERNATE_check746="extra746" extra746(){ @@ -23,7 +24,7 @@ extra746(){ for api in $LIST_OF_REST_APIS; do API_GW_NAME=$($AWSCLI apigateway get-rest-apis $PROFILE_OPT --region $regx --query "items[?id==\`$api\`].name" --output text) AUTHORIZER_CONFIGURED=$($AWSCLI $PROFILE_OPT --region $regx apigateway get-authorizers --rest-api-id $api --query items[*].type --output text) - if [[ $AUTHORIZER_CONFIGURED ]]; then + if [[ $AUTHORIZER_CONFIGURED ]]; then textPass "$regx: API Gateway $API_GW_NAME ID $api has authorizer configured" "$regx" else textFail "$regx: API Gateway $API_GW_NAME ID $api has not authorizer configured" "$regx" diff --git a/checks/check_extra747 b/checks/check_extra747 index b9e28b1f..027359bf 100644 --- a/checks/check_extra747 +++ b/checks/check_extra747 @@ -14,6 +14,7 @@ CHECK_ID_extra747="7.47" CHECK_TITLE_extra747="[extra747] Check if RDS instances is integrated with CloudWatch Logs (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra747="NOT_SCORED" CHECK_TYPE_extra747="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra747="AwsRdsDbInstance" CHECK_ALTERNATE_check747="extra747" extra747(){ @@ -21,7 +22,7 @@ extra747(){ LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) if [[ $LIST_OF_RDS_INSTANCES ]];then for rdsinstance in $LIST_OF_RDS_INSTANCES; do - # if retention is 0 then is disabled + # if retention is 0 then is disabled ENABLED_CLOUDWATCHLOGS_EXPORTS=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].EnabledCloudwatchLogsExports' 
--output text) if [[ $ENABLED_CLOUDWATCHLOGS_EXPORTS ]]; then textPass "$regx: RDS instance $rdsinstance is shipping $ENABLED_CLOUDWATCHLOGS_EXPORTS to CloudWatch Logs" "$regx" diff --git a/checks/check_extra748 b/checks/check_extra748 index 50056980..9aa71147 100644 --- a/checks/check_extra748 +++ b/checks/check_extra748 @@ -14,6 +14,7 @@ CHECK_ID_extra748="7.48" CHECK_TITLE_extra748="[extra748] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to any port (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra748="NOT_SCORED" CHECK_TYPE_extra748="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra748="AwsEc2SecurityGroup" CHECK_ALTERNATE_check748="extra748" extra748(){ diff --git a/checks/check_extra749 b/checks/check_extra749 index 3c4a53be..0a74e394 100644 --- a/checks/check_extra749 +++ b/checks/check_extra749 @@ -14,6 +14,7 @@ CHECK_ID_extra749="7.49" CHECK_TITLE_extra749="[extra749] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Oracle ports 1521 or 2483 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra749="NOT_SCORED" CHECK_TYPE_extra749="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra749="AwsEc2SecurityGroup" CHECK_ALTERNATE_check749="extra749" extra749(){ diff --git a/checks/check_extra75 b/checks/check_extra75 index f2007621..a93d01e2 100644 --- a/checks/check_extra75 +++ b/checks/check_extra75 @@ -14,6 +14,7 @@ CHECK_ID_extra75="7.5,7.05" CHECK_TITLE_extra75="[extra75] Ensure there are no Security Groups not being used (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra75="NOT_SCORED" CHECK_TYPE_extra75="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra75="AwsEc2SecurityGroup" CHECK_ALTERNATE_extra705="extra75" CHECK_ALTERNATE_check75="extra75" CHECK_ALTERNATE_check705="extra75" diff --git a/checks/check_extra750 b/checks/check_extra750 index 1b1d62e3..fd105bfc 100644 --- a/checks/check_extra750 +++ b/checks/check_extra750 @@ -14,6 +14,7 @@ CHECK_ID_extra750="7.50" CHECK_TITLE_extra750="[extra750] Ensure no security groups allow 
ingress from 0.0.0.0/0 or ::/0 to MySQL port 3306 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra750="NOT_SCORED" CHECK_TYPE_extra750="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra750="AwsEc2SecurityGroup" CHECK_ALTERNATE_check750="extra750" extra750(){ diff --git a/checks/check_extra751 b/checks/check_extra751 index 3c8255ea..0d623ba8 100644 --- a/checks/check_extra751 +++ b/checks/check_extra751 @@ -14,6 +14,7 @@ CHECK_ID_extra751="7.51" CHECK_TITLE_extra751="[extra751] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Postgres port 5432 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra751="NOT_SCORED" CHECK_TYPE_extra751="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra751="AwsEc2SecurityGroup" CHECK_ALTERNATE_check751="extra751" extra751(){ diff --git a/checks/check_extra752 b/checks/check_extra752 index 650c7e82..e6472181 100644 --- a/checks/check_extra752 +++ b/checks/check_extra752 @@ -14,6 +14,7 @@ CHECK_ID_extra752="7.52" CHECK_TITLE_extra752="[extra752] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Redis port 6379 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra752="NOT_SCORED" CHECK_TYPE_extra752="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra752="AwsEc2SecurityGroup" CHECK_ALTERNATE_check752="extra752" extra752(){ diff --git a/checks/check_extra753 b/checks/check_extra753 index 2ba23a51..44824b93 100644 --- a/checks/check_extra753 +++ b/checks/check_extra753 @@ -14,6 +14,7 @@ CHECK_ID_extra753="7.53" CHECK_TITLE_extra753="[extra753] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to MongoDB ports 27017 and 27018 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra753="NOT_SCORED" CHECK_TYPE_extra753="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra753="AwsEc2SecurityGroup" CHECK_ALTERNATE_check753="extra753" extra753(){ diff --git a/checks/check_extra754 b/checks/check_extra754 index 0b7472e0..30e8a939 100644 --- a/checks/check_extra754 +++ b/checks/check_extra754 @@ -14,6 +14,7 @@ 
CHECK_ID_extra754="7.54" CHECK_TITLE_extra754="[extra754] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Cassandra ports 7199 or 9160 or 8888 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra754="NOT_SCORED" CHECK_TYPE_extra754="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra754="AwsEc2SecurityGroup" CHECK_ALTERNATE_check754="extra754" extra754(){ diff --git a/checks/check_extra755 b/checks/check_extra755 index 0481f444..e0164d76 100644 --- a/checks/check_extra755 +++ b/checks/check_extra755 @@ -14,6 +14,7 @@ CHECK_ID_extra755="7.55" CHECK_TITLE_extra755="[extra755] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Memcached port 11211 (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra755="NOT_SCORED" CHECK_TYPE_extra755="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra755="AwsEc2SecurityGroup" CHECK_ALTERNATE_check755="extra755" extra755(){ diff --git a/checks/check_extra756 b/checks/check_extra756 index 538fa55b..69dabb60 100644 --- a/checks/check_extra756 +++ b/checks/check_extra756 @@ -14,6 +14,7 @@ CHECK_ID_extra756="7.56" CHECK_TITLE_extra756="[extra756] Check if Redshift cluster is Public Accessible (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra756="NOT_SCORED" CHECK_TYPE_extra756="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra756="AwsRedshiftCluster" CHECK_ALTERNATE_check756="extra756" extra756(){ @@ -22,10 +23,10 @@ extra756(){ if [[ $LIST_OF_RS_CLUSTERS ]];then for cluster in $LIST_OF_RS_CLUSTERS; do IS_PUBLICLY_ACCESSIBLE=$($AWSCLI $PROFILE_OPT redshift describe-clusters --region $regx --cluster-identifier $cluster --query Clusters[*].PubliclyAccessible --output text|grep True) - if [[ $IS_PUBLICLY_ACCESSIBLE ]]; then - textFail "$regx: Redshift cluster $cluster is publicly accessible" "$regx" + if [[ $IS_PUBLICLY_ACCESSIBLE ]]; then + textFail "$regx: Redshift cluster $cluster is publicly accessible" "$regx" else - textPass "$regx: Redshift cluster $cluster is not publicly accessible" "$regx" + textPass "$regx: 
Redshift cluster $cluster is not publicly accessible" "$regx" fi done else diff --git a/checks/check_extra757 b/checks/check_extra757 index 0320081a..a5ddf6fd 100644 --- a/checks/check_extra757 +++ b/checks/check_extra757 @@ -14,6 +14,7 @@ CHECK_ID_extra757="7.57" CHECK_TITLE_extra757="[extra757] Check EC2 Instances older than 6 months (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra757="NOT_SCORED" CHECK_TYPE_extra757="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra757="AwsEc2Instance" CHECK_ALTERNATE_check757="extra757" extra757(){ diff --git a/checks/check_extra758 b/checks/check_extra758 index 1c402aa5..5f5fe40b 100644 --- a/checks/check_extra758 +++ b/checks/check_extra758 @@ -14,6 +14,7 @@ CHECK_ID_extra758="7.58" CHECK_TITLE_extra758="[extra758] Check EC2 Instances older than 12 months (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra758="NOT_SCORED" CHECK_TYPE_extra758="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra758="AwsEc2Instance" CHECK_ALTERNATE_check758="extra758" extra758(){ diff --git a/checks/check_extra759 b/checks/check_extra759 index dbd07632..6b3ff15e 100644 --- a/checks/check_extra759 +++ b/checks/check_extra759 @@ -14,14 +14,15 @@ CHECK_ID_extra759="7.59" CHECK_TITLE_extra759="[extra759] Find secrets in Lambda functions variables (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra759="NOT_SCORED" CHECK_TYPE_extra759="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra759="AwsLambdaFunction" CHECK_ALTERNATE_check759="extra759" extra759(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER - fi + fi textInfo "Looking for secrets in Lambda variables across all regions... 
" for regx in $REGIONS; do diff --git a/checks/check_extra760 b/checks/check_extra760 index eac466e7..ee66c791 100644 --- a/checks/check_extra760 +++ b/checks/check_extra760 @@ -14,11 +14,12 @@ CHECK_ID_extra760="7.60" CHECK_TITLE_extra760="[extra760] Find secrets in Lambda functions code (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra760="NOT_SCORED" CHECK_TYPE_extra760="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra760="AwsLambdaFunction" CHECK_ALTERNATE_check760="extra760" extra760(){ SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" - if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi diff --git a/checks/check_extra762 b/checks/check_extra762 index b54cd5ac..c66974e6 100644 --- a/checks/check_extra762 +++ b/checks/check_extra762 @@ -14,6 +14,7 @@ CHECK_ID_extra762="7.62" CHECK_TITLE_extra762="[extra762] Find obsolete Lambda runtimes (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra762="NOT_SCORED" CHECK_TYPE_extra762="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra762="AwsLambdaFunction" CHECK_ALTERNATE_check762="extra762" extra762(){ diff --git a/checks/check_extra763 b/checks/check_extra763 index 7c839cf3..9ceb9494 100644 --- a/checks/check_extra763 +++ b/checks/check_extra763 @@ -14,6 +14,7 @@ CHECK_ID_extra763="7.63" CHECK_TITLE_extra763="[extra763] Check if S3 buckets have object versioning enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra763="NOT_SCORED" CHECK_TYPE_extra763="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra763="AwsS3Bucket" CHECK_ALTERNATE_check763="extra763" extra763(){ @@ -28,7 +29,7 @@ extra763(){ fi if [[ $(echo "$BUCKET_VERSIONING_ENABLED" | grep "^Enabled$") ]]; then textPass "Bucket $bucket has versioning enabled" - else + else textFail "Bucket $bucket has versioning disabled!" 
fi done diff --git a/checks/check_extra764 b/checks/check_extra764 index 8b849208..8ec26cd4 100644 --- a/checks/check_extra764 +++ b/checks/check_extra764 @@ -14,6 +14,7 @@ CHECK_ID_extra764="7.64" CHECK_TITLE_extra764="[extra764] Check if S3 buckets have secure transport policy (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra764="NOT_SCORED" CHECK_TYPE_extra764="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra764="AwsS3Bucket" CHECK_ALTERNATE_check764="extra764" extra764(){ diff --git a/checks/check_extra767 b/checks/check_extra767 index 1bbe4d8a..1683d466 100644 --- a/checks/check_extra767 +++ b/checks/check_extra767 @@ -14,6 +14,7 @@ CHECK_ID_extra767="7.67" CHECK_TITLE_extra767="[extra767] Check if CloudFront distributions have Field Level Encryption enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra767="NOT_SCORED" CHECK_TYPE_extra767="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra767="AwsCloudFrontDistribution" CHECK_ALTERNATE_check767="extra767" extra767(){ @@ -23,7 +24,7 @@ extra767(){ CHECK_FLE=$($AWSCLI cloudfront get-distribution --id $dist --query Distribution.DistributionConfig.DefaultCacheBehavior.FieldLevelEncryptionId $PROFILE_OPT --output text) if [[ $CHECK_FLE ]]; then textPass "CloudFront distribution $dist has Field Level Encryption enabled" "$regx" - else + else textFail "CloudFront distribution $dist has Field Level Encryption disabled!" 
"$regx" fi done diff --git a/checks/check_extra768 b/checks/check_extra768 index 2c10e1dd..b357c72e 100644 --- a/checks/check_extra768 +++ b/checks/check_extra768 @@ -14,6 +14,7 @@ CHECK_ID_extra768="7.68" CHECK_TITLE_extra768="[extra768] Find secrets in ECS task definitions variables (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra768="NOT_SCORED" CHECK_TYPE_extra768="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra768="AwsEcsTaskDefinition" CHECK_ALTERNATE_check768="extra768" extra768(){ diff --git a/checks/check_extra770 b/checks/check_extra770 index f39de564..b657bbaa 100644 --- a/checks/check_extra770 +++ b/checks/check_extra770 @@ -14,6 +14,7 @@ CHECK_ID_extra770="7.70" CHECK_TITLE_extra770="[extra770] Check for internet facing EC2 instances with Instance Profiles attached (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra770="NOT_SCORED" CHECK_TYPE_extra770="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra770="AwsEc2Instance" CHECK_ALTERNATE_check770="extra770" extra770(){ diff --git a/checks/check_extra771 b/checks/check_extra771 index ecd240c3..0c940649 100644 --- a/checks/check_extra771 +++ b/checks/check_extra771 @@ -14,6 +14,7 @@ CHECK_ID_extra771="7.71" CHECK_TITLE_extra771="[extra771] Check if S3 buckets have policies which allow WRITE access (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra771="NOT_SCORED" CHECK_TYPE_extra771="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra771="AwsS3Bucket" CHECK_ALTERNATE_check771="extra771" extra771(){ diff --git a/checks/check_extra772 b/checks/check_extra772 index 83f3cdd5..088be100 100644 --- a/checks/check_extra772 +++ b/checks/check_extra772 @@ -14,6 +14,7 @@ CHECK_ID_extra772="7.72" CHECK_TITLE_extra772="[extra772] Check if elastic IPs are unused (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra772="NOT_SCORED" CHECK_TYPE_extra772="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra772="AwsEc2Eip" CHECK_ALTERNATE_check772="extra772" extra772(){ diff --git a/checks/check_extra773 b/checks/check_extra773 
index ecd26385..dbfba0ca 100644 --- a/checks/check_extra773 +++ b/checks/check_extra773 @@ -14,6 +14,7 @@ CHECK_ID_extra773="7.73" CHECK_TITLE_extra773="[extra773] Check if CloudFront distributions are using WAF (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra773="NOT_SCORED" CHECK_TYPE_extra773="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra773="AwsCloudFrontDistribution" CHECK_ALTERNATE_check773="extra773" extra773(){ diff --git a/checks/check_extra774 b/checks/check_extra774 index 83e2aa6f..58042dd3 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -14,6 +14,7 @@ CHECK_ID_extra774="7.74" CHECK_TITLE_extra774="[extra774] Check if user have unused console login" CHECK_SCORED_extra774="NOT_SCORED" CHECK_TYPE_extra774="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra774="AwsIamUser" CHECK_ALTERNATE_check774="extra774" extra774(){ diff --git a/checks/check_extra777 b/checks/check_extra777 index 1ccf541f..fa3d8c0c 100644 --- a/checks/check_extra777 +++ b/checks/check_extra777 @@ -12,12 +12,13 @@ # specific language governing permissions and limitations under the License. 
# Current VPC Limit is 120 rules (60 inbound and 60 outbound) -# Reference: https://docs.aws.amazon.com/vpc/latest/userguide/amazon-vpc-limits.html +# Reference: https://docs.aws.amazon.com/vpc/latest/userguide/amazon-vpc-limits.html CHECK_ID_extra777="7.77" CHECK_TITLE_extra777="[extra777] Find VPC security groups with many ingress or egress rules (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra777="NOT_SCORED" CHECK_TYPE_extra777="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra777="AwsEc2SecurityGroup" CHECK_ALTERNATE_check777="extra777" extra777(){ @@ -31,7 +32,7 @@ extra777(){ --query 'SecurityGroups[*].GroupId' \ --output text | xargs ) - + for SECURITY_GROUP in ${SECURITY_GROUP_IDS}; do INGRESS_TOTAL=$(${AWSCLI} ec2 describe-security-groups \ @@ -51,8 +52,8 @@ extra777(){ ) if [[ (${INGRESS_TOTAL} -ge ${THRESHOLD}) || (${EGRESS_TOTAL} -ge ${THRESHOLD}) ]]; then - textFail "${regx}: ${SECURITY_GROUP} has ${INGRESS_TOTAL} inbound rules and ${EGRESS_TOTAL} outbound rules." "${regx}" + textFail "${regx}: ${SECURITY_GROUP} has ${INGRESS_TOTAL} inbound rules and ${EGRESS_TOTAL} outbound rules" "${regx}" fi done done -} \ No newline at end of file +} diff --git a/checks/check_extra778 b/checks/check_extra778 index 42672348..8d511687 100644 --- a/checks/check_extra778 +++ b/checks/check_extra778 @@ -15,6 +15,7 @@ CHECK_ID_extra778="7.78" CHECK_TITLE_extra778="[extra778] Find VPC security groups with wide-open public IPv4 CIDR ranges (non-RFC1918) (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra778="NOT_SCORED" CHECK_TYPE_extra778="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra778="AwsEc2SecurityGroup" CHECK_ALTERNATE_check778="extra778" extra778(){ @@ -27,7 +28,7 @@ extra778(){ local DIRECTION=$2 local DIRECTION_FILTER="" local REGION=$3 - + case ${DIRECTION} in "inbound") DIRECTION_FILTER="IpPermissions" @@ -44,14 +45,14 @@ extra778(){ --region ${REGION} \ --output text | xargs ) - + for CIDR_IP in ${CIDR_IP_LIST}; do if [[ ! 
${CIDR_IP} =~ ${RFC1918_REGEX} ]]; then CIDR=$(echo ${CIDR_IP} | cut -d"/" -f2 | xargs) # Edge case "0.0.0.0/0" for RDP and SSH are checked already by check41 and check42 if [[ ${CIDR} < ${CIDR_THRESHOLD} && 0 < ${CIDR} ]]; then - textFail "${REGION}: ${SECURITY_GROUP} has potential wide-open non-RFC1918 address ${CIDR_IP} in ${DIRECTION} rule." "${REGION}" + textFail "${REGION}: ${SECURITY_GROUP} has potential wide-open non-RFC1918 address ${CIDR_IP} in ${DIRECTION} rule" "${REGION}" fi fi done @@ -69,4 +70,4 @@ extra778(){ check_cidr "${SECURITY_GROUP}" "outbound" "${regx}" done done -} \ No newline at end of file +} diff --git a/checks/check_extra779 b/checks/check_extra779 index 627bc51a..02a32868 100644 --- a/checks/check_extra779 +++ b/checks/check_extra779 @@ -12,8 +12,9 @@ # specific language governing permissions and limitations under the License. CHECK_ID_extra779="7.79" CHECK_TITLE_extra779="[extra779] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Elasticsearch/Kibana ports" -CHECK_SCORED_extra779="NOT_SCORED" +CHECK_SCORED_extra779="NOT_SCORED" CHECK_TYPE_extra779="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra779="AwsEc2SecurityGroup" CHECK_ALTERNATE_check779="extra779" extra779(){ @@ -38,30 +39,30 @@ extra779(){ $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA779_FILE # in case of exposed instances it does access checks if [[ -s "$TEMP_EXTRA779_FILE" ]];then - while read instance eip ; do + while read instance eip ; do if [[ $TEST_ES_AUTHENTICATION ]];then - if [[ "$eip" != "None" ]];then + if [[ "$eip" != "None" ]];then # check for Elasticsearch on port $ES_API_PORT, rest API HTTP. 
CHECH_HTTP_ES_API=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_API_PORT/_cat/indices") httpStatus $CHECH_HTTP_ES_API - if [[ $CHECH_HTTP_ES_API -eq "200" ]];then + if [[ $CHECH_HTTP_ES_API -eq "200" ]];then textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" - fi + fi # check for port $ES_DATA_PORT TCP, this is the communication port, not: - # test_tcp_connectivity is in include/os_detector + # test_tcp_connectivity is in include/os_detector # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) CHECH_HTTP_ES_DATA=$(test_tcp_connectivity $eip $ES_DATA_PORT 2) - # Using HTTP error codes here as well to reuse httpStatus function + # Using HTTP error codes here as well to reuse httpStatus function # codes for better handling, so 200 is open and 000 is not responding httpStatus $CHECH_HTTP_ES_DATA if [[ $CHECH_HTTP_ES_DATA -eq "200" ]];then textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" - fi + fi # check for Kibana on port $ES_KIBANA_PORT CHECH_HTTP_ES_KIBANA=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_KIBANA_PORT/api/status") @@ -70,25 +71,25 @@ extra779(){ textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" - fi 
- fi + fi + fi else - if [[ "$eip" == "None" ]];then + if [[ "$eip" == "None" ]];then textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" - else + else textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch/Kibana ports $ES_API_PORT/$ES_DATA_PORT/$ES_KIBANA_PORT" "$regx" - fi - fi - if [[ "$eip" == "None" ]];then + fi + fi + if [[ "$eip" == "None" ]];then textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" fi # done < <(cat $TEMP_EXTRA779_FILE | grep -v None$) done < <(cat $TEMP_EXTRA779_FILE) - # while read instance eip ; do + # while read instance eip ; do # textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" # done < <(cat $TEMP_EXTRA779_FILE | grep None$) - fi - rm -rf $TEMP_EXTRA779_FILE + fi + rm -rf $TEMP_EXTRA779_FILE #textFail "$regx: Found Security Group: $sg open to 0.0.0.0/0 on for Elasticsearch ports" "$regx" done else diff --git a/checks/check_extra78 b/checks/check_extra78 index 9bea1a3d..ee652259 100644 --- a/checks/check_extra78 +++ b/checks/check_extra78 @@ -14,6 +14,7 @@ CHECK_ID_extra78="7.8,7.08" CHECK_TITLE_extra78="[extra78] Ensure there are no Public Accessible RDS instances (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra78="NOT_SCORED" CHECK_TYPE_extra78="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra78="AwsRdsDbInstance" CHECK_ALTERNATE_extra708="extra78" CHECK_ALTERNATE_check78="extra78" CHECK_ALTERNATE_check708="extra78" diff --git a/checks/check_extra780 b/checks/check_extra780 index eadb584a..0a694c35 100644 --- a/checks/check_extra780 +++ b/checks/check_extra780 @@ -14,6 +14,7 @@ CHECK_ID_extra780="7.80" CHECK_TITLE_extra780="[extra780] Check if Amazon Elasticsearch Service (ES) domains has Amazon Cognito authentication for Kibana enabled" CHECK_SCORED_extra780="NOT_SCORED" CHECK_TYPE_extra780="EXTRA" 
+CHECK_ASFF_RESOURCE_TYPE_extra780="AwsElasticsearchDomain" CHECK_ALTERNATE_check780="extra780" extra780(){ @@ -24,7 +25,7 @@ extra780(){ CHECK_IF_COGNITO_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.CognitoOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_COGNITO_ENABLED ]];then textPass "$regx: Amazon ES domain $domain has Amazon Cognito authentication for Kibana enabled" "$regx" - else + else textFail "$regx: Amazon ES domain $domain does not have Amazon Cognito authentication for Kibana enabled" "$regx" fi done diff --git a/checks/check_extra781 b/checks/check_extra781 index 2b19cc7e..e4f36620 100644 --- a/checks/check_extra781 +++ b/checks/check_extra781 @@ -14,6 +14,7 @@ CHECK_ID_extra781="7.81" CHECK_TITLE_extra781="[extra781] Check if Amazon Elasticsearch Service (ES) domains has encryption at-rest enabled" CHECK_SCORED_extra781="NOT_SCORED" CHECK_TYPE_extra781="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra781="AwsElasticsearchDomain" CHECK_ALTERNATE_check781="extra781" extra781(){ @@ -24,7 +25,7 @@ extra781(){ CHECK_IF_ENCREST_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.EncryptionAtRestOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_ENCREST_ENABLED ]];then textPass "$regx: Amazon ES domain $domain has encryption at-rest enabled" "$regx" - else + else textFail "$regx: Amazon ES domain $domain does not have encryption at-rest enabled" "$regx" fi done diff --git a/checks/check_extra782 b/checks/check_extra782 index 1c38c7f3..ab169bee 100644 --- a/checks/check_extra782 +++ b/checks/check_extra782 @@ -14,6 +14,7 @@ CHECK_ID_extra782="7.82" CHECK_TITLE_extra782="[extra782] Check if Amazon Elasticsearch Service (ES) domains has node-to-node encryption enabled" CHECK_SCORED_extra782="NOT_SCORED" CHECK_TYPE_extra782="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra782="AwsElasticsearchDomain" 
CHECK_ALTERNATE_check782="extra782" extra782(){ @@ -24,7 +25,7 @@ extra782(){ CHECK_IF_NODETOENCR_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.NodeToNodeEncryptionOptions.Enabled' --output text|grep -i true) if [[ $CHECK_IF_NODETOENCR_ENABLED ]];then textPass "$regx: Amazon ES domain $domain has node-to-node encryption enabled" "$regx" - else + else textFail "$regx: Amazon ES domain $domain does not have node-to-node encryption enabled" "$regx" fi done diff --git a/checks/check_extra783 b/checks/check_extra783 index 4c33b1ac..fa76f6f1 100644 --- a/checks/check_extra783 +++ b/checks/check_extra783 @@ -14,6 +14,7 @@ CHECK_ID_extra783="7.83" CHECK_TITLE_extra783="[extra783] Check if Amazon Elasticsearch Service (ES) domains has enforce HTTPS enabled" CHECK_SCORED_extra783="NOT_SCORED" CHECK_TYPE_extra783="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra783="AwsElasticsearchDomain" CHECK_ALTERNATE_check783="extra783" extra783(){ @@ -24,7 +25,7 @@ extra783(){ CHECK_IF_ENFORCEHTTPS_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.DomainEndpointOptions.EnforceHTTPS' --output text|grep -i true) if [[ $CHECK_IF_ENFORCEHTTPS_ENABLED ]];then textPass "$regx: Amazon ES domain $domain has enforce HTTPS enabled" "$regx" - else + else textFail "$regx: Amazon ES domain $domain does not have enforce HTTPS enabled" "$regx" fi done diff --git a/checks/check_extra784 b/checks/check_extra784 index cb407538..29779d50 100644 --- a/checks/check_extra784 +++ b/checks/check_extra784 @@ -14,6 +14,7 @@ CHECK_ID_extra784="7.84" CHECK_TITLE_extra784="[extra784] Check if Amazon Elasticsearch Service (ES) domains internal user database enabled" CHECK_SCORED_extra784="NOT_SCORED" CHECK_TYPE_extra784="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra784="AwsElasticsearchDomain" CHECK_ALTERNATE_check784="extra784" extra784(){ @@ -24,7 +25,7 @@ extra784(){ 
CHECK_IF_INTERNALDB_ENABLED=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.AdvancedSecurityOptions.InternalUserDatabaseEnabled' --output text|grep -i true) if [[ $CHECK_IF_INTERNALDB_ENABLED ]];then textPass "$regx: Amazon ES domain $domain has internal user database enabled" "$regx" - else + else textFail "$regx: Amazon ES domain $domain does not have internal user database enabled" "$regx" fi done diff --git a/checks/check_extra785 b/checks/check_extra785 index 7e22a689..59ffba11 100644 --- a/checks/check_extra785 +++ b/checks/check_extra785 @@ -14,12 +14,13 @@ CHECK_ID_extra785="7.85" CHECK_TITLE_extra785="[extra785] Check if Amazon Elasticsearch Service (ES) domains have updates available" CHECK_SCORED_extra785="NOT_SCORED" CHECK_TYPE_extra785="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra785="AwsElasticsearchDomain" CHECK_ALTERNATE_check785="extra785" -# NOTE! +# NOTE! # API does not properly shows if an update is available while it is a new version available # that can be done using the Console but not the API, not sure if it is a bug -# I have to investigate further +# I have to investigate further extra785(){ for regx in $REGIONS; do @@ -29,8 +30,8 @@ extra785(){ CHECK_IF_UPDATE_AVAILABLE_AND_VERSION=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.[ServiceSoftwareOptions.UpdateAvailable,ElasticsearchVersion]' --output text) while read update_status es_version;do if [[ $update_status != "False" ]];then - textInfo "$regx: Amazon ES domain $domain v$es_version has updates available " "$regx" - else + textInfo "$regx: Amazon ES domain $domain v$es_version has updates available" "$regx" + else textPass "$regx: Amazon ES domain $domain v$es_version does not have have updates available" "$regx" fi done < <(echo $CHECK_IF_UPDATE_AVAILABLE_AND_VERSION) diff --git a/checks/check_extra79 b/checks/check_extra79 index e45e5ddc..d46a63a7 100644 
--- a/checks/check_extra79 +++ b/checks/check_extra79 @@ -14,6 +14,7 @@ CHECK_ID_extra79="7.9,7.09" CHECK_TITLE_extra79="[extra79] Check for internet facing Elastic Load Balancers (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra79="NOT_SCORED" CHECK_TYPE_extra79="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra79="AwsElbLoadBalancer" CHECK_ALTERNATE_extra709="extra79" CHECK_ALTERNATE_check79="extra79" CHECK_ALTERNATE_check709="extra79" diff --git a/checks/check_sample b/checks/check_sample index 54c9d880..b041e30c 100644 --- a/checks/check_sample +++ b/checks/check_sample @@ -15,7 +15,7 @@ # # here URL to the relevand/official documentation # -# here commands or steps to fix it if avalable, like: +# here commands or steps to fix it if avalable, like: # aws logs put-metric-filter \ # --region us-east-1 \ # --log-group-name CloudTrail/MyCloudTrailLG \ @@ -28,6 +28,7 @@ # CHECK_TITLE_checkN="[checkN] Description (Not Scored) (Not part of CIS benchmark)" # CHECK_SCORED_checkN="NOT_SCORED" # CHECK_TYPE_checkN="EXTRA" +# CHECK_ASFF_RESOURCE_TYPE_checkN="AwsAccount" # Choose appropriate value from https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources # CHECK_ALTERNATE_checkN="extraN" # # extraN(){ diff --git a/iam/prowler-security-hub.json b/iam/prowler-security-hub.json new file mode 100644 index 00000000..309c086a --- /dev/null +++ b/iam/prowler-security-hub.json @@ -0,0 +1,12 @@ +{ + "Version": "2012-10-17", + "Statement": [ + { + "Action": [ + "securityhub:BatchImportFindings" + ], + "Effect": "Allow", + "Resource": "*" + } + ] +} diff --git a/include/colors b/include/colors index 68ac32a4..e938d143 100644 --- a/include/colors +++ b/include/colors @@ -11,9 +11,9 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" && "$MODE" != "securityhub" ]]; then +if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" ]]; then echo "" - echo "$OPTRED ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json, json-asff or securityhub." + echo "$OPTRED ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json or json-asff." usage EXITCODE=1 exit $EXITCODE diff --git a/include/outputs b/include/outputs index 6af14778..40d67a7c 100644 --- a/include/outputs +++ b/include/outputs @@ -18,7 +18,7 @@ textPass(){ fi PASS_COUNTER=$((PASS_COUNTER+1)) - if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" || "$MODE" == "securityhub" ]]; then + if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 else @@ -29,10 +29,11 @@ textPass(){ elif [[ "$MODE" == "json" ]]; then generateJsonOutput "$1" "Pass" elif [[ "$MODE" == "json-asff" ]]; then - generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL" - elif [[ "$MODE" == "securityhub" ]]; then - printf " $OK PASS!$NORMAL %s... 
" "$1" - aws securityhub batch-import-findings --findings "$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL")" | jq -M -r 'if .SuccessCount == 1 then "Successfully submitted finding" else "Failed to upload finding" end' + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") + echo "${JSON_ASFF_OUTPUT}" + if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then + sendToSecurityHub "${JSON_ASFF_OUTPUT}" + fi fi else echo " $OK PASS!$NORMAL $1" @@ -54,11 +55,6 @@ textInfo(){ echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" elif [[ "$MODE" == "json" ]]; then generateJsonOutput "$1" "Info" - elif [[ "$MODE" == "json-asff" ]]; then - generateJsonAsffOutput "$1" "NOT_AVAILABLE" "LOW" - elif [[ "$MODE" == "securityhub" ]]; then - printf " $NOTICE INFO! %s... $NORMAL" "$1" - aws securityhub batch-import-findings --findings "$(generateJsonAsffOutput "$1" "NOT_AVAILABLE" "LOW")" | jq -M -r 'if .SuccessCount == 1 then "Successfully submitted finding" else "Failed to upload finding" end' fi else echo " $NOTICE INFO! $1 $NORMAL" @@ -68,7 +64,7 @@ textInfo(){ textFail(){ FAIL_COUNTER=$((FAIL_COUNTER+1)) EXITCODE=3 - if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" || "$MODE" == "securityhub" ]]; then + if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 else @@ -79,10 +75,11 @@ textFail(){ elif [[ "$MODE" == "json" ]]; then generateJsonOutput "$1" "Fail" elif [[ "$MODE" == "json-asff" ]]; then - generateJsonAsffOutput "$1" "FAILED" "HIGH" - elif [[ "$MODE" == "securityhub" ]]; then - printf " $BAD FAIL! %s... 
$NORMAL" "$1" - aws securityhub batch-import-findings --findings "$(generateJsonAsffOutput "$1" "FAILED" "HIGH")" | jq -M -r 'if .SuccessCount == 1 then "Successfully submitted finding" else "Failed to upload finding" end' + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") + echo "${JSON_ASFF_OUTPUT}" + if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then + sendToSecurityHub "${JSON_ASFF_OUTPUT}" + fi fi else echo " $BAD FAIL! $1 $NORMAL" @@ -178,6 +175,8 @@ generateJsonAsffOutput(){ --arg SCORED "$ITEM_SCORED" \ --arg ITEM_LEVEL "$ITEM_LEVEL" \ --arg TITLE_ID "$TITLE_ID" \ + --arg TYPE "$ASFF_TYPE" \ + --arg RESOURCE_TYPE "$ASFF_RESOURCE_TYPE" \ --arg REPREGION "$REPREGION" \ --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ --arg PROWLER_VERSION "$PROWLER_VERSION" \ @@ -192,7 +191,7 @@ generateJsonAsffOutput(){ "GeneratorId": "prowler-\($PROWLER_VERSION)", "AwsAccountId": $ACCOUNT_NUM, "Types": [ - "Software and Configuration Checks" + $TYPE ], "FirstObservedAt": $TIMESTAMP, "UpdatedAt": $TIMESTAMP, @@ -204,8 +203,8 @@ generateJsonAsffOutput(){ "Description": $MESSAGE, "Resources": [ { - "Type": "AwsAccount", - "Id": "AWS: : : :Account:\($ACCOUNT_NUM)", + "Type": $RESOURCE_TYPE, + "Id": "AWS::::Account:\($ACCOUNT_NUM)", "Partition": "aws", "Region": $REPREGION } diff --git a/include/securityhub_integration b/include/securityhub_integration new file mode 100644 index 00000000..b08f5277 --- /dev/null +++ b/include/securityhub_integration @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +# Checks that the correct mode (json-asff) has been specified if wanting to send check output to AWS Security Hub +# and that Security Hub is enabled in the chosen region +checkSecurityHubCompatibility(){ + if [[ "${MODE}" != "json-asff" ]]; then + echo -e "\n$RED ERROR!$NORMAL Output can only be sent to Security Hub when the output mode is json-asff, i.e. -M json-asff -S\n" + EXITCODE=1 + exit $EXITCODE + fi + SECURITY_HUB_ENABLED=$($AWSCLI securityhub --region $REGION $PROFILE_OPT describe-hub) + if [[ -z "${SECURITY_HUB_ENABLED}" ]]; then + echo -e "\n$RED ERROR!$NORMAL Security Hub is not enabled in $REGION. Enable it by calling '$AWSCLI securityhub --region $REGION $PROFILE_OPT enable-security-hub'\n" + EXITCODE=1 + exit $EXITCODE + fi +} + +sendToSecurityHub(){ + BATCH_IMPORT_RESULT=$($AWSCLI securityhub --region $REGION $PROFILE_OPT batch-import-findings --findings "$1") + # A successful CLI response is: {"SuccessCount": 1,"FailedFindings": [],"FailedCount": 0} + # Therefore, check that SuccessCount is indeed 1 + if [[ -z "${BATCH_IMPORT_RESULT}" ]] || ! 
jq -e '.SuccessCount == 1' <<< "${BATCH_IMPORT_RESULT}" > /dev/null 2>&1; then + echo -e "\n$RED ERROR!$NORMAL Failed to send check output to AWS Security Hub\n" + fi +} diff --git a/prowler b/prowler index 01eb9a35..68dce2bc 100755 --- a/prowler +++ b/prowler @@ -44,6 +44,7 @@ QUIET=0 SEP=',' KEEPCREDREPORT=0 EXITCODE=0 +SEND_TO_SECURITY_HUB=0 SCRIPT_START_TIME=$( date -u +"%Y-%m-%dT%H:%M:%S%z" ) TITLE_ID="" TITLE_TEXT="CALLER ERROR - UNSET TITLE" @@ -64,7 +65,7 @@ USAGE: -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, json-asff, securityhub, csv (separator is ","; data is on stdout; progress on stderr) + -M output mode: text (default), mono, json, json-asff, csv (separator is ","; data is on stdout; progress on stderr) -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) @@ -75,6 +76,7 @@ USAGE: -b do not print Prowler banner -V show version number & exit -s show scoring report + -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") -x specify external directory with custom checks (i.e. /my/own/checks, files must start by "check") -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T @@ -88,7 +90,7 @@ USAGE: exit } -while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsx:A:R:T:" OPTION; do +while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSx:A:R:T:" OPTION; do case $OPTION in h ) usage @@ -145,6 +147,9 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsx:A:R:T:" OPTION; do s ) SCORING=1 ;; + S ) + SEND_TO_SECURITY_HUB=1 + ;; x ) EXTERNAL_CHECKS_PATH=$OPTARG ;; @@ -195,6 +200,7 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT . $PROWLER_DIR/include/check3x . $PROWLER_DIR/include/assume_role . $PROWLER_DIR/include/connection_tests +. 
$PROWLER_DIR/include/securityhub_integration # Get a list of all available AWS Regions REGIONS=$($AWSCLI ec2 describe-regions --query 'Regions[].RegionName' \ @@ -245,6 +251,14 @@ execute_check() { # for example, we might have been passed 1.01 which is another name for 1.1 local alternate_name_var=CHECK_ALTERNATE_$1 local alternate_name=${!alternate_name_var} + # See if this check defines an ASFF Type, if so, use this, falling back to a sane default + # For a list of Types, see: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#securityhub-findings-format-type-taxonomy + local asff_type_var=CHECK_ASFF_TYPE_$1 + ASFF_TYPE="${!asff_type_var:-Software and Configuration Checks}" + # See if this check defines an ASFF Resource Type, if so, use this, falling back to a sane default + # For a list of Resource Types, see: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources + local asff_resource_type_var=CHECK_ASFF_RESOURCE_TYPE_$1 + ASFF_RESOURCE_TYPE="${!asff_resource_type_var:-AwsAccount}" # Generate the credential report, only if it is group1 related which checks we # run so that the checks can safely assume it's available if [ ${alternate_name} ];then @@ -396,10 +410,14 @@ if [[ $PRINTGROUPSONLY == "1" ]]; then fi # Check that jq is installed for JSON outputs -if [[ "$MODE" == "json" || "$MODE" == "json-asff" || "$MODE" == "securityhub" ]]; then +if [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then . 
$PROWLER_DIR/include/jq_detector fi +if [[ "$SEND_TO_SECURITY_HUB" -eq 1 ]]; then + checkSecurityHubCompatibility +fi + # Gather account data / test aws cli connectivity getWhoami From c02811f4115a4b8556e3b62c0a93f9f9d20cfe19 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sat, 11 Apr 2020 03:34:32 +0100 Subject: [PATCH 050/104] Add CHECK_ASFF_RESOURCE_TYPE variables for recently added checks --- checks/check_extra786 | 15 ++++++++------- checks/check_extra787 | 27 ++++++++++++++------------- checks/check_extra788 | 25 +++++++++++++------------ checks/check_extra789 | 11 ++++++----- checks/check_extra790 | 11 ++++++----- 5 files changed, 47 insertions(+), 42 deletions(-) diff --git a/checks/check_extra786 b/checks/check_extra786 index dd9f378e..f011a7f4 100644 --- a/checks/check_extra786 +++ b/checks/check_extra786 @@ -14,30 +14,31 @@ CHECK_ID_extra786="7.86" CHECK_TITLE_extra786="[extra786] Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra786="NOT_SCORED" CHECK_TYPE_extra786="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra786="AwsEc2Instance" CHECK_ALTERNATE_check786="extra786" extra786(){ - for regx in $REGIONS; do + for regx in $REGIONS; do TEMP_EXTRA786_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-es-domain.EXTRA786.XXXXXXXXXX) $AWSCLI ec2 describe-instances $PROFILE_OPT --region $regx \ --query 'Reservations[*].Instances[*].{HttpTokens:MetadataOptions.HttpTokens,HttpEndpoint:MetadataOptions.HttpEndpoint,InstanceId:InstanceId}' \ --output text --max-items $MAXITEMS > $TEMP_EXTRA786_FILE - # if the file contains data, there are instances in that region + # if the file contains data, there are instances in that region if [[ -s "$TEMP_EXTRA786_FILE" ]];then # here we read content from the file fields instanceid httptokens_status httpendpoint - while read httpendpoint httptokens_status instanceid ; do + while read httpendpoint httptokens_status instanceid ; do #echo i:$instanceid 
tok:$httptokens_status end:$httpendpoint if [[ "$httpendpoint" == "enabled" && "$httptokens_status" == "required" ]];then textPass "$regx: EC2 Instance $instanceid has IMDSv2 enabled and required" "$regx" - elif [[ "$httpendpoint" == "disabled" ]];then + elif [[ "$httpendpoint" == "disabled" ]];then textInfo "$regx: EC2 Instance $instanceid has HTTP endpoint access to metadata service disabled" "$regx" else textFail "$regx: EC2 Instance $instanceid has IMDSv2 disabled or not required" "$regx" - fi + fi done < <(cat $TEMP_EXTRA786_FILE) - else + else textInfo "$regx: no EC2 Instances found" "$regx" - fi + fi rm -fr $TEMP_EXTRA786_FILE done } diff --git a/checks/check_extra787 b/checks/check_extra787 index 6f867902..ea681f8e 100644 --- a/checks/check_extra787 +++ b/checks/check_extra787 @@ -12,8 +12,9 @@ # specific language governing permissions and limitations under the License. CHECK_ID_extra787="7.87" CHECK_TITLE_extra787="[extra787] Check connection and authentication for Internet exposed Elasticsearch/Kibana ports" -CHECK_SCORED_extra787="NOT_SCORED" +CHECK_SCORED_extra787="NOT_SCORED" CHECK_TYPE_extra787="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra787="AwsEc2Instance" CHECK_ALTERNATE_check787="extra787" extra787(){ @@ -25,7 +26,7 @@ extra787(){ ES_KIBANA_PORT="5601" for regx in $REGIONS; do - # crate a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT + # create a list of SG open to the world with port $ES_API_PORT or $ES_DATA_PORT or $ES_KIBANA_PORT SG_LIST=$($AWSCLI ec2 describe-security-groups $PROFILE_OPT --region $regx --output text \ --query "SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort<=\`$ES_API_PORT\` && ToPort>=\`$ES_API_PORT\`) || (FromPort<=\`$ES_DATA_PORT\` && ToPort>=\`$ES_DATA_PORT\`) || (FromPort<=\`$ES_KIBANA_PORT\` && ToPort>=\`$ES_KIBANA_PORT\`)) && (contains(IpRanges[].CidrIp, \`0.0.0.0/0\`) || contains(Ipv6Ranges[].CidrIpv6, \`::/0\`))]) > \`0\`].{GroupId:GroupId}") # 
in case of open security groups goes through each one @@ -37,28 +38,28 @@ extra787(){ $AWSCLI $PROFILE_OPT --region $regx ec2 describe-instances --filters Name=instance.group-id,Values=$sg --query 'Reservations[*].Instances[*].[InstanceId,PublicIpAddress]' --output text > $TEMP_EXTRA787_FILE # in case of exposed instances it does access checks if [[ -s "$TEMP_EXTRA787_FILE" ]];then - while read instance eip ; do - if [[ "$eip" != "None" ]];then + while read instance eip ; do + if [[ "$eip" != "None" ]];then # check for Elasticsearch on port $ES_API_PORT, rest API HTTP. CHECH_HTTP_ES_API=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_API_PORT/_cat/indices") httpStatus $CHECH_HTTP_ES_API - if [[ $CHECH_HTTP_ES_API -eq "200" ]];then + if [[ $CHECH_HTTP_ES_API -eq "200" ]];then textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_API_PORT response $SERVER_RESPONSE" "$regx" fi # check for port $ES_DATA_PORT TCP, this is the communication port, not: - # test_tcp_connectivity is in include/os_detector + # test_tcp_connectivity is in include/os_detector # syntax is 'test_tcp_connectivity $HOST $PORT $TIMEOUT' (in seconds) CHECH_HTTP_ES_DATA=$(test_tcp_connectivity $eip $ES_DATA_PORT 2) - # Using HTTP error codes here as well to reuse httpStatus function + # Using HTTP error codes here as well to reuse httpStatus function # codes for better handling, so 200 is open and 000 is not responding httpStatus $CHECH_HTTP_ES_DATA if [[ $CHECH_HTTP_ES_DATA -eq "200" ]];then textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port $ES_DATA_PORT response $SERVER_RESPONSE" "$regx" else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Elasticsearch port 
$ES_DATA_PORT response $SERVER_RESPONSE" "$regx" - fi + fi # check for Kibana on port $ES_KIBANA_PORT CHECH_HTTP_ES_KIBANA=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "http://$eip:$ES_KIBANA_PORT/api/status") httpStatus $CHECH_HTTP_ES_KIBANA @@ -66,13 +67,13 @@ extra787(){ textFail "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" else textInfo "$regx: Found instance $instance with public IP $eip on Security Group: $sg with Kibana on port $ES_KIBANA_PORT response $SERVER_RESPONSE" "$regx" - fi - else + fi + else textInfo "$regx: Found instance $instance with private IP on Security Group: $sg" "$regx" - fi + fi done < <(cat $TEMP_EXTRA787_FILE) - fi - rm -rf $TEMP_EXTRA787_FILE + fi + rm -rf $TEMP_EXTRA787_FILE done else textPass "$regx: No Security Groups found open to 0.0.0.0/0 for Elasticsearch/Kibana ports" "$regx" diff --git a/checks/check_extra788 b/checks/check_extra788 index f2258843..6c3c9fd0 100644 --- a/checks/check_extra788 +++ b/checks/check_extra788 @@ -14,6 +14,7 @@ CHECK_ID_extra788="7.88" CHECK_TITLE_extra788="[extra788] Check connection and authentication for Internet exposed Amazon Elasticsearch Service (ES) domains" CHECK_SCORED_extra788="NOT_SCORED" CHECK_TYPE_extra788="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra788="AwsElasticsearchDomain" CHECK_ALTERNATE_check788="extra788" extra788(){ @@ -31,8 +32,8 @@ extra788(){ # If the endpoint starts with "vpc-" it is in a VPC then it is fine. 
if [[ "$ES_DOMAIN_ENDPOINT" =~ ^vpc-* ]];then ES_DOMAIN_VPC=$($AWSCLI es describe-elasticsearch-domain --domain-name $domain $PROFILE_OPT --region $regx --query 'DomainStatus.VPCOptions.VPCId' --output text) - textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" - else + textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" + else $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) @@ -43,29 +44,29 @@ extra788(){ CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') if [[ $CONDITION_HAS_PRIVATE_IP ]];then CONDITION_HAS_PRIVATE_IP_ARRAY+=($condition_ip) - fi + fi CONDITION_HAS_PUBLIC_IP=$(echo "${condition_ip}" | grep -vE '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|0\.0\.0\.0|\*)') if [[ $CONDITION_HAS_PUBLIC_IP ]];then CONDITION_HAS_PUBLIC_IP_ARRAY+=($condition_ip) fi CONDITION_HAS_ZERO_NET=$(echo "${condition_ip}" | grep -E '^(0\.0\.0\.0)') CONDITION_HAS_STAR=$(echo "${condition_ip}" | grep -E '^\*') - done + done CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP=${CONDITION_HAS_PRIVATE_IP_ARRAY[@]} CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP=${CONDITION_HAS_PUBLIC_IP_ARRAY[@]} CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO=$CONDITION_HAS_ZERO_NET CHECK_ES_DOMAIN_POLICY_CONDITION_STAR=$CONDITION_HAS_STAR - fi + fi if [[ $CHECK_ES_DOMAIN_POLICY_OPEN || $CHECK_ES_DOMAIN_POLICY_CONDITION_ZERO || $CHECK_ES_DOMAIN_POLICY_CONDITION_STAR || ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then #Prowler will check to read indices or kibaba status if no conditions, condition IP is *, 0.0.0.0/0, 0.0.0.0/8 or any public IP. 
# check for REST API on port 443 CHECH_ES_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_cat/indices") httpStatus $CHECH_ES_HTTPS - if [[ $CHECH_ES_HTTPS -eq "200" ]];then + if [[ $CHECH_ES_HTTPS -eq "200" ]];then textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" else textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - fi + fi # check for Kibana on port 443 CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") httpStatus $CHECH_KIBANA_HTTPS @@ -73,13 +74,13 @@ extra788(){ textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" else textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" - fi + fi else if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" - else - textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" - fi + else + textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + fi fi rm -f $TEMP_POLICY_FILE fi @@ -88,4 +89,4 @@ extra788(){ textInfo "$regx: No Amazon ES domain found" "$regx" fi done -} +} diff --git a/checks/check_extra789 b/checks/check_extra789 index 964067cd..87f3a1a1 100644 --- a/checks/check_extra789 +++ b/checks/check_extra789 @@ -15,11 +15,12 @@ CHECK_ID_extra789="7.89" CHECK_TITLE_extra789="[extra789] Find trust boundaries in VPC 
endpoint services connections" CHECK_SCORED_extra789="NOT_SCORED" CHECK_TYPE_extra789="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra789="AwsEc2Vpc" CHECK_ALTERNATE_extra789="extra789" extra789(){ TRUSTED_ACCOUNT_IDS=$( echo "${ACCOUNT_NUM} ${GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS}" | xargs ) - + for regx in ${REGIONS}; do ENDPOINT_SERVICES_IDS=$(${AWSCLI} ec2 describe-vpc-endpoint-services \ ${PROFILE_OPT} \ @@ -29,19 +30,19 @@ extra789(){ ) for ENDPOINT_SERVICE_ID in ${ENDPOINT_SERVICES_IDS}; do - + ENDPOINT_CONNECTION_LIST=$(${AWSCLI} ec2 describe-vpc-endpoint-connections \ ${PROFILE_OPT} \ --query "VpcEndpointConnections[?VpcEndpointState=='available'].VpcEndpointOwner" \ --region ${regx} \ --output text | xargs ) - + for ENDPOINT_CONNECTION in ${ENDPOINT_CONNECTION_LIST}; do for ACCOUNT_ID in ${TRUSTED_ACCOUNT_IDS}; do if [[ "${ACCOUNT_ID}" == "${ENDPOINT_CONNECTION}" ]]; then textPass "${regx}: Found trusted account in VPC endpoint service connection ${ENDPOINT_CONNECTION}" "${regx}" - # Algorithm: + # Algorithm: # Remove all trusted ACCOUNT_IDs from ENDPOINT_CONNECTION_LIST. # As a result, the ENDPOINT_CONNECTION_LIST finally contains only unknown/untrusted account ids. 
ENDPOINT_CONNECTION_LIST=("${ENDPOINT_CONNECTION_LIST[@]/$ENDPOINT_CONNECTION}") # remove hit from whitelist @@ -52,6 +53,6 @@ extra789(){ for UNTRUSTED_CONNECTION in ${ENDPOINT_CONNECTION_LIST}; do textFail "${regx}: Found untrusted account in VPC endpoint service connection ${UNTRUSTED_CONNECTION}" "${regx}" done - done + done done } diff --git a/checks/check_extra790 b/checks/check_extra790 index 9a56cf17..6e9c2e80 100644 --- a/checks/check_extra790 +++ b/checks/check_extra790 @@ -15,6 +15,7 @@ CHECK_ID_extra790="7.90" CHECK_TITLE_extra790="[extra790] Find trust boundaries in VPC endpoint services whitelisted principles" CHECK_SCORED_extra790="NOT_SCORED" CHECK_TYPE_extra790="EXTRA" +CHECK_ASFF_RESOURCE_TYPE_extra790="AwsEc2Vpc" CHECK_ALTERNATE_extra790="extra790" extra790(){ @@ -40,14 +41,14 @@ extra790(){ for ENDPOINT_PERMISSION in ${ENDPOINT_PERMISSIONS_LIST}; do # Take only account id from ENDPOINT_PERMISSION: arn:aws:iam::965406151242:root ENDPOINT_PERMISSION_ACCOUNT_ID=$(echo ${ENDPOINT_PERMISSION} | cut -d':' -f5 | xargs) - + for ACCOUNT_ID in ${TRUSTED_ACCOUNT_IDS}; do if [[ "${ACCOUNT_ID}" == "${ENDPOINT_PERMISSION_ACCOUNT_ID}" ]]; then textPass "${regx}: Found trusted account in VPC endpoint service permission ${ENDPOINT_PERMISSION}" "${regx}" - # Algorithm: + # Algorithm: # Remove all trusted ACCOUNT_IDs from ENDPOINT_PERMISSIONS_LIST. # As a result, the ENDPOINT_PERMISSIONS_LIST finally contains only unknown/untrusted account ids. 
- ENDPOINT_PERMISSIONS_LIST=("${ENDPOINT_PERMISSIONS_LIST[@]/$ENDPOINT_PERMISSION}") + ENDPOINT_PERMISSIONS_LIST=("${ENDPOINT_PERMISSIONS_LIST[@]/$ENDPOINT_PERMISSION}") fi done done @@ -55,6 +56,6 @@ extra790(){ for UNTRUSTED_PERMISSION in ${ENDPOINT_PERMISSIONS_LIST}; do textFail "${regx}: Found untrusted account in VPC endpoint service permission ${UNTRUSTED_PERMISSION}" "${regx}" done - done + done done -} +} From 8d9c7e8ab0e9b0baecae340d3dc9e79c08a7f0a2 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sat, 11 Apr 2020 20:07:03 +0100 Subject: [PATCH 051/104] Handle IAM credential report containing 'no_information' for a user's last console login date A user who has never logged into the console, or not logged in since Oct 2014 will present as 'no_information' in the 'password_last_used' column of the credential report. Handle this scenario and output a failed message if it has been more than MAX_DAYS days since the user was created, or an info message if it is less than MAX_DAYS Fixes #501 --- checks/check_extra774 | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/checks/check_extra774 b/checks/check_extra774 index 83e2aa6f..0f882b45 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -24,11 +24,23 @@ extra774(){ user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $1 }') last_login_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $2 }') - days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) - if [ "$days_not_in_use" -lt "$MAX_DAYS" ];then - textFail "User $user has not used console login for more then ${MAX_DAYS#-} days" + # If the user has never logged into the console, their last login date is 'no_information'. 
See: + # https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html#id_credentials_understanding_the_report_format + if [[ "${last_login_date}" == "no_information" ]]; then + user_created_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$3 }' |grep "^$i " |awk '{ print $2 }') + days_since_user_created=$(how_many_days_from_today ${user_created_date%T*}) + if [ "$days_since_user_created" -lt "$MAX_DAYS" ];then + textFail "User $user has never used console login since they were created ${days_since_user_created} days ago" + else + textInfo "User $user has not used console login since they were created ${days_since_user_created} days ago" + fi + else + days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) + if [ "$days_not_in_use" -lt "$MAX_DAYS" ];then + textFail "User $user has not used console login for more than ${MAX_DAYS#-} days" else textPass "User $user has used console login in the past ${MAX_DAYS#-} days" + fi fi done } From ce1058dfedb737a4e1e70f3339b7c0dc8aac2f9c Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 12 Apr 2020 01:22:34 +0100 Subject: [PATCH 052/104] Remove the varying number of days in the message so that message stays consistent over time --- checks/check_extra774 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/checks/check_extra774 b/checks/check_extra774 index 0f882b45..a5ef1580 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -30,9 +30,9 @@ extra774(){ user_created_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$3 }' |grep "^$i " |awk '{ print $2 }') days_since_user_created=$(how_many_days_from_today ${user_created_date%T*}) if [ "$days_since_user_created" -lt "$MAX_DAYS" ];then - textFail "User $user has never used console login since they were created ${days_since_user_created} days ago" + textFail "User $user has never used console login since they were created over ${MAX_DAYS#-} days ago" else - textInfo "User $user has not used console login since they were 
created ${days_since_user_created} days ago" + textInfo "User $user has not used console login since they were created" fi else days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) From d9588f4de07aa949ffc58d0251a3acf511f635c2 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 12 Apr 2020 01:28:11 +0100 Subject: [PATCH 053/104] Detect when GNU coreutils is installed on Mac OS X and use the correct date functions As some users may have installed GNU coreutils on Mac OS X, e.g. `brew install coreutils`, it's possible that the `date` command uses the GNU version, instead of the standard BSD version. - Detect if GNU coreutils is installed on Mac and if it is, use the GNU variants of date functions - Reduce some of the duplication in the file, which resolves a bug where the cygwin version of `how_many_days_from_today()` had the operands switched around, leading to a positive result instead of negative - Add test_tcp_connectivity function for cygwin (uses the GNU variant) Fixes #534 --- include/os_detector | 278 ++++++++++++++++++++++++-------------------- 1 file changed, 155 insertions(+), 123 deletions(-) diff --git a/include/os_detector b/include/os_detector index 1e24df71..565c6b25 100644 --- a/include/os_detector +++ b/include/os_detector @@ -11,146 +11,178 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
+gnu_how_older_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) + DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) + DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) + echo $DAYS_SINCE +} +bsd_how_older_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$(date +%s) + DATE_FROM_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) + DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) + echo $DAYS_SINCE +} + +# function to convert from timestamp to date +# output date format %Y-%m-%d +gnu_timestamp_to_date() { + # remove fractions of a second + TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") + OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + echo $OUTPUT_DATE +} +bsd_timestamp_to_date() { + # remove fractions of a second + TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") + OUTPUT_DATE=$(date -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + echo $OUTPUT_DATE +} + +gnu_decode_report() { + base64 -d +} +bsd_decode_report() { + base64 -D +} + +gnu_how_many_days_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) + DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) + DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) + echo $DAYS_TO +} +bsd_how_many_days_from_today() { + DATE_TO_COMPARE=$1 + TODAY_IN_DAYS=$(date +%s) + DATE_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) + DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) + echo $DAYS_TO +} + +gnu_get_date_previous_than_months() { + MONTHS_TO_COMPARE=$1 + MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) + CURRENTSECS=$(date +%s) + STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) + DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d') + echo $DATE_BEFORE_MONTHS_TO_COMPARE +} +bsd_get_date_previous_than_months() { + MONTHS_TO_COMPARE=$1 + DATE_BEFORE_MONTHS_TO_COMPARE=$(date -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') + echo $DATE_BEFORE_MONTHS_TO_COMPARE 
+} + +gnu_test_tcp_connectivity() { + HOST=$1 + PORT=$2 + TIMEOUT=$3 + # This is initially for ES port 9300, not not HTTP but I add HTTP error + # codes for better handling, so 200 is open and 000 is not responding + timeout $TIMEOUT bash -c '(echo > /dev/tcp/'$HOST'/'$PORT') >/dev/null 2>&1 && echo "200" || echo "000"' +} +bsd_test_tcp_connectivity() { + HOST=$1 + PORT=$2 + TIMEOUT=$3 + # This is initially for ES port 9300, not not HTTP but I add HTTP error + # codes for better handling, so 200 is open and 000 is not responding + nc -z -G $TIMEOUT $HOST $PORT >/dev/null 2>&1 && echo "200" || echo "000" +} # Functions to manage dates depending on OS if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then TEMP_REPORT_FILE=$(mktemp -t -p /tmp prowler.cred_report-XXXXXX) # function to compare in days, usage how_older_from_today date # date format %Y-%m-%d - how_older_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) - echo $DAYS_SINCE - } - # function to convert from timestamp to date, usage timestamp_to_date timestamp - # output date format %Y-%m-%d - timestamp_to_date() - { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE - } - decode_report() - { - base64 -d - } - how_many_days_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) - echo $DAYS_TO - } - get_date_previous_than_months() - { - MONTHS_TO_COMPARE=$1 - MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) - CURRENTSECS=`date +%s` - STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS 
'+%Y-%m-%d') - echo $DATE_BEFORE_MONTHS_TO_COMPARE - } - test_tcp_connectivity() - { - HOST=$1 - PORT=$2 - TIMEOUT=$3 - # This is initially for ES port 9300, not not HTTP but I add HTTP error - # codes for better handling, so 200 is open and 000 is not responding - timeout $TIMEOUT bash -c '(echo > /dev/tcp/'$HOST'/'$PORT') >/dev/null 2>&1 && echo "200" || echo "000"' - } + how_older_from_today() { + gnu_how_older_from_today "$1" + } + timestamp_to_date() { + gnu_timestamp_to_date "$1" + } + decode_report() { + gnu_decode_report + } + how_many_days_from_today() { + gnu_how_many_days_from_today "$1" + } + get_date_previous_than_months() { + gnu_get_date_previous_than_months "$1" + } + test_tcp_connectivity() { + gnu_test_tcp_connectivity "$1" "$2" "$3" + } elif [[ "$OSTYPE" == "darwin"* ]]; then # BSD/OSX commands compatibility TEMP_REPORT_FILE=$(mktemp -t prowler.cred_report-XXXXXX) - how_older_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date +%s) - DATE_FROM_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) - DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) - echo $DAYS_SINCE + # It is possible that the user has installed GNU coreutils, replacing the default Mac OS X BSD tools with + # GNU coreutils equivalents. 
Only GNU date allows --version as a valid argument, so use the validity of this argument + # as a means to detect that coreutils is installed and is overriding the default tools + if date --version >/dev/null 2>&1 ; then + how_older_from_today() { + gnu_how_older_from_today "$1" } - timestamp_to_date() - { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE + timestamp_to_date() { + gnu_timestamp_to_date "$1" } - decode_report() - { - base64 -D + decode_report() { + gnu_decode_report } - how_many_days_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date +%s) - DATE_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) - DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) - echo $DAYS_TO + how_many_days_from_today() { + gnu_how_many_days_from_today "$1" } - get_date_previous_than_months() - { - MONTHS_TO_COMPARE=$1 - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') - echo $DATE_BEFORE_MONTHS_TO_COMPARE + get_date_previous_than_months() { + gnu_get_date_previous_than_months "$1" } - test_tcp_connectivity() - { - HOST=$1 - PORT=$2 - TIMEOUT=$3 - # This is initially for ES port 9300, not not HTTP but I add HTTP error - # codes for better handling, so 200 is open and 000 is not responding - nc -z -G $TIMEOUT $HOST $PORT >/dev/null 2>&1 && echo "200" || echo "000" + else + how_older_from_today() { + bsd_how_older_from_today "$1" } + timestamp_to_date() { + bsd_timestamp_to_date "$1" + } + decode_report() { + bsd_decode_report + } + how_many_days_from_today() { + bsd_how_many_days_from_today "$1" + } + get_date_previous_than_months() { + bsd_get_date_previous_than_months "$1" + } + fi + test_tcp_connectivity() { + bsd_test_tcp_connectivity "$1" "$2" "$3" + } elif [[ "$OSTYPE" == "cygwin" ]]; then # POSIX compatibility layer and Linux environment emulation for Windows TEMP_REPORT_FILE=$(mktemp -t -p /tmp 
prowler.cred_report-XXXXXX) - how_older_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) - echo $DAYS_SINCE - } - timestamp_to_date() - { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE - } - decode_report() - { - base64 -d - } - how_many_days_from_today() - { - DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) - DAYS_TO=$((( $TODAY_IN_DAYS - $DATE_IN_DAYS )/60/60/24)) - echo $DAYS_TO - } - get_date_previous_than_months() - { - MONTHS_TO_COMPARE=$1 - MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) - CURRENTSECS=`date +%s` - STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d') - echo $DATE_BEFORE_MONTHS_TO_COMPARE - } + how_older_from_today() { + gnu_how_older_from_today "$1" + } + timestamp_to_date() { + gnu_timestamp_to_date "$1" + } + decode_report() { + gnu_decode_report + } + how_many_days_from_today() { + gnu_how_many_days_from_today "$1" + } + get_date_previous_than_months() { + gnu_get_date_previous_than_months "$1" + } + test_tcp_connectivity() { + gnu_test_tcp_connectivity "$1" "$2" "$3" + } else - echo "Unknown Operating System! Valid \$OSTYPE: linux-gnu, linux-musl, darwin* or cygwin" - echo "Found: $OSTYPE" - EXITCODE=1 - exit $EXITCODE + echo "Unknown Operating System! 
Valid \$OSTYPE: linux-gnu, linux-musl, darwin* or cygwin" + echo "Found: $OSTYPE" + EXITCODE=1 + exit $EXITCODE fi From 4f623b4e314f317400b30dfd562672766bdfacfd Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 12 Apr 2020 02:18:42 +0100 Subject: [PATCH 054/104] check121 - Filter out users who do not have a console password According to the benchmark, only users with a console password should be considered for this check, therefore filter out any users who do not have a console password Fixes #513 --- checks/check121 | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/checks/check121 b/checks/check121 index 64032c05..b15a0125 100644 --- a/checks/check121 +++ b/checks/check121 @@ -19,7 +19,8 @@ check121(){ LIST_USERS=$($AWSCLI iam list-users --query 'Users[*].UserName' --output text $PROFILE_OPT --region $REGION) # List of USERS with KEY1 last_used_date as N/A LIST_USERS_KEY1_NA=$(for user in $LIST_USERS; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$11 }'|grep N/A |awk '{ print $1 }'; done) - LIST_USERS_KEY1_ACTIVE=$(for user in $LIST_USERS_KEY1_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$9 }'|grep "true$"|awk '{ print $1 }'|sed 's/[[:blank:]]+/,/g' ; done) + # List of USERS with KEY1 active, last_used_date as N/A and have a console password + LIST_USERS_KEY1_ACTIVE=$(for user in $LIST_USERS_KEY1_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$9 }'|grep "true true$"|awk '{ print $1 }'|sed 's/[[:blank:]]+/,/g' ; done) if [[ $LIST_USERS_KEY1_ACTIVE ]]; then for user in $LIST_USERS_KEY1_ACTIVE; do textFail "$user has never used Access Key 1" @@ -29,7 +30,8 @@ check121(){ fi # List of USERS with KEY2 last_used_date as N/A LIST_USERS_KEY2_NA=$(for user in $LIST_USERS; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$16 }'|grep N/A |awk '{ print $1 }' ; done) - LIST_USERS_KEY2_ACTIVE=$(for user in $LIST_USERS_KEY2_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$14 }'|grep 
"true$" |awk '{ print $1 }' ; done) + # List of USERS with KEY2 active, last_used_date as N/A and have a console password + LIST_USERS_KEY2_ACTIVE=$(for user in $LIST_USERS_KEY2_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$14 }'|grep "true true$" |awk '{ print $1 }' ; done) if [[ $LIST_USERS_KEY2_ACTIVE ]]; then for user in $LIST_USERS_KEY2_ACTIVE; do textFail "$user has never used Access Key 2" From 57c15c2cc9791a4afe5fc45748839a582b69865a Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 12 Apr 2020 17:09:02 +0100 Subject: [PATCH 055/104] Avoid changing the execution order of checks when some checks are excluded Replace the use of `sort -u` to remove duplicate checks, which has the side-effect of reordering checks alphabetically when one or more are excluded with awk, which preserves the check order Adjust indentation and formatting to be more consistent with the rest of the file Fixes #492 --- prowler | 56 ++++++++++++++++++++++++++++---------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/prowler b/prowler index 2486808c..458abbad 100755 --- a/prowler +++ b/prowler @@ -77,12 +77,12 @@ USAGE: -s show scoring report -x specify external directory with custom checks (i.e. 
/my/own/checks, files must start by "check") -q suppress info messages and passing test output - -A account id for the account where to assume a role, requires -R and -T + -A account id for the account where to assume a role, requires -R and -T (i.e.: 123456789012) - -R role name to assume in the account, requires -A and -T + -R role name to assume in the account, requires -A and -T (i.e.: ProwlerRole) -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T - (i.e.: 43200) + (i.e.: 43200) -h this help " exit @@ -344,38 +344,38 @@ show_all_group_titles() { done } - # Function to execute all checks but exclude some of them get_all_checks_without_exclusion() { - CHECKS_EXCLUDED=() - local CHECKS_TO_EXCLUDE=() - local TOTAL_CHECKS=() - #Get a list of checks to exclude - IFS=',' read -ra E_CHECKS <<< "$1" - for E_CHECK in "${E_CHECKS[@]}"; do - CHECKS_TO_EXCLUDE+=($E_CHECK) - done - #Get a list of total checks available by ID + CHECKS_EXCLUDED=() + local CHECKS_TO_EXCLUDE=() + local TOTAL_CHECKS=() + # Get a list of checks to exclude + IFS=',' read -ra E_CHECKS <<< "$1" + for E_CHECK in "${E_CHECKS[@]}"; do + CHECKS_TO_EXCLUDE+=($E_CHECK) + done + # Get a list of total checks available by ID for i in "${!GROUP_TITLE[@]}"; do - #show_group_title $i + # show_group_title $i IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} for j in ${CHECKS[@]}; do - TOTAL_CHECKS+=($CHECK_ID_$j) + TOTAL_CHECKS+=($CHECK_ID_$j) done done - TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | sort -u)) #removes duplicate and store the result as an array - #Create a list that contains all checks but excluded ones - for i in "${TOTAL_CHECKS[@]}"; do - local COINCIDENCE=false - for x in "${CHECKS_TO_EXCLUDE[@]}"; do - if [[ "$i" == "$x" ]]; then - COINCIDENCE=true - fi - done - if [[ "$COINCIDENCE" = false ]]; then - CHECKS_EXCLUDED+=($i) - fi - done + # Remove duplicates whilst preserving the order of checks, and store the result 
as an array + TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | awk '!seen[$0]++')) + # Create a list that contains all checks but excluded ones + for i in "${TOTAL_CHECKS[@]}"; do + local COINCIDENCE=false + for x in "${CHECKS_TO_EXCLUDE[@]}"; do + if [[ "$i" == "$x" ]]; then + COINCIDENCE=true + fi + done + if [[ "$COINCIDENCE" = false ]]; then + CHECKS_EXCLUDED+=($i) + fi + done } ### All functions defined above ... run the workflow From 24e691901e94d94f10336026a1f35dbc0fa1cb27 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 12 Apr 2020 17:17:46 +0100 Subject: [PATCH 056/104] Convert tabs to spaces within modified function --- prowler | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/prowler b/prowler index 458abbad..d7ed2b93 100755 --- a/prowler +++ b/prowler @@ -355,13 +355,13 @@ get_all_checks_without_exclusion() { CHECKS_TO_EXCLUDE+=($E_CHECK) done # Get a list of total checks available by ID - for i in "${!GROUP_TITLE[@]}"; do - # show_group_title $i - IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} - for j in ${CHECKS[@]}; do + for i in "${!GROUP_TITLE[@]}"; do + # show_group_title $i + IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} + for j in ${CHECKS[@]}; do TOTAL_CHECKS+=($CHECK_ID_$j) - done - done + done + done # Remove duplicates whilst preserving the order of checks, and store the result as an array TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | awk '!seen[$0]++')) # Create a list that contains all checks but excluded ones From 05247a2ccbdd51c9e3504689788c98a9d83a1875 Mon Sep 17 00:00:00 2001 From: Julio Delgado Jr Date: Mon, 13 Apr 2020 12:39:20 -0400 Subject: [PATCH 057/104] Prowler IAM Policy Enhancements and ReadMe Updates --- README.md | 36 +++++--- iam/prowler-additions-policy.json | 144 +++++++----------------------- 2 files changed, 58 insertions(+), 122 deletions(-) diff --git a/README.md b/README.md index fb163fd1..145a53ff 100644 --- a/README.md +++ b/README.md @@ -90,10 +90,11 @@ This 
script has been written in bash using AWS-CLI and it works in Linux and OSX export AWS_SESSION_TOKEN="XXXXXXXXX" ``` -- Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure add SecurityAuditor default policy to your user. Policy ARN is +- Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure, add the AWS managed policies, SecurityAudit and ViewOnlyAccess, to the user or role being used. Policy ARNs are: ```sh arn:aws:iam::aws:policy/SecurityAudit + arn:aws:iam::aws:policy/job-function/ViewOnlyAccess ``` > Additional permissions needed: to make sure Prowler can scan all services included in the group *Extras*, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/toniblyx/prowler/blob/master/iam/prowler-security-hub.json). @@ -327,27 +328,42 @@ or set manually up your `~/.aws/credentials` file properly. There are some helpfull tools to save time in this process like [aws-mfa-script](https://github.com/asagage/aws-mfa-script) or [aws-cli-mfa](https://github.com/sweharris/aws-cli-mfa). +### AWS Managed IAM Policies + +[ViewOnlyAccess](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#jf_view-only-user) +- Use case: This user can view a list of AWS resources and basic metadata in the account across all services. The user cannot read resource content or metadata that goes beyond the quota and list information for resources. +- Policy description: This policy grants List*, Describe*, Get*, View*, and Lookup* access to resources for most AWS services. 
To see what actions this policy includes for each service, see [ViewOnlyAccess Permissions](https://console.aws.amazon.com/iam/home#policies/arn:aws:iam::aws:policy/job-function/ViewOnlyAccess) + +[SecurityAudit](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#jf_security-auditor) +- Use case: This user monitors accounts for compliance with security requirements. This user can access logs and events to investigate potential security breaches or potential malicious activity. +- Policy description: This policy grants permissions to view configuration data for many AWS services and to review their logs. To see what actions this policy includes for each service, see [SecurityAudit Permissions](https://console.aws.amazon.com/iam/home#policies/arn:aws:iam::aws:policy/SecurityAudit) + ### Custom IAM Policy -Some new and specific checks require Prowler to inherit more permissions than SecurityAudit to work properly. In addition to the AWS managed policy "SecurityAudit" for the role you use for checks you may need to create a custom policy with a few more permissions (get and list and additional services mostly). Here you go a good example for a "ProwlerReadOnlyPolicy" (see below bootstrap script for set it up): +[Prowler-Additions-Policy](iam/prowler-additions-policy.json) -[iam/prowler-additions-policy.json](iam/prowler-additions-policy.json) +Some new and specific checks require Prowler to inherit more permissions than SecurityAudit and ViewOnlyAccess to work properly. In addition to the AWS managed policies, "SecurityAudit" and "ViewOnlyAccess", the user/role you use for checks may need to be granted a custom policy with a few more read-only permissions (to support additional services mostly). 
Here is an example policy with the additional rights, "Prowler-Additions-Policy" (see below bootstrap script for set it up): +- [iam/prowler-additions-policy.json](iam/prowler-additions-policy.json) -> Note: Action `ec2:get*` is included in "ProwlerReadOnlyPolicy" policy above, that includes `get-password-data`, type `aws ec2 get-password-data help` to better understand its implications. +[Prowler-Security-Hub Policy](iam/prowler-security-hub.json) + +Allows Prowler to import its findings to [AWS Security Hub](https://aws.amazon.com/security-hub). With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions. +- [iam/prowler-security-hub.json](iam/prowler-security-hub.json) ### Bootstrap Script -Quick bash script to set up a "prowler" IAM user with "SecurityAudit" group with the required permissions (including "ProwlerReadOnlyPolicy"). To run the script below, you need user with administrative permissions; set the `AWS_DEFAULT_PROFILE` to use that account: +Quick bash script to set up a "prowler" IAM user with "SecurityAudit" and "ViewOnlyAccess" group with the required permissions (including "Prowler-Additions-Policy"). 
To run the script below, you need user with administrative permissions; set the `AWS_DEFAULT_PROFILE` to use that account: ```sh export AWS_DEFAULT_PROFILE=default export ACCOUNT_ID=$(aws sts get-caller-identity --query 'Account' | tr -d '"') -aws iam create-group --group-name SecurityAudit -aws iam create-policy --policy-name ProwlerReadOnlyPolicy --policy-document file://$(pwd)/iam/prowler-additions-policy.json -aws iam attach-group-policy --group-name SecurityAudit --policy-arn arn:aws:iam::aws:policy/SecurityAudit -aws iam attach-group-policy --group-name SecurityAudit --policy-arn arn:aws:iam::${ACCOUNT_ID}:policy/ProwlerReadOnlyPolicy +aws iam create-group --group-name Prowler +aws iam create-policy --policy-name Prowler-Additions-Policy --policy-document file://$(pwd)/iam/prowler-additions-policy.json +aws iam attach-group-policy --group-name Prowler --policy-arn arn:aws:iam::aws:policy/SecurityAudit +aws iam attach-group-policy --group-name Prowler --policy-arn arn:aws:iam::aws:policy/job-function/ViewOnlyAccess +aws iam attach-group-policy --group-name Prowler --policy-arn arn:aws:iam::${ACCOUNT_ID}:policy/Prowler-Additions-Policy aws iam create-user --user-name prowler -aws iam add-user-to-group --user-name prowler --group-name SecurityAudit +aws iam add-user-to-group --user-name prowler --group-name Prowler aws iam create-access-key --user-name prowler unset ACCOUNT_ID AWS_DEFAULT_PROFILE ``` diff --git a/iam/prowler-additions-policy.json b/iam/prowler-additions-policy.json index 213c811e..79cfdc9a 100644 --- a/iam/prowler-additions-policy.json +++ b/iam/prowler-additions-policy.json @@ -1,113 +1,33 @@ { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "access-analyzer:List*", - "apigateway:get*", - "apigatewayv2:get*", - "aws-marketplace:viewsubscriptions", - "batch:listjobs", - "clouddirectory:listappliedschemaarns", - "clouddirectory:listdevelopmentschemaarns", - "clouddirectory:listpublishedschemaarns", - "cloudformation:list*", - 
"cloudhsm:listavailablezones", - "cloudsearch:list*", - "cloudwatch:get*", - "cloudwatch:list*", - "codebuild:listbuilds*", - "codestar:verify*", - "cognito-identity:listidentities", - "cognito-idp:list*", - "cognito-sync:listdatasets", - "connect:list*", - "datapipeline:getaccountlimits", - "dax:describeclusters", - "dax:describedefaultparameters", - "dax:describeevents", - "dax:describeparametergroups", - "dax:describeparameters", - "dax:describesubnetgroups", - "dax:describetable", - "dax:listtables", - "devicefarm:list*", - "discovery:list*", - "dms:list*", - "ds:ListAuthorizedApplications", - "ds:DescribeRoles", - "dynamodb:describebackup", - "dynamodb:describeglobaltablesettings", - "dynamodb:describelimits", - "dynamodb:describereservedcapacity", - "dynamodb:describereservedcapacityofferings", - "dynamodb:describestream", - "dynamodb:listtagsofresource", - "ec2:get*", - "ecr:describe*", - "ecr:listimages", - "elasticbeanstalk:listavailablesolutionstacks", - "elasticmapreduce:list*", - "elastictranscoder:list*", - "gamelift:list*", - "glacier:list*", - "importexport:listjobs", - "lambda:GetAccountSettings", - "lambda:GetFunctionConfiguration", - "lambda:GetLayerVersionPolicy", - "lambda:GetPolicy", - "lambda:List*", - "lex:getbotaliases", - "lex:getbotchannelassociations", - "lex:getbots", - "lex:getbotversions", - "lex:getintents", - "lex:getintentversions", - "lex:getslottypes", - "lex:getslottypeversions", - "lex:getutterancesview", - "lightsail:getblueprints", - "lightsail:getbundles", - "lightsail:getinstancesnapshots", - "lightsail:getkeypair", - "lightsail:getregions", - "lightsail:getstaticips", - "lightsail:isvpcpeered", - "machinelearning:describe*", - "mobilehub:listavailablefeatures", - "mobilehub:listavailableregions", - "mobilehub:listprojects", - "mobiletargeting:getapplicationsettings", - "mobiletargeting:getcampaigns", - "mobiletargeting:getimportjobs", - "mobiletargeting:getsegments", - "opsworks-cm:describe*", - "opsworks:describe*", - 
"polly:describe*", - "polly:list*", - "redshift:viewqueriesinconsole", - "route53domains:list*", - "s3:listbucket", - "sdb:list*", - "secretsmanager:listsecretversionids", - "servicecatalog:list*", - "ses:list*", - "sns:list*", - "sqs:listqueuetags", - "ssm:listassociations", - "states:listactivities", - "support:describe*", - "swf:list*", - "tag:gettagkeys", - "trustedadvisor:describe*", - "waf-regional:list*", - "waf:list*", - "workdocs:describeavailabledirectories", - "workdocs:describeinstances", - "workmail:describe*" - ], - "Effect": "Allow", - "Resource": "*" - } - ] -} + "Version": "2012-10-17", + "Statement": [ + { + "Action": [ + "access-analyzer:List*", + "apigateway:Get*", + "apigatewayv2:Get*", + "aws-marketplace:ViewSubscriptions", + "dax:ListTables", + "ds:ListAuthorizedApplications", + "ds:DescribeRoles", + "ec2:GetEbsEncryptionByDefault", + "ecr:Describe*", + "lambda:GetAccountSettings", + "lambda:GetFunctionConfiguration", + "lambda:GetLayerVersionPolicy", + "lambda:GetPolicy", + "opsworks-cm:Describe*", + "opsworks:Describe*", + "secretsmanager:ListSecretVersionIds", + "sns:List*", + "sqs:ListQueueTags", + "states:ListActivities", + "support:Describe*", + "tag:GetTagKeys" + ], + "Resource": "*", + "Effect": "Allow", + "Sid": "AllowMoreReadForProwler" + } + ] +} \ No newline at end of file From c4374a281849f7af5e5ab9b715462f442315f43e Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Mon, 13 Apr 2020 22:24:48 +0100 Subject: [PATCH 058/104] Extra741 - Check if User Data is a valid GZIP file before attempting to gunzip Test if the user data is a valid GZIP file using `gunzip -t` and only then attempt to gunzip it Remove some code duplication Fixes #535 --- checks/check_extra741 | 34 +++++++++++++--------------------- 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/checks/check_extra741 b/checks/check_extra741 index 20b7b94a..5ec8c077 100644 --- a/checks/check_extra741 +++ b/checks/check_extra741 @@ -31,31 +31,23 @@ extra741(){ for 
instance in $LIST_OF_EC2_INSTANCES; do EC2_USERDATA_FILE="$SECRETS_TEMP_FOLDER/extra741-$instance-userData.decoded" EC2_USERDATA=$($AWSCLI ec2 describe-instance-attribute --attribute userData $PROFILE_OPT --region $regx --instance-id $instance --query UserData.Value --output text| grep -v ^None | decode_report > $EC2_USERDATA_FILE) - if [ -s $EC2_USERDATA_FILE ];then - FILE_FORMAT_ASCII=$(file -b $EC2_USERDATA_FILE|grep ASCII) + if [ -s "$EC2_USERDATA_FILE" ];then # This finds ftp or http URLs with credentials and common keywords # FINDINGS=$(egrep -i '[[:alpha:]]*://[[:alnum:]]*:[[:alnum:]]*@.*/|key|secret|token|pass' $EC2_USERDATA_FILE |wc -l|tr -d '\ ') # New implementation using https://github.com/Yelp/detect-secrets - if [[ $FILE_FORMAT_ASCII ]]; then - FINDINGS=$(secretsDetector file $EC2_USERDATA_FILE) - if [[ $FINDINGS -eq 0 ]]; then - textPass "$regx: No secrets found in $instance" "$regx" - # delete file if nothing interesting is there - rm -f $EC2_USERDATA_FILE - else - textFail "$regx: Potential secret found in $instance" "$regx" - # delete file to not leave trace, user must look at the instance User Data - rm -f $EC2_USERDATA_FILE - fi + # Test if user data is a valid GZIP file, if so gunzip first + if gunzip -t "$EC2_USERDATA_FILE" > /dev/null 2>&1; then + mv "$EC2_USERDATA_FILE" "$EC2_USERDATA_FILE.gz" ; gunzip "$EC2_USERDATA_FILE.gz" + fi + FINDINGS=$(secretsDetector file "$EC2_USERDATA_FILE") + if [[ $FINDINGS -eq 0 ]]; then + textPass "$regx: No secrets found in $instance" "$regx" + # delete file if nothing interesting is there + rm -f "$EC2_USERDATA_FILE" else - mv $EC2_USERDATA_FILE $EC2_USERDATA_FILE.gz ; gunzip $EC2_USERDATA_FILE.gz - FINDINGS=$(secretsDetector file $EC2_USERDATA_FILE) - if [[ $FINDINGS -eq 0 ]]; then - textPass "$regx: No secrets found in $instance User Data" "$regx" - rm -f $EC2_USERDATA_FILE - else - textFail "$regx: Potential secret found in $instance" "$regx" - fi + textFail "$regx: Potential secret found in $instance" 
"$regx" + # delete file to not leave trace, user must look at the instance User Data + rm -f "$EC2_USERDATA_FILE" fi else textPass "$regx: No secrets found in $instance User Data or it is empty" "$regx" From 460f65618b049d6de05210149f35c0181f1dc902 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Mon, 13 Apr 2020 22:43:22 +0100 Subject: [PATCH 059/104] Add clarifying text to pass/fail messages --- checks/check_extra741 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/checks/check_extra741 b/checks/check_extra741 index 5ec8c077..7545c9aa 100644 --- a/checks/check_extra741 +++ b/checks/check_extra741 @@ -41,11 +41,11 @@ extra741(){ fi FINDINGS=$(secretsDetector file "$EC2_USERDATA_FILE") if [[ $FINDINGS -eq 0 ]]; then - textPass "$regx: No secrets found in $instance" "$regx" + textPass "$regx: No secrets found in $instance User Data" "$regx" # delete file if nothing interesting is there rm -f "$EC2_USERDATA_FILE" else - textFail "$regx: Potential secret found in $instance" "$regx" + textFail "$regx: Potential secret found in $instance User Data" "$regx" # delete file to not leave trace, user must look at the instance User Data rm -f "$EC2_USERDATA_FILE" fi From 0f4946860177e7da3bb69d8e446c257e8b5db5e0 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Tue, 14 Apr 2020 02:02:48 +0100 Subject: [PATCH 060/104] Limit CHECK_ID to a single value, handing the left-pad formatting in one place Remove the second entry in any comma-separated check IDs from each check, formatting the check ID with leading zeros in `include/outputs` if the `-n` flag is active --- checks/check11 | 2 +- checks/check12 | 2 +- checks/check13 | 2 +- checks/check14 | 2 +- checks/check15 | 2 +- checks/check16 | 2 +- checks/check17 | 2 +- checks/check18 | 2 +- checks/check19 | 2 +- checks/check21 | 2 +- checks/check22 | 2 +- checks/check23 | 2 +- checks/check24 | 2 +- checks/check25 | 2 +- checks/check26 | 2 +- checks/check27 | 2 +- checks/check28 | 2 +- checks/check29 | 2 +- checks/check31 
| 2 +- checks/check32 | 2 +- checks/check33 | 2 +- checks/check34 | 2 +- checks/check35 | 2 +- checks/check36 | 2 +- checks/check37 | 2 +- checks/check38 | 2 +- checks/check39 | 2 +- checks/check41 | 2 +- checks/check42 | 2 +- checks/check43 | 2 +- checks/check44 | 2 +- checks/check_extra71 | 2 +- checks/check_extra72 | 2 +- checks/check_extra73 | 2 +- checks/check_extra74 | 2 +- checks/check_extra75 | 2 +- checks/check_extra76 | 2 +- checks/check_extra77 | 2 +- checks/check_extra78 | 2 +- checks/check_extra79 | 2 +- include/outputs | 5 ++--- 41 files changed, 42 insertions(+), 43 deletions(-) diff --git a/checks/check11 b/checks/check11 index 09bdcab1..59e982ef 100644 --- a/checks/check11 +++ b/checks/check11 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check11="1.1,1.01" +CHECK_ID_check11="1.1" CHECK_TITLE_check11="[check11] Avoid the use of the root account (Scored)" CHECK_SCORED_check11="SCORED" CHECK_TYPE_check11="LEVEL1" diff --git a/checks/check12 b/checks/check12 index 15bc50f1..800b64ce 100644 --- a/checks/check12 +++ b/checks/check12 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check12="1.2,1.02" +CHECK_ID_check12="1.2" CHECK_TITLE_check12="[check12] Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Scored)" CHECK_SCORED_check12="SCORED" CHECK_TYPE_check12="LEVEL1" diff --git a/checks/check13 b/checks/check13 index 9e0a4616..9f8f5a4c 100644 --- a/checks/check13 +++ b/checks/check13 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check13="1.3,1.03" +CHECK_ID_check13="1.3" CHECK_TITLE_check13="[check13] Ensure credentials unused for 90 days or greater are disabled (Scored)" CHECK_SCORED_check13="SCORED" CHECK_TYPE_check13="LEVEL1" diff --git a/checks/check14 b/checks/check14 index 86925def..21e2be49 100644 --- a/checks/check14 +++ b/checks/check14 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check14="1.4,1.04" +CHECK_ID_check14="1.4" CHECK_TITLE_check14="[check14] Ensure access keys are rotated every 90 days or less (Scored)" CHECK_SCORED_check14="SCORED" CHECK_TYPE_check14="LEVEL1" diff --git a/checks/check15 b/checks/check15 index 0aa9c732..bfc31270 100644 --- a/checks/check15 +++ b/checks/check15 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check15="1.5,1.05" +CHECK_ID_check15="1.5" CHECK_TITLE_check15="[check15] Ensure IAM password policy requires at least one uppercase letter (Scored)" CHECK_SCORED_check15="SCORED" CHECK_TYPE_check15="LEVEL1" diff --git a/checks/check16 b/checks/check16 index 9ee4965b..881b9a83 100644 --- a/checks/check16 +++ b/checks/check16 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check16="1.6,1.06" +CHECK_ID_check16="1.6" CHECK_TITLE_check16="[check16] Ensure IAM password policy require at least one lowercase letter (Scored)" CHECK_SCORED_check16="SCORED" CHECK_TYPE_check16="LEVEL1" diff --git a/checks/check17 b/checks/check17 index fc56ffe7..ad8faecd 100644 --- a/checks/check17 +++ b/checks/check17 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check17="1.7,1.07" +CHECK_ID_check17="1.7" CHECK_TITLE_check17="[check17] Ensure IAM password policy require at least one symbol (Scored)" CHECK_SCORED_check17="SCORED" CHECK_TYPE_check17="LEVEL1" diff --git a/checks/check18 b/checks/check18 index 732b87bd..bec51868 100644 --- a/checks/check18 +++ b/checks/check18 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check18="1.8,1.08" +CHECK_ID_check18="1.8" CHECK_TITLE_check18="[check18] Ensure IAM password policy require at least one number (Scored)" CHECK_SCORED_check18="SCORED" CHECK_TYPE_check18="LEVEL1" diff --git a/checks/check19 b/checks/check19 index 60aad8ed..28199d77 100644 --- a/checks/check19 +++ b/checks/check19 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check19="1.9,1.09" +CHECK_ID_check19="1.9" CHECK_TITLE_check19="[check19] Ensure IAM password policy requires minimum length of 14 or greater (Scored)" CHECK_SCORED_check19="SCORED" CHECK_TYPE_check19="LEVEL1" diff --git a/checks/check21 b/checks/check21 index d1a23103..1af4509a 100644 --- a/checks/check21 +++ b/checks/check21 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check21="2.1,2.01" +CHECK_ID_check21="2.1" CHECK_TITLE_check21="[check21] Ensure CloudTrail is enabled in all regions (Scored)" CHECK_SCORED_check21="SCORED" CHECK_TYPE_check21="LEVEL1" diff --git a/checks/check22 b/checks/check22 index 1646a94f..d302f128 100644 --- a/checks/check22 +++ b/checks/check22 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check22="2.2,2.02" +CHECK_ID_check22="2.2" CHECK_TITLE_check22="[check22] Ensure CloudTrail log file validation is enabled (Scored)" CHECK_SCORED_check22="SCORED" CHECK_TYPE_check22="LEVEL2" diff --git a/checks/check23 b/checks/check23 index 0149c5ce..9614fe68 100644 --- a/checks/check23 +++ b/checks/check23 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check23="2.3,2.03" +CHECK_ID_check23="2.3" CHECK_TITLE_check23="[check23] Ensure the S3 bucket CloudTrail logs to is not publicly accessible (Scored)" CHECK_SCORED_check23="SCORED" CHECK_TYPE_check23="LEVEL1" diff --git a/checks/check24 b/checks/check24 index 581878bc..1fb3c133 100644 --- a/checks/check24 +++ b/checks/check24 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check24="2.4,2.04" +CHECK_ID_check24="2.4" CHECK_TITLE_check24="[check24] Ensure CloudTrail trails are integrated with CloudWatch Logs (Scored)" CHECK_SCORED_check24="SCORED" CHECK_TYPE_check24="LEVEL1" diff --git a/checks/check25 b/checks/check25 index 6bb12698..456223fa 100644 --- a/checks/check25 +++ b/checks/check25 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check25="2.5,2.05" +CHECK_ID_check25="2.5" CHECK_TITLE_check25="[check25] Ensure AWS Config is enabled in all regions (Scored)" CHECK_SCORED_check25="SCORED" CHECK_TYPE_check25="LEVEL1" diff --git a/checks/check26 b/checks/check26 index 270c367a..da563445 100644 --- a/checks/check26 +++ b/checks/check26 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check26="2.6,2.06" +CHECK_ID_check26="2.6" CHECK_TITLE_check26="[check26] Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket (Scored)" CHECK_SCORED_check26="SCORED" CHECK_TYPE_check26="LEVEL1" diff --git a/checks/check27 b/checks/check27 index 56fd7392..6f5d81a3 100644 --- a/checks/check27 +++ b/checks/check27 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check27="2.7,2.07" +CHECK_ID_check27="2.7" CHECK_TITLE_check27="[check27] Ensure CloudTrail logs are encrypted at rest using KMS CMKs (Scored)" CHECK_SCORED_check27="SCORED" CHECK_TYPE_check27="LEVEL2" diff --git a/checks/check28 b/checks/check28 index adfbca41..23c797da 100644 --- a/checks/check28 +++ b/checks/check28 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check28="2.8,2.08" +CHECK_ID_check28="2.8" CHECK_TITLE_check28="[check28] Ensure rotation for customer created CMKs is enabled (Scored)" CHECK_SCORED_check28="SCORED" CHECK_TYPE_check28="LEVEL2" diff --git a/checks/check29 b/checks/check29 index c49efb2d..01681bb8 100644 --- a/checks/check29 +++ b/checks/check29 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check29="2.9,2.09" +CHECK_ID_check29="2.9" CHECK_TITLE_check29="[check29] Ensure VPC Flow Logging is Enabled in all VPCs (Scored)" CHECK_SCORED_check29="SCORED" CHECK_TYPE_check29="LEVEL2" diff --git a/checks/check31 b/checks/check31 index 21768a15..2ea65085 100644 --- a/checks/check31 +++ b/checks/check31 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check31="3.1,3.01" +CHECK_ID_check31="3.1" CHECK_TITLE_check31="[check31] Ensure a log metric filter and alarm exist for unauthorized API calls (Scored)" CHECK_SCORED_check31="SCORED" CHECK_TYPE_check31="LEVEL1" diff --git a/checks/check32 b/checks/check32 index 745d38d5..d6000238 100644 --- a/checks/check32 +++ b/checks/check32 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check32="3.2,3.02" +CHECK_ID_check32="3.2" CHECK_TITLE_check32="[check32] Ensure a log metric filter and alarm exist for Management Console sign-in without MFA (Scored)" CHECK_SCORED_check32="SCORED" CHECK_TYPE_check32="LEVEL1" diff --git a/checks/check33 b/checks/check33 index dd2b94db..837d5fb5 100644 --- a/checks/check33 +++ b/checks/check33 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check33="3.3,3.03" +CHECK_ID_check33="3.3" CHECK_TITLE_check33="[check33] Ensure a log metric filter and alarm exist for usage of root account (Scored)" CHECK_SCORED_check33="SCORED" CHECK_TYPE_check33="LEVEL1" diff --git a/checks/check34 b/checks/check34 index 86a55b25..7d2a6e26 100644 --- a/checks/check34 +++ b/checks/check34 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check34="3.4,3.04" +CHECK_ID_check34="3.4" CHECK_TITLE_check34="[check34] Ensure a log metric filter and alarm exist for IAM policy changes (Scored)" 
CHECK_SCORED_check34="SCORED" CHECK_TYPE_check34="LEVEL1" diff --git a/checks/check35 b/checks/check35 index 929be5b8..9fd5e0f5 100644 --- a/checks/check35 +++ b/checks/check35 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check35="3.5,3.05" +CHECK_ID_check35="3.5" CHECK_TITLE_check35="[check35] Ensure a log metric filter and alarm exist for CloudTrail configuration changes (Scored)" CHECK_SCORED_check35="SCORED" CHECK_TYPE_check35="LEVEL1" diff --git a/checks/check36 b/checks/check36 index 06e0d557..334ae475 100644 --- a/checks/check36 +++ b/checks/check36 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check36="3.6,3.06" +CHECK_ID_check36="3.6" CHECK_TITLE_check36="[check36] Ensure a log metric filter and alarm exist for AWS Management Console authentication failures (Scored)" CHECK_SCORED_check36="SCORED" CHECK_TYPE_check36="LEVEL2" diff --git a/checks/check37 b/checks/check37 index 4c6dd4d3..548535d0 100644 --- a/checks/check37 +++ b/checks/check37 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check37="3.7,3.07" +CHECK_ID_check37="3.7" CHECK_TITLE_check37="[check37] Ensure a log metric filter and alarm exist for disabling or scheduled deletion of customer created CMKs (Scored)" CHECK_SCORED_check37="SCORED" CHECK_TYPE_check37="LEVEL2" diff --git a/checks/check38 b/checks/check38 index c112620c..829cd122 100644 --- a/checks/check38 +++ b/checks/check38 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check38="3.8,3.08" +CHECK_ID_check38="3.8" CHECK_TITLE_check38="[check38] Ensure a log metric filter and alarm exist for S3 bucket policy changes (Scored)" CHECK_SCORED_check38="SCORED" CHECK_TYPE_check38="LEVEL1" diff --git a/checks/check39 b/checks/check39 index 
a649f157..6ca13baa 100644 --- a/checks/check39 +++ b/checks/check39 @@ -33,7 +33,7 @@ # --actions-enabled \ # --alarm-actions arn:aws:sns:us-east-1:123456789012:CloudWatchAlarmTopic -CHECK_ID_check39="3.9,3.09" +CHECK_ID_check39="3.9" CHECK_TITLE_check39="[check39] Ensure a log metric filter and alarm exist for AWS Config configuration changes (Scored)" CHECK_SCORED_check39="SCORED" CHECK_TYPE_check39="LEVEL2" diff --git a/checks/check41 b/checks/check41 index 00260ebb..da704739 100644 --- a/checks/check41 +++ b/checks/check41 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check41="4.1,4.01" +CHECK_ID_check41="4.1" CHECK_TITLE_check41="[check41] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 22 (Scored)" CHECK_SCORED_check41="SCORED" CHECK_TYPE_check41="LEVEL2" diff --git a/checks/check42 b/checks/check42 index a362b0cb..69e19891 100644 --- a/checks/check42 +++ b/checks/check42 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check42="4.2,4.02" +CHECK_ID_check42="4.2" CHECK_TITLE_check42="[check42] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 3389 (Scored)" CHECK_SCORED_check42="SCORED" CHECK_TYPE_check42="LEVEL2" diff --git a/checks/check43 b/checks/check43 index 6c0122ff..35cf44c5 100644 --- a/checks/check43 +++ b/checks/check43 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . -CHECK_ID_check43="4.3,4.03" +CHECK_ID_check43="4.3" CHECK_TITLE_check43="[check43] Ensure the default security group of every VPC restricts all traffic (Scored)" CHECK_SCORED_check43="SCORED" CHECK_TYPE_check43="LEVEL2" diff --git a/checks/check44 b/checks/check44 index 55ceb977..e7f620f8 100644 --- a/checks/check44 +++ b/checks/check44 @@ -8,7 +8,7 @@ # You should have received a copy of the license along with this # work. If not, see . 
-CHECK_ID_check44="4.4,4.04" +CHECK_ID_check44="4.4" CHECK_TITLE_check44="[check44] Ensure routing tables for VPC peering are \"least access\" (Not Scored)" CHECK_SCORED_check44="NOT_SCORED" CHECK_TYPE_check44="LEVEL2" diff --git a/checks/check_extra71 b/checks/check_extra71 index 368ad9d4..197eec97 100644 --- a/checks/check_extra71 +++ b/checks/check_extra71 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra71="7.1,7.01" +CHECK_ID_extra71="7.1" CHECK_TITLE_extra71="[extra71] Ensure users of groups with AdministratorAccess policy have MFA tokens enabled (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra71="NOT_SCORED" CHECK_TYPE_extra71="EXTRA" diff --git a/checks/check_extra72 b/checks/check_extra72 index 52bcd93d..b9471f9b 100644 --- a/checks/check_extra72 +++ b/checks/check_extra72 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra72="7.2,7.02" +CHECK_ID_extra72="7.2" CHECK_TITLE_extra72="[extra72] Ensure there are no EBS Snapshots set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra72="NOT_SCORED" CHECK_TYPE_extra72="EXTRA" diff --git a/checks/check_extra73 b/checks/check_extra73 index 86b07197..281b9f90 100644 --- a/checks/check_extra73 +++ b/checks/check_extra73 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra73="7.3,7.03" +CHECK_ID_extra73="7.3" CHECK_TITLE_extra73="[extra73] Ensure there are no S3 buckets open to the Everyone or Any AWS user (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra73="NOT_SCORED" CHECK_TYPE_extra73="EXTRA" diff --git a/checks/check_extra74 b/checks/check_extra74 index f700f1c4..c6d0aa04 100644 --- a/checks/check_extra74 +++ b/checks/check_extra74 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra74="7.4,7.04" +CHECK_ID_extra74="7.4" CHECK_TITLE_extra74="[extra74] Ensure there are no Security Groups without ingress filtering being used (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra74="NOT_SCORED" CHECK_TYPE_extra74="EXTRA" diff --git a/checks/check_extra75 b/checks/check_extra75 index a93d01e2..91a34df3 100644 --- a/checks/check_extra75 +++ b/checks/check_extra75 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra75="7.5,7.05" +CHECK_ID_extra75="7.5" CHECK_TITLE_extra75="[extra75] Ensure there are no Security Groups not being used (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra75="NOT_SCORED" CHECK_TYPE_extra75="EXTRA" diff --git a/checks/check_extra76 b/checks/check_extra76 index 2a65705f..e524ea7d 100644 --- a/checks/check_extra76 +++ b/checks/check_extra76 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra76="7.6,7.06" +CHECK_ID_extra76="7.6" CHECK_TITLE_extra76="[extra76] Ensure there are no EC2 AMIs set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra76="NOT_SCORED" CHECK_TYPE_extra76="EXTRA" diff --git a/checks/check_extra77 b/checks/check_extra77 index cfd1078a..ad3011c6 100644 --- a/checks/check_extra77 +++ b/checks/check_extra77 @@ -11,7 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra77="7.7,7.07" +CHECK_ID_extra77="7.7" CHECK_TITLE_extra77="[extra77] Ensure there are no ECR repositories set as Public (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra77="NOT_SCORED" CHECK_TYPE_extra77="EXTRA" diff --git a/checks/check_extra78 b/checks/check_extra78 index ee652259..d1c0c8ab 100644 --- a/checks/check_extra78 +++ b/checks/check_extra78 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -CHECK_ID_extra78="7.8,7.08" +CHECK_ID_extra78="7.8" CHECK_TITLE_extra78="[extra78] Ensure there are no Public Accessible RDS instances (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra78="NOT_SCORED" CHECK_TYPE_extra78="EXTRA" diff --git a/checks/check_extra79 b/checks/check_extra79 index d46a63a7..01c7b41e 100644 --- a/checks/check_extra79 +++ b/checks/check_extra79 @@ -10,7 +10,7 @@ # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-CHECK_ID_extra79="7.9,7.09" +CHECK_ID_extra79="7.9" CHECK_TITLE_extra79="[extra79] Check for internet facing Elastic Load Balancers (Not Scored) (Not part of CIS benchmark)" CHECK_SCORED_extra79="NOT_SCORED" CHECK_TYPE_extra79="EXTRA" diff --git a/include/outputs b/include/outputs index 40d67a7c..46c4d8a6 100644 --- a/include/outputs +++ b/include/outputs @@ -90,9 +90,8 @@ textTitle(){ CHECKS_COUNTER=$((CHECKS_COUNTER+1)) TITLE_ID=$1 if [[ $NUMERAL ]]; then - TITLE_ID=$(echo $TITLE_ID | cut -d, -f2) - else - TITLE_ID=$(echo $TITLE_ID | cut -d, -f1) + # Left-pad the check ID with zeros to simplify sorting, e.g. 1.1 -> 1.01 + TITLE_ID=$(awk -F'.' '{ printf "%d.%02d", $1, $2 }' <<< "$TITLE_ID") fi TITLE_TEXT=$2 From 7e5a4a1de429849da78ba0f634e2e8db5c4564ba Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Tue, 14 Apr 2020 02:17:28 +0100 Subject: [PATCH 061/104] Adjust execute_check() now that check71's ID has changed Fix minor typo in a comment --- prowler | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prowler b/prowler index a07474ba..bbc68a63 100755 --- a/prowler +++ b/prowler @@ -239,7 +239,7 @@ show_check_title() { # Function to show the title of a group, by numeric id show_group_title() { - # when csv mode is used, no group tittle is shown + # when csv mode is used, no group title is shown if [[ "$MODE" != "csv" ]]; then textTitle "${GROUP_NUMBER[$1]}" "${GROUP_TITLE[$1]}" "NOT_SCORED" "SUPPORT" fi @@ -275,7 +275,7 @@ execute_check() { local check_id_var=CHECK_ID_$1 local check_id=${!check_id_var} if [ ${check_id} ]; then - if [[ ${check_id} == 1* || ${check_id} == 7.1,7.01 || ${check_id} == 7.74 ]];then + if [[ ${check_id} == 1* || ${check_id} == 7.1 || ${check_id} == 7.74 ]];then if [ ! 
-s $TEMP_REPORT_FILE ];then genCredReport saveReport From 036ae640e539afbb221fb7b1fb278fd91147a737 Mon Sep 17 00:00:00 2001 From: nalansitan Date: Tue, 14 Apr 2020 10:38:01 +0800 Subject: [PATCH 062/104] support arn:aws:s3::: on extra725 --- checks/check_extra725 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra725 b/checks/check_extra725 index 36dd6840..88f43ce5 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -30,7 +30,7 @@ extra725(){ if [[ $LIST_OF_TRAILS ]]; then BUCKET_ENABLED_TRAILS=() for trail in $LIST_OF_TRAILS; do - BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:aws:s3:::$bucketName/\S*$|^arn:aws:s3$") + BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:aws:s3:::$bucketName/\S*$|^arn:aws:s3$|^arn:aws:s3:::$") if [[ $BUCKET_ENABLED_IN_TRAIL ]]; then BUCKET_ENABLED_TRAILS+=($trail) # textPass "$regx: S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx" From 11c182c5fe6479fd5c2660e5995904de738b94c5 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 16:45:37 +0200 Subject: [PATCH 063/104] Fixed issue with regions on check21 --- checks/check21 | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/checks/check21 b/checks/check21 index d1a23103..ffb6cc59 100644 --- a/checks/check21 +++ b/checks/check21 @@ -19,30 +19,22 @@ CHECK_ALTERNATE_check201="check21" check21(){ trail_count=0 # "Ensure CloudTrail is enabled in all regions (Scored)" - REGIONS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --query 'trailList[*].HomeRegion' --output text) - result='False' for regx in $REGIONS; do - 
LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].Name' --output text --no-include-shadow-trails) if [[ $LIST_OF_TRAILS ]];then for trail in $LIST_OF_TRAILS;do trail_count=$((trail_count + 1)) MULTIREGION_TRAIL_STATUS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query 'trailList[*].IsMultiRegionTrail' --output text --trail-name-list $trail) - ISLOGGING_STATUS=$($AWSCLI cloudtrail get-trail-status $PROFILE_OPT --region $regx --name $trail --query ['IsLogging'] --output text) - INCLUDEMANAGEMENTEVENTS_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].IncludeManagementEvents --output text) - READWRITETYPE_STATUS=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --region $regx --trail-name $trail --query EventSelectors[*].ReadWriteType --output text) - if [[ "$MULTIREGION_TRAIL_STATUS" == "True" ]] && [[ "$ISLOGGING_STATUS" == "True" ]] && [[ "$INCLUDEMANAGEMENTEVENTS_STATUS" == *"True"* ]] && [[ "$READWRITETYPE_STATUS" == *"All"* ]];then - textPass "$trail trail in $regx is enabled for all regions" - result='True' - break + if [[ "$MULTIREGION_TRAIL_STATUS" == 'False' ]];then + textFail "$trail trail in $regx is not enabled in multi region mode" + else + textPass "$trail trail in $regx is enabled for all regions" fi done fi done - if [[ $result == 'False' ]]; then - textFail "trail exist but it is not enabled in multi region mode" - fi - if [[ $trail_count == 0 ]]; then - textFail "No CloudTrail trails were found in the account" - fi -} + if [[ $trail_count == 0 ]]; then + textFail "No CloudTrail trails were found in the account" + fi +} \ No newline at end of file From 58d793ec2a8b3ffea80d990847f6c66396d8d627 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 18:51:13 +0200 
Subject: [PATCH 064/104] Added section for Security Hub integration --- README.md | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 145a53ff..d8a8dd11 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ - [Requirements and Installation](#requirements-and-installation) - [Usage](#usage) - [Advanced Usage](#advanced-usage) +- [Security Hub integration](#security-hub-integration) - [Fix](#fix) - [Screenshots](#screenshots) - [Troubleshooting](#troubleshooting) @@ -188,14 +189,6 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler -M mono | aws s3 cp - s3://bucket-name/prowler-report.txt ``` -1. If you want Prowler to submit findings to [AWS Security Hub](https://aws.amazon.com/security-hub): - - ```sh - ./prowler -M json-asff -S - ``` - - > Note that Security Hub must be enabled for the active region. It can be enabled by calling `aws securityhub enable-security-hub` - 1. To perform an assessment based on CIS Profile Definitions you can use cislevel1 or cislevel2 with `-g` flag, more information about this [here, page 8](https://d0.awsstatic.com/whitepapers/compliance/AWS_CIS_Foundations_Benchmark.pdf): ```sh @@ -283,6 +276,21 @@ In order to remove noise and get only FAIL findings there is a `-q` flag that ma ./prowler -q -M csv -b ``` +## Security Hub integration + +Since version v2.3, Prowler supports natively sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. 
It is as simple as running the command below:
+
+ ```sh
+ ./prowler -M json-asff -S
+ ```
+There are two requirements:
+
+1. Security Hub must be enabled for the active region from where you are calling Prowler (if no region is used with `-r` then `us-east-1` is used). It can be enabled by calling `aws securityhub enable-security-hub`
+2. As mentioned in section "Custom IAM Policy", to allow Prowler to import its findings to AWS Security Hub you need to add the policy below to the role or user running Prowler:
+
+ - [iam/prowler-security-hub.json](iam/prowler-security-hub.json)
+
+ ## How to fix every FAIL @@ -347,7 +355,7 @@ Some new and specific checks require Prowler to inherit more permissions than Se [Prowler-Security-Hub Policy](iam/prowler-security-hub.json) -Allows Prowler to import its findings to [AWS Security Hub](https://aws.amazon.com/security-hub). With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions. +Allows Prowler to import its findings to [AWS Security Hub](https://aws.amazon.com/security-hub). More information in [Security Hub integration](#security-hub-integration): - [iam/prowler-security-hub.json](iam/prowler-security-hub.json) ### Bootstrap Script From e6fe5addbcf8401b07d4f45a6a7953387e6f2751 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 18:52:48 +0200 Subject: [PATCH 065/104] Added section for Security Hub integration --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d8a8dd11..14049803 100644 --- a/README.md +++ b/README.md @@ -287,9 +287,9 @@ There are two requirements: 1.
Security Hub must be enabled for the active region from where you are calling Prowler (if no region is used with `-r` then `us-east-1` is used). It can be enabled by calling `aws securityhub enable-security-hub` 2. As mentioned in section "Custom IAM Policy", to allow Prowler to import its findings to AWS Security Hub you need to add the policy below to the role or user running Prowler: - - [iam/prowler-security-hub.json](iam/prowler-security-hub.json) +>Note: to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours. ## How to fix every FAIL From 4ea18643659546c714f451ac13e774dae93b6e64 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 22:28:58 +0200 Subject: [PATCH 066/104] Allow multiple report types at once #345 --- README.md | 38 ++++++++++++++++++++------------ include/colors | 16 ++++++++------ include/outputs | 58 ++++++++++++++++++++++++++++++------------------- prowler | 9 ++++---- 4 files changed, 73 insertions(+), 48 deletions(-) diff --git a/README.md b/README.md index 14049803..3ec882c9 100644 --- a/README.md +++ b/README.md @@ -159,7 +159,25 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX Valid check numbers are based on the AWS CIS Benchmark guide, so 1.1 is check11 and 3.10 is check310 -1. If you want to save your report for later analysis: +### Save your reports + +1. If you want to save your report for later analysis thare are different ways, natively (supported text, mono, csv, json and json-asff see note below for more info): + + ```sh + ./prowler -M csv + ``` + or with multiple formats at the same time: + ```sh + ./prowler -M csv,json,json-asff + ``` + or just a group of checks in multiple formats: + ```sh + ./prowler -g gdpr -M csv,json,json-asff + ``` + + Now `-M` creates a file inside the prowler root directory named `prowler-output-YYYYMMDDHHMMSS.format`. You don't have to specify anything else, no pipes, no redirects. 
+ + or just saving the output to a file like below: ```sh ./prowler -M mono > prowler-report.txt @@ -172,18 +190,9 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler | ansi2html -la > report.html ``` - or if you want a pipe-delimited report file, do: + >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. - ```sh - ./prowler -M csv > output.psv - ``` - or json formatted output using jq, do: - - ```sh - ./prowler -M json > prowler-output.json - ``` - - or save your report in a S3 bucket: + or save your report in a S3 bucket (this only works for text or mono, for csv, json or json-asff it has to be copied afterwards): ```sh ./prowler -M mono | aws s3 cp - s3://bucket-name/prowler-report.txt @@ -221,7 +230,8 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, csv (separator is ,; data is on stdout; progress on stderr) + -M output mode: text (default), mono, json, json-asff, csv. They can be used combined comma separated. + (separator is ","; data is on stdout; progress on stderr). -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) @@ -280,7 +290,7 @@ In order to remove noise and get only FAIL findings there is a `-q` flag that ma Since version v2.3, Prowler supports natively sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. 
With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. It is as simple as running the command below: - ```sh + ``` ./prowler -M json-asff -S ``` There are two requirements: diff --git a/include/colors b/include/colors index e938d143..2ae6f77f 100644 --- a/include/colors +++ b/include/colors @@ -11,13 +11,15 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" ]]; then - echo "" - echo "$OPTRED ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json or json-asff." - usage - EXITCODE=1 - exit $EXITCODE -fi + +IFS=',' read -ra MODES <<< "${MODE}" +for MODE in "${MODES[@]}"; do + if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" ]]; then + echo -e "${OPTRED}ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json or json-asff. ./prowler -h for help" + EXITCODE=1 + exit $EXITCODE + fi +done if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then MONOCHROME=1 diff --git a/include/outputs b/include/outputs index 40d67a7c..1bd93943 100644 --- a/include/outputs +++ b/include/outputs @@ -12,25 +12,36 @@ # specific language governing permissions and limitations under the License.
# Output formatting functions + +EXTENSION_CSV="csv" +EXTENSION_JSON="json" +EXTENSION_ASFF="asff-json" +EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation +OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") +OUTPUT_FILE_NAME=prowler-output-$OUTPUT_DATE + + textPass(){ if [[ "$QUIET" == 1 ]]; then return fi PASS_COUNTER=$((PASS_COUNTER+1)) - if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then + if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 else REPREGION=$REGION fi - if [[ "$MODE" == "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - generateJsonOutput "$1" "Pass" - elif [[ "$MODE" == "json-asff" ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + fi + if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") - echo "${JSON_ASFF_OUTPUT}" + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" fi @@ -45,16 +56,17 @@ textInfo(){ return fi - if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then + if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 else REPREGION=$REGION fi - if [[ "$MODE" == "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ 
"$MODE" == "json" ]]; then - generateJsonOutput "$1" "Info" + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Info" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON fi else echo " $NOTICE INFO! $1 $NORMAL" @@ -64,19 +76,21 @@ textInfo(){ textFail(){ FAIL_COUNTER=$((FAIL_COUNTER+1)) EXITCODE=3 - if [[ "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then + if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 else REPREGION=$REGION fi - if [[ "$MODE" == "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" - elif [[ "$MODE" == "json" ]]; then - generateJsonOutput "$1" "Fail" - elif [[ "$MODE" == "json-asff" ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + fi + if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") - echo "${JSON_ASFF_OUTPUT}" + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" fi @@ -117,9 +131,9 @@ textTitle(){ *) ITEM_LEVEL="Unspecified or Invalid";; esac - if [[ "$MODE" == "csv" ]]; then - >&2 echo "$TITLE_ID $TITLE_TEXT" - elif [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then + >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a 
$OUTPUT_FILE_NAME.$EXTENSION_CSV + elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else if [[ "$ITEM_SCORED" == "Scored" ]]; then diff --git a/prowler b/prowler index a07474ba..022d1b33 100755 --- a/prowler +++ b/prowler @@ -65,7 +65,8 @@ USAGE: -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, json-asff, csv (separator is ","; data is on stdout; progress on stderr) + -M output mode: text (default), mono, json, json-asff, csv. They can be used combined comma separated. + (separator is ","; data is on stdout; progress on stderr). -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) @@ -393,7 +394,7 @@ get_all_checks_without_exclusion() { } ### All functions defined above ... run the workflow -if [[ $MODE != "csv" ]]; then +if [[ ${MODES[@]} =~ "mono" || ${MODES[@]} =~ "text" ]]; then prowlerBanner fi @@ -424,9 +425,7 @@ getWhoami # Execute group of checks if called with -g if [[ $GROUP_ID_READ ]];then if [[ " ${GROUP_ID[@]} " =~ " ${GROUP_ID_READ} " ]]; then - if [[ $MODE == "csv" ]]; then - BANNER=0 - fi + execute_group_by_id ${GROUP_ID_READ} ${EXCLUDE_CHECK_ID} cleanTemp scoring From f3664b56ecf96249dc218a53de55019735e571c5 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 22:46:44 +0200 Subject: [PATCH 067/104] Open --- README.md | 10 ++++++++++ include/assume_role | 19 ++++++++++++++++--- prowler | 15 ++++++++++----- 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 3ec882c9..139a93b5 100644 --- a/README.md +++ b/README.md @@ -251,6 +251,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX (i.e.: ProwlerRole) -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T (i.e.: 43200) + -I External 
ID to be used when assuming roles (no mandatory) -h this help ``` @@ -264,6 +265,10 @@ Prowler uses the AWS CLI underneath so it uses the same authentication methods. ./prowler -A 123456789012 -R ProwlerRole ``` +``` +./prowler -A 123456789012 -R ProwlerRole -I 123456 +``` + > *NOTE 1 about Session Duration*: By default it gets credentials valid for 1 hour (3600 seconds). Depending on the mount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T ` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify *"Maximum CLI/API session duration"* for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session). > *NOTE 2 about Session Duration*: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html). @@ -274,6 +279,10 @@ For example, if you want to get only the fails in CSV format from all checks reg ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -b -M cvs -q -g rds ``` +``` +./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -I 123456 -b -M cvs -q -g rds +``` + ### Custom folder for custom checks Flag `-x /my/own/checks` will include any check in that particular directory. To see how to write checks see [Add Custom Checks](#add-custom-checks) section. 
@@ -552,3 +561,4 @@ NOTE: If you are interested in using Prowler for commercial purposes remember th **I'm not related anyhow with CIS organization, I just write and maintain Prowler to help companies over the world to make their cloud infrastructure more secure.** If you want to contact me visit + diff --git a/include/assume_role b/include/assume_role index 4fedfb3d..58b21399 100644 --- a/include/assume_role +++ b/include/assume_role @@ -25,11 +25,24 @@ if [[ $ACCOUNT_TO_ASSUME ]]; then # temporary file where to store credentials TEMP_STS_ASSUMED_FILE=$(mktemp -t prowler.sts_assumed-XXXXXX) + + #Check if external ID has bee provided if so execute with external ID if not ignore + if [[ -z $ROLE_EXTERNAL_ID ]]; then + # assume role command + $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + --role-session-name ProwlerAssessmentSession \ + --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE + else + $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + --role-session-name ProwlerAssessmentSession \ + --duration-seconds $SESSION_DURATION_TO_ASSUME \ + --external-id $ROLE_EXTERNAL_ID > $TEMP_STS_ASSUMED_FILE + fi # assume role command - $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ - --role-session-name ProwlerAssessmentSession \ - --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE + #$AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + # --role-session-name ProwlerAssessmentSession \ + # --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE # if previous command fails exit with the given error from aws-cli # this is likely to be due to session duration limit of 1h in case diff --git a/prowler b/prowler index 022d1b33..1af355db 100755 --- a/prowler +++ b/prowler @@ -85,13 +85,14 @@ USAGE: -R role 
name to assume in the account, requires -A and -T (i.e.: ProwlerRole) -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T - (i.e.: 43200) + (i.e.: 43200) + -I External ID to be used when assuming roles (no mandatory), requires -A and -R. -h this help " exit } -while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSx:A:R:T:" OPTION; do +while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSxI:A:R:T:" OPTION; do case $OPTION in h ) usage @@ -163,6 +164,9 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSx:A:R:T:" OPTION; do R ) ROLE_TO_ASSUME=$OPTARG ;; + I ) + ROLE_EXTERNAL_ID=$OPTARG + ;; T ) SESSION_DURATION_TO_ASSUME=$OPTARG ;; @@ -457,6 +461,10 @@ if [[ $CHECK_ID ]];then exit $EXITCODE fi +execute_all +scoring +cleanTemp + if [[ $ACCOUNT_TO_ASSUME ]]; then # unset env variables with assumed role credentials unset AWS_ACCESS_KEY_ID @@ -465,7 +473,4 @@ if [[ $ACCOUNT_TO_ASSUME ]]; then fi -execute_all -scoring -cleanTemp exit $EXITCODE From 2de49c39409f4b44be3b8152042c5188ae56e087 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 23:55:02 +0200 Subject: [PATCH 068/104] Added more sample commands and updates --- README.md | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 139a93b5..8d71e75a 100644 --- a/README.md +++ b/README.md @@ -31,10 +31,10 @@ Read more about [CIS Amazon Web Services Foundations Benchmark v1.2.0 - 05-23-20 ## Features -It covers hardening and security best practices for all AWS regions related to the next groups: +~140 checks controls covering security best practices across all AWS regions and most of AWS services and related to the next groups: -- Identity and Access Management (22 checks) [group1] -- Logging (9 checks) [group2] +- Identity and Access Management [group1] +- Logging [group2] - Monitoring (14 checks) [group3] - Networking (4 checks) [group4] - CIS Level 1 [cislevel1] @@ -46,14 +46,14 @@ It 
covers hardening and security best practices for all AWS regions related to t - Trust Boundaries [trustboundaries] Read more [here](#trustboundaries-checks) -For a comprehensive list and resolution look at the guide on the link above. - With Prowler you can: - get a colorful or monochrome report -- a CSV format report for diff -- run specific checks without having to run the entire report -- check multiple AWS accounts in parallel +- a CSV, JSON or JSON ASFF format report +- send findings directly to Security Hub +- run specific checks +- check multiple AWS accounts in parallel or sequentially +- and more! Read examples below ## Requirements and Installation @@ -283,6 +283,19 @@ For example, if you want to get only the fails in CSV format from all checks reg ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -I 123456 -b -M cvs -q -g rds ``` +### Assume Role and across all accounts in AWS Organizations: + +If you want to run Prowler or just a check or a group across all accounts of AWS Organizations you can do this: + +First get a list of accounts: +``` +ACCOUNTS_IN_ORGS=$(aws organizations list-accounts --query Accounts[*].Id --output text) +``` +Then run Prowler to assume a role (same in all members) per each account, in this example it is just running one particular check: +``` +for accountId in $ACCOUNTS_IN_ORGS; do ./prowler -A $accountId -R RemoteRoleToAssume -c extra79; done +``` + ### Custom folder for custom checks Flag `-x /my/own/checks` will include any check in that particular directory. To see how to write checks see [Add Custom Checks](#add-custom-checks) section. 
From f065beb93b58fa28d3d416e1b5e6e957e18bfd67 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Tue, 14 Apr 2020 23:57:55 +0200 Subject: [PATCH 069/104] Fixed title in group16_trustboundaries --- groups/group16_trustboundaries | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/groups/group16_trustboundaries b/groups/group16_trustboundaries index 2c6875fc..93aa07ba 100644 --- a/groups/group16_trustboundaries +++ b/groups/group16_trustboundaries @@ -13,7 +13,7 @@ GROUP_ID[16]='trustboundaries' GROUP_NUMBER[16]='16.0' -GROUP_TITLE[16]='Find cross-account trust boundaries - [trustboundaries] ****************************' +GROUP_TITLE[16]='Find cross-account trust boundaries - [trustboundaries] *******' GROUP_RUN_BY_DEFAULT[16]='N' # run it when execute_all is called GROUP_CHECKS[16]='extra789,extra790' From 994390351eff80fa6b35ec4419bf3ef859a06480 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Wed, 15 Apr 2020 02:36:16 +0100 Subject: [PATCH 070/104] Add the ability to generate JUnit XML reports with a -J flag If the -J flag is passed, generate JUnit XML reports for each check, in-line with how Java tools generate JUnit reports. Check section numbers equate to 'root packages', checks are second-level packages, each check equates to a testsuite (mirroring Java where each test class is a testsuite) and each pass/fail of a check equates to a testcase Time the execution of each check and include this in the report Include properties (Prowler version, check level etc.) 
in-line with standard JUnit files XML escape all strings for safety Detect if a user has GNU coreutils installed on Mac OS X, but not as their default, switching to using gdate for date commands if so, as it has more features, including getting dates in milliseconds Add prowler-output, junit-reports and VSCode files to .gitignore Update README to include JUnit info, address markdownlint warnings Remove unused arguments to jq in generateJsonAsffOutput Fixes #537 --- .gitignore | 11 ++++- README.md | 83 +++++++++++++++++++++++++----------- include/junit_integration | 89 +++++++++++++++++++++++++++++++++++++++ include/os_detector | 79 +++++++++++++++++++++++++++------- include/outputs | 30 +++++++------ prowler | 52 +++++++++++++++++------ 6 files changed, 277 insertions(+), 67 deletions(-) create mode 100644 include/junit_integration diff --git a/.gitignore b/.gitignore index c80f14d6..3d433f87 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,13 @@ tags [._]*.un~ # MacOs DS_Store -*.DS_Store \ No newline at end of file +*.DS_Store + +# Prowler output +prowler-output-* + +# JUnit Reports +junit-reports/ + +# VSCode files +.vscode/ diff --git a/README.md b/README.md index 8d71e75a..4b07afa6 100644 --- a/README.md +++ b/README.md @@ -45,7 +45,6 @@ Read more about [CIS Amazon Web Services Foundations Benchmark v1.2.0 - 05-23-20 - HIPAA [hipaa] Read more [here](#hipaa-checks) - Trust Boundaries [trustboundaries] Read more [here](#trustboundaries-checks) - With Prowler you can: - get a colorful or monochrome report @@ -68,6 +67,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX AWS-CLI can be also installed it using "brew", "apt", "yum" or manually from , but `ansi2html` and `detect-secrets` has to be installed using `pip`. You will need to install `jq` to get more accuracy in some checks. 
- Make sure jq is installed (example below with "apt" but use a valid package manager for your OS): + ```sh sudo apt install jq ``` @@ -84,7 +84,9 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh aws configure ``` + or + ```sh export AWS_ACCESS_KEY_ID="ASXXXXXXX" export AWS_SECRET_ACCESS_KEY="XXXXXXXXX" @@ -110,7 +112,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX Use `-l` to list all available checks and group of checks (sections) - If you want to avoid installing dependences run it using Docker: + If you want to avoid installing dependencies run it using Docker: ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest @@ -127,16 +129,21 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh ./prowler -c check310 ``` + With Docker: + ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest "-c check310" ``` or multiple checks separated by comma: + ```sh ./prowler -c check310,check722 ``` + or all checks but some of them: + ```sh ./prowler -E check42,check43 ``` @@ -152,25 +159,31 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ```sh ./prowler -g group1 # for iam related checks ``` + or exclude some checks in the group: + ```sh ./prowler -g group4 -E check42,check43 ``` Valid check numbers are based on the AWS CIS Benchmark guide, so 1.1 is check11 and 3.10 is check310 -### Save your reports +### Save your reports 1. 
If you want to save your report for later analysis thare are different ways, natively (supported text, mono, csv, json and json-asff see note below for more info): ```sh ./prowler -M csv ``` + or with multiple formats at the same time: + ```sh ./prowler -M csv,json,json-asff ``` + or just a group of checks in multiple formats: + ```sh ./prowler -g gdpr -M csv,json,json-asff ``` @@ -190,7 +203,13 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler | ansi2html -la > report.html ``` - >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. + To generate JUnit report files add `-J`. This can be combined with any format. Files are written inside a prowler root directory named `junit-reports`: + + ```sh + ./prowler -J + ``` + + >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. or save your report in a S3 bucket (this only works for text or mono, for csv, json or json-asff it has to be copied afterwards): @@ -213,7 +232,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX 1. For help use: - ``` + ```sh ./prowler -h USAGE: @@ -243,6 +262,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -V show version number & exit -s show scoring report -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") + -J generate JUnit reports, readable by Jenkins or other CI tools. 
Files are written to ./junit-reports -x specify external directory with custom checks (i.e. /my/own/checks, files must start by check) -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T @@ -261,11 +281,11 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX Prowler uses the AWS CLI underneath so it uses the same authentication methods. However, there are a few ways to run Prowler against multiple accounts using IAM Assume Role feature depending on each use case. You can just set up your custom profile inside `~/.aws/config` with all needed information about the role to assume then call it with `./prowler -p your-custom-profile`. Additionally you can use `-A 123456789012` and `-R RemoteRoleToAssume` and Prowler will get those temporary credentials using `aws sts assume-role`, set them up as environment variables and run against that given account. -``` +```sh ./prowler -A 123456789012 -R ProwlerRole ``` -``` +```sh ./prowler -A 123456789012 -R ProwlerRole -I 123456 ``` @@ -275,11 +295,11 @@ Prowler uses the AWS CLI underneath so it uses the same authentication methods. For example, if you want to get only the fails in CSV format from all checks regarding RDS without banner from the AWS Account 123456789012 assuming the role RemoteRoleToAssume and set a fixed session duration of 1h: -``` +```sh ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -b -M csv -q -g rds ``` -``` +```sh ./prowler -A 123456789012 -R RemoteRoleToAssume -T 3600 -I 123456 -b -M csv -q -g rds ``` @@ -304,25 +324,25 @@ Flag `-x /my/own/checks` will include any check in that particular directory. To In order to remove noise and get only FAIL findings there is a `-q` flag that makes Prowler show and log only FAILs. It can be combined with any other option.
-``` +```sh ./prowler -q -M csv -b ``` ## Security Hub integration -Since version v2.3, Prowler supports natively sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. It is as simple as running the commanbd below: +Since version v2.3, Prowler supports natively sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub. With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and now from Prowler. It is as simple as running the command below: + +```sh +./prowler -M json-asff -S +``` - ``` - ./prowler -M json-asff -S - ``` There are two requirements: 1. Security Hub must be enabled for the active region from where you are calling Prowler (if no region is used with `-r` then `us-east-1` is used). It can be enabled by calling `aws securityhub enable-security-hub` 2. As mentioned in section "Custom IAM Policy", to allow Prowler to import its findings to AWS Security Hub you need to add the policy below to the role or user running Prowler: - [iam/prowler-security-hub.json](iam/prowler-security-hub.json) ->Note: to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours. 
- +>Note: to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours. ## How to fix every FAIL @@ -344,7 +364,7 @@ Check your report and fix the issues following all specific guidelines per check If you are using an STS token for AWS-CLI and your session is expired you probably get this error: -``` +```sh A client error (ExpiredToken) occurred when calling the GenerateCredentialReport operation: The security token included in the request is expired ``` @@ -354,16 +374,19 @@ To fix it, please renew your token by authenticating again to the AWS API, see n To run Prowler using a profile that requires MFA you just need to get the session token before hand. Just make sure you use this command: -``` +```sh aws --profile sts get-session-token --duration 129600 --serial-number --token-code --output text - ``` -Once you get your token you can export it as environment variable: ``` + +Once you get your token you can export it as environment variable: + +```sh export AWS_PROFILE=YOUR_AWS_PROFILE export AWS_SESSION_TOKEN=YOUR_NEW_TOKEN AWS_SECRET_ACCESS_KEY=YOUR_SECRET export AWS_ACCESS_KEY_ID=YOUR_KEY ``` + or set manually up your `~/.aws/credentials` file properly. There are some helpfull tools to save time in this process like [aws-mfa-script](https://github.com/asagage/aws-mfa-script) or [aws-cli-mfa](https://github.com/sweharris/aws-cli-mfa). @@ -383,11 +406,13 @@ There are some helpfull tools to save time in this process like [aws-mfa-script] [Prowler-Additions-Policy](iam/prowler-additions-policy.json) Some new and specific checks require Prowler to inherit more permissions than SecurityAudit and ViewOnlyAccess to work properly. In addition to the AWS managed policies, "SecurityAudit" and "ViewOnlyAccess", the user/role you use for checks may need to be granted a custom policy with a few more read-only permissions (to support additional services mostly). 
Here is an example policy with the additional rights, "Prowler-Additions-Policy" (see below bootstrap script for set it up): + - [iam/prowler-additions-policy.json](iam/prowler-additions-policy.json) [Prowler-Security-Hub Policy](iam/prowler-security-hub.json) Allows Prowler to import its findings to [AWS Security Hub](https://aws.amazon.com/security-hub). More information in [Security Hub integration](#security-hub-integration): + - [iam/prowler-security-hub.json](iam/prowler-security-hub.json) ### Bootstrap Script @@ -418,7 +443,7 @@ Some of these checks look for publicly facing resources may not actually be full To list all existing checks please run the command below: -``` +```sh ./prowler -l ``` @@ -474,6 +499,7 @@ With this group of checks, Prowler shows results of controls related to the "Sec More information on the original PR is [here](https://github.com/toniblyx/prowler/issues/227). ### Note on Business Associate Addendum's (BAA) + Under the HIPAA regulations, cloud service providers (CSPs) such as AWS are considered business associates. The Business Associate Addendum (BAA) is an AWS contract that is required under HIPAA rules to ensure that AWS appropriately safeguards protected health information (PHI). The BAA also serves to clarify and limit, as appropriate, the permissible uses and disclosures of PHI by AWS, based on the relationship between AWS and our customers, and the activities or services being performed by AWS. Customers may use any AWS service in an account designated as a HIPAA account, but they should only process, store, and transmit protected health information (PHI) in the HIPAA-eligible services defined in the Business Associate Addendum (BAA). For the latest list of HIPAA-eligible AWS services, see [HIPAA Eligible Services Reference](https://aws.amazon.com/compliance/hipaa-eligible-services-reference/). 
More information on AWS & HIPAA can be found [here](https://aws.amazon.com/compliance/hipaa-compliance/) @@ -489,7 +515,9 @@ The `hipaa` group of checks uses existing and extra checks. To get a HIPAA repor ``` ## Trust Boundaries Checks + ### Definition and Terms + The term "trust boundary" is originating from the threat modelling process and the most popular contributor Adam Shostack and author of "Threat Modeling: Designing for Security" defines it as following ([reference](https://adam.shostack.org/uncover.html)): > Trust boundaries are perhaps the most subjective of all: these represent the border between trusted and untrusted elements. Trust is complex. You might trust your mechanic with your car, your dentist with your teeth, and your banker with your money, but you probably don't trust your dentist to change your spark plugs. @@ -498,17 +526,23 @@ AWS is made to be flexible for service links within and between different AWS ac This group of checks helps to analyse a particular AWS account (subject) on existing links to other AWS accounts across various AWS services, in order to identify untrusted links. -### Run +### Run + To give it a quick shot just call: + ```sh ./prowler -g trustboundaries ``` + ### Scenarios + Currently this check group supports two different scenarios: - 1. Single account environment: no action required, the configuration is happening automatically for you. - 2. Multi account environment: in case you environment has multiple trusted and known AWS accounts you maybe want to append them manually to [groups/group16_trustboundaries](groups/group16_trustboundaries) as a space separated list into `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable, then just run prowler. + +1. Single account environment: no action required, the configuration is happening automatically for you. +2. 
Multi account environment: in case you environment has multiple trusted and known AWS accounts you maybe want to append them manually to [groups/group16_trustboundaries](groups/group16_trustboundaries) as a space separated list into `GROUP_TRUSTBOUNDARIES_TRUSTED_ACCOUNT_IDS` variable, then just run prowler. ### Coverage + Current coverage of Amazon Web Service (AWS) taken from [here](https://docs.aws.amazon.com/whitepapers/latest/aws-overview/introduction.html): | Topic | Service | Trust Boundary | |---------------------------------|------------|---------------------------------------------------------------------------| @@ -518,6 +552,7 @@ Current coverage of Amazon Web Service (AWS) taken from [here](https://docs.aws. All ideas or recommendations to extend this group are very welcome [here](https://github.com/toniblyx/prowler/issues/new/choose). ### Detailed Explanation of the Concept + The diagrams depict two common scenarios, single account and multi account environments. Every circle represents one AWS account. The dashed line represents the trust boundary, that separates trust and untrusted AWS accounts. diff --git a/include/junit_integration b/include/junit_integration new file mode 100644 index 00000000..b0134d66 --- /dev/null +++ b/include/junit_integration @@ -0,0 +1,89 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+ +# Generates JUnit XML reports which can be read by Jenkins or other CI tools + +JUNIT_OUTPUT_DIRECTORY="junit-reports" + +xml_escape() { + sed 's/&/\&/g; s//\>/g; s/\"/\"/g; s/'"'"'/\'/g' <<< "$1" +} + +prepare_junit_output() { + # Remove any JUnit output from previous runs + rm -rf "$JUNIT_OUTPUT_DIRECTORY" + mkdir "$JUNIT_OUTPUT_DIRECTORY" + echo "" + echo "$NOTICE Writing JUnit XML reports to $PROWLER_DIR/$JUNIT_OUTPUT_DIRECTORY $NORMAL" +} + +prepare_junit_check_output() { + # JUnit test cases must be named uniquely, but each Prowler check can output many times due to multiple resources, + # therefore append an index value to the test case name to provide uniqueness, reset it to 1 before starting this check + JUNIT_CHECK_INDEX=1 + # To match JUnit behaviour in Java, and ensure that an aborted execution does not leave a partially written and therefore invalid XML file, + # output a JUnit XML file per check + JUNIT_OUTPUT_FILE="$JUNIT_OUTPUT_DIRECTORY/$1.xml" + printf '%s\n' \ + "" \ + "" \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + " " \ + > "$JUNIT_OUTPUT_FILE" + JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds) +} + +finalise_junit_check_output() { + echo '' >> "$JUNIT_OUTPUT_FILE" +} + +output_junit_success() { + output_junit_test_case "$1" "$(xml_escape "$1")" +} + +output_junit_info() { + # Nothing to output for JUnit for this level of message, but reset the check timer for timing the next check + JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds) +} + +output_junit_failure() { + output_junit_test_case "$1" "" +} + +get_junit_classname() { + #
. naturally follows a Java package structure, so it is suitable as a package name + echo "$TITLE_ID" +} + +output_junit_test_case() { + local time_now + local test_case_duration + time_now=$(get_time_in_milliseconds) + # JUnit test case time values are in seconds, so divide by 1000 using e-3 to convert from milliseconds without losing accuracy due to non-floating point arithmetic + test_case_duration=$(printf "%.3f" "$(("$time_now" - "$JUNIT_CHECK_START_TIME"))e-3") + printf '%s\n' \ + " " \ + " $2" \ + " " >> "$JUNIT_OUTPUT_FILE" + # Reset the check timer for timing the next check + JUNIT_CHECK_START_TIME=$(get_time_in_milliseconds) + ((JUNIT_CHECK_INDEX+=1)) +} diff --git a/include/os_detector b/include/os_detector index 565c6b25..aa071564 100644 --- a/include/os_detector +++ b/include/os_detector @@ -11,17 +11,19 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. +DATE_CMD="date" + gnu_how_older_from_today() { DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_FROM_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) + TODAY_IN_DAYS=$("$DATE_CMD" -d "$("$DATE_CMD" +%Y-%m-%d)" +%s) + DATE_FROM_IN_DAYS=$("$DATE_CMD" -d $DATE_TO_COMPARE +%s) DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) echo $DAYS_SINCE } bsd_how_older_from_today() { DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date +%s) - DATE_FROM_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) + TODAY_IN_DAYS=$("$DATE_CMD" +%s) + DATE_FROM_IN_DAYS=$("$DATE_CMD" -jf %Y-%m-%d $DATE_TO_COMPARE +%s) DAYS_SINCE=$((($TODAY_IN_DAYS - $DATE_FROM_IN_DAYS )/60/60/24)) echo $DAYS_SINCE } @@ -31,13 +33,13 @@ bsd_how_older_from_today() { gnu_timestamp_to_date() { # remove fractions of a second TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + OUTPUT_DATE=$("$DATE_CMD" -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') echo $OUTPUT_DATE } 
bsd_timestamp_to_date() { # remove fractions of a second TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$(date -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + OUTPUT_DATE=$("$DATE_CMD" -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') echo $OUTPUT_DATE } @@ -50,15 +52,15 @@ bsd_decode_report() { gnu_how_many_days_from_today() { DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date -d "$(date +%Y-%m-%d)" +%s) - DATE_IN_DAYS=$(date -d $DATE_TO_COMPARE +%s) + TODAY_IN_DAYS=$("$DATE_CMD" -d "$("$DATE_CMD" +%Y-%m-%d)" +%s) + DATE_IN_DAYS=$("$DATE_CMD" -d $DATE_TO_COMPARE +%s) DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) echo $DAYS_TO } bsd_how_many_days_from_today() { DATE_TO_COMPARE=$1 - TODAY_IN_DAYS=$(date +%s) - DATE_IN_DAYS=$(date -jf %Y-%m-%d $DATE_TO_COMPARE +%s) + TODAY_IN_DAYS=$("$DATE_CMD" +%s) + DATE_IN_DAYS=$("$DATE_CMD" -jf %Y-%m-%d $DATE_TO_COMPARE +%s) DAYS_TO=$((( $DATE_IN_DAYS - $TODAY_IN_DAYS )/60/60/24)) echo $DAYS_TO } @@ -66,17 +68,32 @@ bsd_how_many_days_from_today() { gnu_get_date_previous_than_months() { MONTHS_TO_COMPARE=$1 MONTHS_TO_COMPARE_IN_SECONDS=$(( 60 * 60 * 24 * 31 * $MONTHS_TO_COMPARE )) - CURRENTSECS=$(date +%s) + CURRENTSECS=$("$DATE_CMD" +%s) STARTDATEINSECS=$(( $CURRENTSECS - $MONTHS_TO_COMPARE_IN_SECONDS )) - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -d @$STARTDATEINSECS '+%Y-%m-%d') + DATE_BEFORE_MONTHS_TO_COMPARE=$("$DATE_CMD" -d @$STARTDATEINSECS '+%Y-%m-%d') echo $DATE_BEFORE_MONTHS_TO_COMPARE } bsd_get_date_previous_than_months() { MONTHS_TO_COMPARE=$1 - DATE_BEFORE_MONTHS_TO_COMPARE=$(date -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') + DATE_BEFORE_MONTHS_TO_COMPARE=$("$DATE_CMD" -v -$(echo $MONTHS_TO_COMPARE)m '+%Y-%m-%d') echo $DATE_BEFORE_MONTHS_TO_COMPARE } +gnu_get_time_in_milliseconds() { + "$DATE_CMD" +%s%3N +} +bsd_get_time_in_milliseconds() { + # BSD date does not support outputting milliseconds, so pad with zeros + "$DATE_CMD" +%s000 +} + +gnu_get_iso8601_timestamp() { + "$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ" +} 
+bsd_get_iso8601_timestamp() { + "$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ" +} + gnu_test_tcp_connectivity() { HOST=$1 PORT=$2 @@ -114,16 +131,28 @@ if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then get_date_previous_than_months() { gnu_get_date_previous_than_months "$1" } + get_time_in_milliseconds() { + gnu_get_time_in_milliseconds + } + get_iso8601_timestamp() { + gnu_get_iso8601_timestamp + } test_tcp_connectivity() { gnu_test_tcp_connectivity "$1" "$2" "$3" } elif [[ "$OSTYPE" == "darwin"* ]]; then # BSD/OSX commands compatibility TEMP_REPORT_FILE=$(mktemp -t prowler.cred_report-XXXXXX) - # It is possible that the user has installed GNU coreutils, replacing the default Mac OS X BSD tools with - # GNU coreutils equivalents. Only GNU date allows --version as a valid argument, so use the validity of this argument + # It is possible that the user has installed GNU coreutils on OS X. By default, this will make GNU commands + # available with a 'g' prefix, e.g. 'gdate'. Test if this is present, and use it if so, as it supports more features. + # The user also may have replaced the default Mac OS X BSD tools with the GNU coreutils equivalents. 
+ # Only GNU date allows --version as a valid argument, so use the validity of this argument # as a means to detect that coreutils is installed and is overriding the default tools - if date --version >/dev/null 2>&1 ; then + GDATE=$(which gdate) + if [ -n "${GDATE}" ]; then + DATE_CMD="gdate" + fi + if "$DATE_CMD" --version >/dev/null 2>&1 ; then how_older_from_today() { gnu_how_older_from_today "$1" } @@ -139,6 +168,12 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then get_date_previous_than_months() { gnu_get_date_previous_than_months "$1" } + get_time_in_milliseconds() { + gnu_get_time_in_milliseconds + } + get_iso8601_timestamp() { + gnu_get_iso8601_timestamp + } else how_older_from_today() { bsd_how_older_from_today "$1" @@ -155,6 +190,12 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then get_date_previous_than_months() { bsd_get_date_previous_than_months "$1" } + get_time_in_milliseconds() { + bsd_get_time_in_milliseconds + } + get_iso8601_timestamp() { + bsd_get_iso8601_timestamp + } fi test_tcp_connectivity() { bsd_test_tcp_connectivity "$1" "$2" "$3" @@ -177,6 +218,12 @@ elif [[ "$OSTYPE" == "cygwin" ]]; then get_date_previous_than_months() { gnu_get_date_previous_than_months "$1" } + get_time_in_milliseconds() { + gnu_get_time_in_milliseconds + } + get_iso8601_timestamp() { + gnu_get_iso8601_timestamp + } test_tcp_connectivity() { gnu_test_tcp_connectivity "$1" "$2" "$3" } diff --git a/include/outputs b/include/outputs index b1e23265..a87f8342 100644 --- a/include/outputs +++ b/include/outputs @@ -16,7 +16,7 @@ EXTENSION_CSV="csv" EXTENSION_JSON="json" EXTENSION_ASFF="asff-json" -EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation +EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") OUTPUT_FILE_NAME=prowler-output-$OUTPUT_DATE @@ -27,6 +27,9 @@ textPass(){ fi PASS_COUNTER=$((PASS_COUNTER+1)) + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + output_junit_success "$1" + fi if [[ 
"${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 @@ -34,14 +37,14 @@ textPass(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV - fi + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") - echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" fi @@ -56,6 +59,9 @@ textInfo(){ return fi + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + output_junit_info "$1" + fi if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 @@ -76,6 +82,9 @@ textInfo(){ textFail(){ FAIL_COUNTER=$((FAIL_COUNTER+1)) EXITCODE=3 + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + output_junit_failure "$1" + fi if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then if [[ $2 ]]; then REPREGION=$2 @@ -86,7 +95,7 @@ textFail(){ echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + 
generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") @@ -131,7 +140,7 @@ textTitle(){ esac if [[ "${MODES[@]}" =~ "csv" ]]; then - >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else @@ -156,7 +165,7 @@ generateJsonOutput(){ --arg ITEM_LEVEL "$ITEM_LEVEL" \ --arg TITLE_ID "$TITLE_ID" \ --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ + --arg TIMESTAMP "$(get_iso8601_timestamp)" \ -n '{ "Profile": $PROFILE, "Account Number": $ACCOUNT_NUM, @@ -178,20 +187,17 @@ generateJsonAsffOutput(){ local status=$2 local severity=$3 jq -M -c \ - --arg PROFILE "$PROFILE" \ --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ --arg TITLE_TEXT "$TITLE_TEXT" \ --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \ --arg UNIQUE_ID "$(LC_ALL=C echo -e "${message}" | tr -cs '[:alnum:]._~-\n' '_')" \ --arg STATUS "$status" \ --arg SEVERITY "$severity" \ - --arg SCORED "$ITEM_SCORED" \ - --arg ITEM_LEVEL "$ITEM_LEVEL" \ --arg TITLE_ID "$TITLE_ID" \ --arg TYPE "$ASFF_TYPE" \ --arg RESOURCE_TYPE "$ASFF_RESOURCE_TYPE" \ --arg REPREGION "$REPREGION" \ - --arg TIMESTAMP $(date -u +"%Y-%m-%dT%H:%M:%SZ") \ + --arg TIMESTAMP "$(get_iso8601_timestamp)" \ --arg PROWLER_VERSION "$PROWLER_VERSION" \ -n '{ "SchemaVersion": "2018-10-08", diff --git a/prowler b/prowler index 32b1dbb3..bca29ef0 100755 --- a/prowler +++ b/prowler @@ -45,6 +45,7 @@ SEP=',' KEEPCREDREPORT=0 EXITCODE=0 SEND_TO_SECURITY_HUB=0 +GENERATE_JUNIT=0 SCRIPT_START_TIME=$( date -u +"%Y-%m-%dT%H:%M:%S%z" ) TITLE_ID="" TITLE_TEXT="CALLER ERROR - UNSET TITLE" @@ -78,21 +79,22 @@ USAGE: -V show version number & exit -s show scoring report -S send check output to AWS Security Hub - only valid when the 
output mode is json-asff (i.e. "-M json-asff -S") + -J generate JUnit reports, readable by Jenkins or other CI tools. Files are written to ./junit-reports -x specify external directory with custom checks (i.e. /my/own/checks, files must start by "check") -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T (i.e.: 123456789012) -R role name to assume in the account, requires -A and -T (i.e.: ProwlerRole) - -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T - (i.e.: 43200) - -I External ID to be used when assuming roles (no mandatory), requires -A and -R. + -T session duration given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T + (i.e.: 43200) + -I External ID to be used when assuming roles (not mandatory), requires -A and -R. -h this help " exit } -while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSxI:A:R:T:" OPTION; do +while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSJxI:A:R:T:" OPTION; do case $OPTION in h ) usage @@ -152,6 +154,9 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSxI:A:R:T:" OPTION; do S ) SEND_TO_SECURITY_HUB=1 ;; + J ) + GENERATE_JUNIT=1 + ;; x ) EXTERNAL_CHECKS_PATH=$OPTARG ;; @@ -206,6 +211,7 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT . $PROWLER_DIR/include/assume_role . $PROWLER_DIR/include/connection_tests . $PROWLER_DIR/include/securityhub_integration +. 
$PROWLER_DIR/include/junit_integration # Get a list of all available AWS Regions REGIONS=$($AWSCLI ec2 describe-regions --query 'Regions[].RegionName' \ @@ -252,8 +258,8 @@ show_group_title() { # Function to execute the check execute_check() { - # See if this is an alternate name for a check - # for example, we might have been passed 1.01 which is another name for 1.1 + # See if this is an alternate name for a check + # for example, we might have been passed 1.01 which is another name for 1.1 local alternate_name_var=CHECK_ALTERNATE_$1 local alternate_name=${!alternate_name_var} # See if this check defines an ASFF Type, if so, use this, falling back to a sane default @@ -266,16 +272,23 @@ execute_check() { ASFF_RESOURCE_TYPE="${!asff_resource_type_var:-AwsAccount}" # Generate the credential report, only if it is group1 related which checks we # run so that the checks can safely assume it's available - if [ ${alternate_name} ];then + if [ ${alternate_name} ];then if [[ ${alternate_name} == check1* || ${alternate_name} == extra71 ]];then if [ ! -s $TEMP_REPORT_FILE ];then genCredReport saveReport fi fi - show_check_title ${alternate_name} - ${alternate_name} - else + show_check_title ${alternate_name} + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + prepare_junit_check_output "$1" + fi + # Execute the check + ${alternate_name} + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + finalise_junit_check_output "$1" + fi + else # Check to see if this is a real check local check_id_var=CHECK_ID_$1 local check_id=${!check_id_var} @@ -286,13 +299,20 @@ execute_check() { saveReport fi fi - show_check_title $1 - $1 + show_check_title $1 + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + prepare_junit_check_output "$1" + fi + # Execute the check + $1 + if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + finalise_junit_check_output "$1" + fi else textFail "ERROR! Use a valid check name (i.e. 
check41 or extra71)"; exit $EXITCODE fi - fi + fi } # Function to execute all checks in a group @@ -415,7 +435,7 @@ if [[ $PRINTGROUPSONLY == "1" ]]; then fi # Check that jq is installed for JSON outputs -if [[ "$MODE" == "json" || "$MODE" == "json-asff" ]]; then +if [[ ${MODES[@]} =~ "json" || ${MODES[@]} =~ "json-asff" ]]; then . $PROWLER_DIR/include/jq_detector fi @@ -423,6 +443,10 @@ if [[ "$SEND_TO_SECURITY_HUB" -eq 1 ]]; then checkSecurityHubCompatibility fi +if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + prepare_junit_output +fi + # Gather account data / test aws cli connectivity getWhoami From fa17829832019873c587411423df5767b515c046 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Wed, 15 Apr 2020 12:52:48 +0100 Subject: [PATCH 071/104] Fix arithmetic expression for calculating test duration --- include/junit_integration | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/junit_integration b/include/junit_integration index b0134d66..375531da 100644 --- a/include/junit_integration +++ b/include/junit_integration @@ -78,7 +78,7 @@ output_junit_test_case() { local test_case_duration time_now=$(get_time_in_milliseconds) # JUnit test case time values are in seconds, so divide by 1000 using e-3 to convert from milliseconds without losing accuracy due to non-floating point arithmetic - test_case_duration=$(printf "%.3f" "$(("$time_now" - "$JUNIT_CHECK_START_TIME"))e-3") + test_case_duration=$(printf "%.3f" "$((time_now - JUNIT_CHECK_START_TIME))e-3") printf '%s\n' \ " " \ " $2" \ From dc31adcc18b1b83b41e06f718c49ce67e60aa679 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Wed, 15 Apr 2020 13:42:33 +0100 Subject: [PATCH 072/104] Rename JUnit XML files to match the Java convention - with a 'TEST-' prefix --- include/junit_integration | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/junit_integration b/include/junit_integration index 375531da..479a7118 100644 --- a/include/junit_integration +++ b/include/junit_integration @@ 
-33,7 +33,7 @@ prepare_junit_check_output() { JUNIT_CHECK_INDEX=1 # To match JUnit behaviour in Java, and ensure that an aborted execution does not leave a partially written and therefore invalid XML file, # output a JUnit XML file per check - JUNIT_OUTPUT_FILE="$JUNIT_OUTPUT_DIRECTORY/$1.xml" + JUNIT_OUTPUT_FILE="$JUNIT_OUTPUT_DIRECTORY/TEST-$1.xml" printf '%s\n' \ "" \ "" \ From 172f4b2681e3d5aad5cfc416f47c6f7d52ea8c86 Mon Sep 17 00:00:00 2001 From: Alex Gray Date: Wed, 15 Apr 2020 15:19:44 -0400 Subject: [PATCH 073/104] Only check latest version of task definition --- checks/check_extra768 | 7 ++++-- .../get_latest_ecs_task_definition_version.py | 23 +++++++++++++++++++ 2 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 checks/get_latest_ecs_task_definition_version.py diff --git a/checks/check_extra768 b/checks/check_extra768 index b357c72e..94089008 100644 --- a/checks/check_extra768 +++ b/checks/check_extra768 @@ -23,10 +23,13 @@ extra768(){ # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi - + DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" textInfo "Looking for secrets in ECS task definitions' environment variables across all regions... 
" for regx in $REGIONS; do - LIST_OF_TASK_DEFINITIONS=$($AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx --query taskDefinitionArns[*] --output text) + # Get a list of ALL Task Definitions: + $AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx | jq -r .taskDefinitionArns[] > ALL_TASK_DEFINITIONS.txt + # Filter it down to ONLY the latest version of that task definition: + LIST_OF_TASK_DEFINITIONS=$(python ${DIR}/get_latest_ecs_task_definition_version.py -f ALL_TASK_DEFINITIONS.txt) if [[ $LIST_OF_TASK_DEFINITIONS ]]; then for taskDefinition in $LIST_OF_TASK_DEFINITIONS;do IFS='/' read -r -a splitArn <<< "$taskDefinition" diff --git a/checks/get_latest_ecs_task_definition_version.py b/checks/get_latest_ecs_task_definition_version.py new file mode 100644 index 00000000..d096d6fb --- /dev/null +++ b/checks/get_latest_ecs_task_definition_version.py @@ -0,0 +1,23 @@ +import argparse + +def parseArgs(): + parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('-f', help='file containing list of ecs task definitions', required=True) + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parseArgs() + family = {} + with open(args.f, 'r') as fd: + for line in fd: + l = line.strip() + family_name = l[:l.rfind(':')] + version_int = int(l[l.rfind(':') + 1:]) + if family_name not in family: + family[family_name] = version_int + if family[family_name] < version_int: + family[family_name] = version_int + for family, version in family.items(): + print('{}:{}'.format(family, version)) From 78f649bd6594698d2c8a11e01ed865bd2e21bdd6 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Wed, 15 Apr 2020 23:36:40 +0100 Subject: [PATCH 074/104] Replace -J flag with junit-xml output format Rearrange output functions so they support outputting text alongside other formats, if specified Add a convenience function for checking if JUnit output is enabled Move monochrome setting into loop so it 
better supports multiple formats Update README --- README.md | 9 ++-- include/colors | 13 +++-- include/junit_integration | 8 +++ include/outputs | 109 ++++++++++++++++++-------------------- prowler | 19 +++---- 5 files changed, 78 insertions(+), 80 deletions(-) diff --git a/README.md b/README.md index 4b07afa6..18072315 100644 --- a/README.md +++ b/README.md @@ -170,7 +170,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ### Save your reports -1. If you want to save your report for later analysis thare are different ways, natively (supported text, mono, csv, json and json-asff see note below for more info): +1. If you want to save your report for later analysis thare are different ways, natively (supported text, mono, csv, json, json-asff and junit-xml see note below for more info): ```sh ./prowler -M csv @@ -203,10 +203,10 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler | ansi2html -la > report.html ``` - To generate JUnit report files add `-J`. This can be combined with any format. Files are written inside a prowler root directory named `junit-reports`: + To generate JUnit report files, include the junit-xml format. This can be combined with any other format. Files are written inside a prowler root directory named `junit-reports`: ```sh - ./prowler -J + ./prowler -M text,junit-xml ``` >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. 
@@ -249,7 +249,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, json-asff, csv. They can be used combined comma separated. + -M output mode: text (default), mono, json, json-asff, junit-xml, csv. They can be used combined comma separated. (separator is ","; data is on stdout; progress on stderr). -k keep the credential report -n show check numbers to sort easier @@ -262,7 +262,6 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -V show version number & exit -s show scoring report -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") - -J generate JUnit reports, readable by Jenkins or other CI tools. Files are written to ./junit-reports -x specify external directory with custom checks (i.e. /my/own/checks, files must start by check) -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T diff --git a/include/colors b/include/colors index 2ae6f77f..7bb9f84e 100644 --- a/include/colors +++ b/include/colors @@ -14,16 +14,15 @@ IFS=',' read -ra MODES <<< "${MODE}" for MODE in "${MODES[@]}"; do - if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" ]]; then - echo -e "${OPTRED}ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json or json-asff. ./prowler -h for help" + if [[ "$MODE" != "mono" && "$MODE" != "text" && "$MODE" != "csv" && "$MODE" != "json" && "$MODE" != "json-asff" && "$MODE" != "junit-xml" ]]; then + echo -e "${OPTRED}ERROR!$OPTNORMAL Invalid output mode. Choose text, mono, csv, json, json-asff or junit-xml. 
./prowler -h for help" EXITCODE=1 exit $EXITCODE fi -done - -if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then - MONOCHROME=1 -fi + if [[ "$MODE" == "mono" || "$MODE" == "csv" || "$MODE" == "json" || "$MODE" == "json-asff" ]]; then + MONOCHROME=1 + fi +done if [[ $MONOCHROME -eq 1 ]]; then # Colors diff --git a/include/junit_integration b/include/junit_integration index 479a7118..54bcd892 100644 --- a/include/junit_integration +++ b/include/junit_integration @@ -15,6 +15,14 @@ JUNIT_OUTPUT_DIRECTORY="junit-reports" +is_junit_output_enabled() { + if [[ ${MODES[@]} =~ "junit-xml" ]]; then + true + else + false + fi +} + xml_escape() { sed 's/&/\&/g; s//\>/g; s/\"/\"/g; s/'"'"'/\'/g' <<< "$1" } diff --git a/include/outputs b/include/outputs index a87f8342..0b56ef88 100644 --- a/include/outputs +++ b/include/outputs @@ -27,29 +27,28 @@ textPass(){ fi PASS_COUNTER=$((PASS_COUNTER+1)) - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if [[ $2 ]]; then + REPREGION=$2 + else + REPREGION=$REGION + fi + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + fi + if [[ "${MODES[@]}" =~ "json-asff" ]]; then + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF + if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then + sendToSecurityHub "${JSON_ASFF_OUTPUT}" + fi + fi + if is_junit_output_enabled; then output_junit_success "$1" fi - if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - if [[ "${MODES[@]}" =~ "csv" ]]; then - echo 
"$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV - fi - if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON - fi - if [[ "${MODES[@]}" =~ "json-asff" ]]; then - JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") - echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF - if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then - sendToSecurityHub "${JSON_ASFF_OUTPUT}" - fi - fi - else + if [[ "${MODES[@]}" =~ "text" ]]; then echo " $OK PASS!$NORMAL $1" fi } @@ -59,22 +58,21 @@ textInfo(){ return fi - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if [[ $2 ]]; then + REPREGION=$2 + else + REPREGION=$REGION + fi + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Info" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + fi + if is_junit_output_enabled; then output_junit_info "$1" fi - if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV - fi - if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Info" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON - fi - else + if [[ "${MODES[@]}" =~ "text" ]]; then echo " $NOTICE INFO! 
$1 $NORMAL" fi } @@ -82,29 +80,28 @@ textInfo(){ textFail(){ FAIL_COUNTER=$((FAIL_COUNTER+1)) EXITCODE=3 - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if [[ $2 ]]; then + REPREGION=$2 + else + REPREGION=$REGION + fi + if [[ "${MODES[@]}" =~ "csv" ]]; then + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi + if [[ "${MODES[@]}" =~ "json" ]]; then + generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + fi + if [[ "${MODES[@]}" =~ "json-asff" ]]; then + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF + if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then + sendToSecurityHub "${JSON_ASFF_OUTPUT}" + fi + fi + if is_junit_output_enabled; then output_junit_failure "$1" fi - if [[ "${MODES[@]}" =~ "csv" || "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then - if [[ $2 ]]; then - REPREGION=$2 - else - REPREGION=$REGION - fi - if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV - fi - if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON - fi - if [[ "${MODES[@]}" =~ "json-asff" ]]; then - JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") - echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF - if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then - sendToSecurityHub "${JSON_ASFF_OUTPUT}" - fi - fi - else + if [[ "${MODES[@]}" =~ "text" ]]; then echo " $BAD FAIL! 
$1 $NORMAL" fi } diff --git a/prowler b/prowler index bca29ef0..f5e097f6 100755 --- a/prowler +++ b/prowler @@ -45,7 +45,6 @@ SEP=',' KEEPCREDREPORT=0 EXITCODE=0 SEND_TO_SECURITY_HUB=0 -GENERATE_JUNIT=0 SCRIPT_START_TIME=$( date -u +"%Y-%m-%dT%H:%M:%S%z" ) TITLE_ID="" TITLE_TEXT="CALLER ERROR - UNSET TITLE" @@ -66,7 +65,7 @@ USAGE: -f specify an AWS region to run checks against (i.e.: us-west-1) -m specify the maximum number of items to return for long-running requests (default: 100) - -M output mode: text (default), mono, json, json-asff, csv. They can be used combined comma separated. + -M output mode: text (default), mono, json, json-asff, junit-xml, csv. They can be used combined comma separated. (separator is ","; data is on stdout; progress on stderr). -k keep the credential report -n show check numbers to sort easier @@ -79,7 +78,6 @@ USAGE: -V show version number & exit -s show scoring report -S send check output to AWS Security Hub - only valid when the output mode is json-asff (i.e. "-M json-asff -S") - -J generate JUnit reports, readable by Jenkins or other CI tools. Files are written to ./junit-reports -x specify external directory with custom checks (i.e. 
/my/own/checks, files must start by "check") -q suppress info messages and passing test output -A account id for the account where to assume a role, requires -R and -T @@ -94,7 +92,7 @@ USAGE: exit } -while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSJxI:A:R:T:" OPTION; do +while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSxI:A:R:T:" OPTION; do case $OPTION in h ) usage @@ -154,9 +152,6 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:enbVsSJxI:A:R:T:" OPTION; do S ) SEND_TO_SECURITY_HUB=1 ;; - J ) - GENERATE_JUNIT=1 - ;; x ) EXTERNAL_CHECKS_PATH=$OPTARG ;; @@ -280,12 +275,12 @@ execute_check() { fi fi show_check_title ${alternate_name} - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if is_junit_output_enabled; then prepare_junit_check_output "$1" fi # Execute the check ${alternate_name} - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if is_junit_output_enabled; then finalise_junit_check_output "$1" fi else @@ -300,12 +295,12 @@ execute_check() { fi fi show_check_title $1 - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if is_junit_output_enabled; then prepare_junit_check_output "$1" fi # Execute the check $1 - if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then + if is_junit_output_enabled; then finalise_junit_check_output "$1" fi else @@ -443,7 +438,7 @@ if [[ "$SEND_TO_SECURITY_HUB" -eq 1 ]]; then checkSecurityHubCompatibility fi -if [[ "${GENERATE_JUNIT}" -eq 1 ]]; then +if is_junit_output_enabled; then prepare_junit_output fi From 6747b208ceec1dc6a02a1db8fe6368c30a748439 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Fri, 17 Apr 2020 15:16:55 +0200 Subject: [PATCH 075/104] Improved extra716 and extra788 --- checks/check_extra716 | 13 +++++++++---- checks/check_extra788 | 4 ++-- include/connection_tests | 15 +++++++++++++-- 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/checks/check_extra716 b/checks/check_extra716 index 8dc3c383..9d664bd1 100644 --- a/checks/check_extra716 +++ b/checks/check_extra716 @@ -11,14 +11,13 @@ # CONDITIONS OF ANY KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations under the License. CHECK_ID_extra716="7.16" -CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public and have cross account access " +CHECK_TITLE_extra716="[extra716] Check if Amazon Elasticsearch Service (ES) domains are set as Public or if it has open policy access" CHECK_SCORED_extra716="NOT_SCORED" CHECK_TYPE_extra716="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra716="AwsElasticsearchDomain" CHECK_ALTERNATE_check716="extra716" extra716(){ - # "Check if Elasticsearch Service domains allow open access (Not Scored) (Not part of CIS benchmark)" for regx in $REGIONS; do LIST_OF_DOMAINS=$($AWSCLI es list-domain-names $PROFILE_OPT --region $regx --query DomainNames --output text) if [[ $LIST_OF_DOMAINS ]]; then @@ -32,11 +31,17 @@ extra716(){ textInfo "$regx: Amazon ES domain $domain is in VPC $ES_DOMAIN_VPC run extra779 to make sure it is not exposed using custom proxy" "$regx" else $AWSCLI es describe-elasticsearch-domain-config --domain-name $domain $PROFILE_OPT --region $regx --query DomainConfig.AccessPolicies.Options --output text > $TEMP_POLICY_FILE 2> /dev/null + # check if the policy has a principal set up + CHECK_ES_POLICY_PRINCIPAL=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS != "*") or ((.Principal|type == "string") and .Principal != "*")) and select(has("Condition") | not))') + if [[ $CHECK_ES_POLICY_PRINCIPAL ]]; then + textPass "$regx: Amazon ES domain $domain does have a Principal set up" "$regx" + fi CHECK_ES_DOMAIN_POLICY_OPEN=$(cat $TEMP_POLICY_FILE | jq -r '. | .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition") | not))') CHECK_ES_DOMAIN_POLICY_HAS_CONDITION=$(cat $TEMP_POLICY_FILE | jq -r '. 
| .Statement[] | select(.Effect == "Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and select(has("Condition")))' ) if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then # get content of IpAddress."aws:SourceIp" and get a clean list LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') + unset CONDITION_HAS_PUBLIC_IP_ARRAY for condition_ip in "${LIST_CONDITION_IPS}";do CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') if [[ $CONDITION_HAS_PRIVATE_IP ]];then @@ -65,13 +70,13 @@ extra716(){ textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and network \"*\") - use extra788 to test AUTH" "$regx" fi if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PUBLIC_IP[@]} ]];then - textFail "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) - use extra788 to test AUTH" "$regx" + textInfo "$regx: Amazon ES domain $domain policy allows access (Principal: \"*\" and Public IP or Network $(echo ${CONDITION_HAS_PUBLIC_IP_ARRAY[@]})) - use extra788 to test AUTH" "$regx" fi else if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION && ${CHECK_ES_DOMAIN_POLICY_CONDITION_PRIVATE_IP[@]} ]];then textInfo "$regx: Amazon ES domain $domain policy allows access from a Private IP or CIDR RFC1918 $(echo ${CONDITION_HAS_PRIVATE_IP_ARRAY[@]})" "$regx" else - textPass "$regx: Amazon ES domain $domain does not allow Anonymous cross account access" "$regx" + textPass "$regx: Amazon ES domain $domain does not allow anonymous access" "$regx" fi fi rm -f $TEMP_POLICY_FILE diff --git a/checks/check_extra788 b/checks/check_extra788 index 6c3c9fd0..1afcb9db 100644 --- a/checks/check_extra788 +++ b/checks/check_extra788 
@@ -68,9 +68,9 @@ extra788(){ textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but ES service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" fi # check for Kibana on port 443 - CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana/api/status") + CHECH_KIBANA_HTTPS=$(curl -m 2 -s -w "%{http_code}" -o /dev/null -X GET "https://$ES_DOMAIN_ENDPOINT/_plugin/kibana") httpStatus $CHECH_KIBANA_HTTPS - if [[ $CHECH_KIBANA_HTTPS -eq "200" ]];then + if [[ $CHECH_KIBANA_HTTPS -eq "200" || $CHECH_KIBANA_HTTPS -eq "301" || $CHECH_KIBANA_HTTPS -eq "302" ]];then textFail "$regx: Amazon ES domain $domain policy allows Anonymous access and Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" else textInfo "$regx: Amazon ES domain $domain policy allows Anonymous access but Kibana service endpoint $ES_DOMAIN_ENDPOINT responded $SERVER_RESPONSE" "$regx" diff --git a/include/connection_tests b/include/connection_tests index a34fb46c..6ceeb503 100644 --- a/include/connection_tests +++ b/include/connection_tests @@ -14,11 +14,22 @@ # Function test_tcp_connectivity is in include/os_detector +# see here https://gist.github.com/rsvp/1171304/3d6714a469105faf03943b685090f90f576cf904 + # Functions to connection responses initially used for Elasticsearch related checks httpStatus(){ case $1 in 000) SERVER_RESPONSE="000 Not responding" ;; 200) SERVER_RESPONSE="200 Successful" ;; + 300) SERVER_RESPONSE="300 Multiple Choices" ;; + 301) SERVER_RESPONSE="301 Moved Permanently" ;; + 302) SERVER_RESPONSE="302 Found residing temporarily under different URI" ;; + 303) SERVER_RESPONSE="303 See Other" ;; + 304) SERVER_RESPONSE="304 Not Modified" ;; + 305) SERVER_RESPONSE="305 Use Proxy" ;; + 306) SERVER_RESPONSE="306 Status not defined" ;; + 307) SERVER_RESPONSE="307 Temporary Redirect" ;; + 301) SERVER_RESPONSE="301 Moved" ;; 400) SERVER_RESPONSE="400 Error: Bad Request" ;; 
401) SERVER_RESPONSE="401 Error: Unauthorized" ;; 403) SERVER_RESPONSE="403 Error: Forbidden" ;; @@ -30,6 +41,6 @@ httpStatus(){ 503) SERVER_RESPONSE="503 Error: Service Unavailable" ;; 504) SERVER_RESPONSE="504 Error: Gateway Timeout" ;; 505) SERVER_RESPONSE="505 Error: HTTP Version Not Supported" ;; - *) SERVER_RESPONSE="HTTP: status not defined." ;; + *) SERVER_RESPONSE="HTTP: SERVER_RESPONSE not defined." ;; esac - } \ No newline at end of file + } From 47a05c203ae54b3929f83e46cbddd31b576a2ab2 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Mon, 20 Apr 2020 01:07:01 +0100 Subject: [PATCH 076/104] Improve listing of Checks and Groups Change `-l` flag to print a unique list of every single check (assuming none are orphaned outside of all groups) Allow `-g ` to be specified in combination with `-l`, to only print checks that are referenced by the specified group When listing all checks with `-l` only, print out all groups that reference each check Fixes: #545 --- README.md | 14 ++++---- include/outputs | 23 ++++++++----- prowler | 87 +++++++++++++++++++++++++++++++++---------------- 3 files changed, 80 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 8d71e75a..c2f498ca 100644 --- a/README.md +++ b/README.md @@ -108,9 +108,9 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler ``` - Use `-l` to list all available checks and group of checks (sections) + Use `-l` to list all available checks and the groups (sections) that reference them - If you want to avoid installing dependences run it using Docker: + If you want to avoid installing dependencies run it using Docker: ```sh docker run -ti --rm --name prowler --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN toniblyx/prowler:latest @@ -159,7 +159,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX Valid check numbers are based on the AWS CIS Benchmark guide, so 1.1 is check11 and 3.10 is check310 
-### Save your reports +### Save your reports 1. If you want to save your report for later analysis thare are different ways, natively (supported text, mono, csv, json and json-asff see note below for more info): @@ -190,7 +190,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler | ansi2html -la > report.html ``` - >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. + >Note about output formats to use with `-M`: "text" is the default one with colors, "mono" is like default one but monochrome, "csv" is comma separated values, "json" plain basic json (without comma between lines) and "json-asff" is also json with Amazon Security Finding Format that you can ship to Security Hub using `-S`. or save your report in a S3 bucket (this only works for text or mono, for csv, json or json-asff it has to be copied afterwards): @@ -235,7 +235,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX -k keep the credential report -n show check numbers to sort easier (i.e.: 1.01 instead of 1.1) - -l list all available checks only (does not perform any check) + -l list all available checks only (does not perform any check). Add -g to only list checks within the specified group -L list all groups (does not perform any check) -e exclude group extras -E execute all tests except a list of specified checks separated by comma (i.e. check21,check31) @@ -321,7 +321,7 @@ There are two requirements: 2. 
As mentioned in section "Custom IAM Policy", to allow Prowler to import its findings to AWS Security Hub you need to add the policy below to the role or user running Prowler: - [iam/prowler-security-hub.json](iam/prowler-security-hub.json) ->Note: to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours. +>Note: to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours. ## How to fix every FAIL @@ -498,7 +498,7 @@ AWS is made to be flexible for service links within and between different AWS ac This group of checks helps to analyse a particular AWS account (subject) on existing links to other AWS accounts across various AWS services, in order to identify untrusted links. -### Run +### Run To give it a quick shot just call: ```sh ./prowler -g trustboundaries diff --git a/include/outputs b/include/outputs index b1e23265..3bf75247 100644 --- a/include/outputs +++ b/include/outputs @@ -16,7 +16,7 @@ EXTENSION_CSV="csv" EXTENSION_JSON="json" EXTENSION_ASFF="asff-json" -EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation +EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") OUTPUT_FILE_NAME=prowler-output-$OUTPUT_DATE @@ -34,14 +34,14 @@ textPass(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV - fi + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + generateJsonOutput "$1" "Pass" | tee -a 
$OUTPUT_FILE_NAME.$EXTENSION_JSON fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") - echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF + echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" fi @@ -86,7 +86,7 @@ textFail(){ echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") @@ -130,15 +130,20 @@ textTitle(){ *) ITEM_LEVEL="Unspecified or Invalid";; esac + local group_ids + if [[ -n "$5" ]]; then + group_ids="$CYAN [$5] $NORMAL" + fi + if [[ "${MODES[@]}" =~ "csv" ]]; then - >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else if [[ "$ITEM_SCORED" == "Scored" ]]; then - echo -e "\n$BLUE $TITLE_ID $NORMAL $TITLE_TEXT" + echo -e "\n$BLUE $TITLE_ID $NORMAL $TITLE_TEXT $group_ids" else - echo -e "\n$PURPLE $TITLE_ID $TITLE_TEXT $NORMAL" + echo -e "\n$PURPLE $TITLE_ID $TITLE_TEXT $NORMAL $group_ids" fi fi } diff --git a/prowler b/prowler index 32b1dbb3..9347ae81 100755 --- a/prowler +++ b/prowler @@ -48,6 +48,7 @@ SEND_TO_SECURITY_HUB=0 SCRIPT_START_TIME=$( date -u +"%Y-%m-%dT%H:%M:%S%z" ) TITLE_ID="" TITLE_TEXT="CALLER ERROR - UNSET TITLE" +TOTAL_CHECKS=() # Command usage menu usage(){ @@ -70,7 +71,7 @@ USAGE: -k keep the credential report -n show check numbers to sort easier 
(i.e.: 1.01 instead of 1.1) - -l list all available checks only (does not perform any check) + -l list all available checks only (does not perform any check). Add -g to only list checks within the specified group -L list all groups (does not perform any check) -e exclude group extras -E execute all tests except a list of specified checks separated by comma (i.e. check21,check31) @@ -85,7 +86,7 @@ USAGE: -R role name to assume in the account, requires -A and -T (i.e.: ProwlerRole) -T session durantion given to that role credentials in seconds, default 1h (3600) recommended 12h, requires -R and -T - (i.e.: 43200) + (i.e.: 43200) -I External ID to be used when assuming roles (no mandatory), requires -A and -R. -h this help " @@ -232,14 +233,37 @@ if [[ $EXTERNAL_CHECKS_PATH ]]; then done fi -# Function to show the title of the check +# Get a list of total checks available by ID +for i in "${!GROUP_TITLE[@]}"; do + IFS=',' read -ra CHECKS <<< "${GROUP_CHECKS[$i]}" + for j in "${CHECKS[@]}"; do + TOTAL_CHECKS+=("$CHECK_ID_$j") + done +done +# Remove duplicates whilst preserving the order of checks, and store the result as an array +TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | awk '!seen[$0]++')) + +# Function to show the title of the check, and optionally which group(s) it belongs to # using this way instead of arrays to keep bash3 (osx) and bash4(linux) compatibility show_check_title() { local check_id=CHECK_ID_$1 local check_title=CHECK_TITLE_$1 local check_scored=CHECK_SCORED_$1 local check_type=CHECK_TYPE_$1 - textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" + local group_ids + # If requested ($2 is any non-null value) iterate all GROUP_CHECKS and produce a comma-separated list of all + # the GROUP_IDs that include this particular check + if [[ -n "$2" ]]; then + for i in "${!GROUP_ID[@]}"; do + if [[ "${GROUP_CHECKS[$i]}" =~ "$1" ]]; then + if [[ -n "$group_ids" ]]; then + group_ids+=", " + fi + 
group_ids+="${GROUP_ID[$i]}" + fi + done + fi + textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" "$group_ids" } # Function to show the title of a group, by numeric id @@ -343,24 +367,41 @@ execute_all() { done } -# Function to show the titles of everything +# Function to show the titles of either all checks or only those in the specified group show_all_titles() { - MAIN_GROUPS=(1 2 3 4 7) - for i in "${MAIN_GROUPS[@]}"; do - show_group_title $i - # Display the title of the checks in groups 1,2,3,4 and 7 - # Any other group has checks in these groups - IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} - for j in ${CHECKS[@]}; do - show_check_title $j - done - done + local checks + local check_id + local group_index + # If '-g ' has been specified, only show the titles of checks within the specified group + if [[ $GROUP_ID_READ ]];then + if [[ " ${GROUP_ID[@]} " =~ " ${GROUP_ID_READ} " ]]; then + for group_index in "${!GROUP_ID[@]}"; do + if [ "${GROUP_ID[$group_index]}" == "${GROUP_ID_READ}" ]; then + show_group_title "$group_index" + IFS=',' read -ra checks <<< "${GROUP_CHECKS[$i]}" + for check_id in ${checks[@]}; do + show_check_title "$check_id" + done + fi + done + else + textFail "Use a valid check group ID i.e.: group1, extras, forensics-ready, etc." 
+ show_all_group_titles + exit $EXITCODE + fi + else + for check_id in "${TOTAL_CHECKS[@]}"; do + # Pass 1 so that the group IDs that this check belongs to are printed + show_check_title "$check_id" 1 + done + fi } show_all_group_titles() { - for i in "${!GROUP_TITLE[@]}"; do - show_group_title $i - done + local group_index + for group_index in "${!GROUP_TITLE[@]}"; do + show_group_title "$group_index" + done } # Function to execute all checks but exclude some of them @@ -373,16 +414,6 @@ get_all_checks_without_exclusion() { for E_CHECK in "${E_CHECKS[@]}"; do CHECKS_TO_EXCLUDE+=($E_CHECK) done - # Get a list of total checks available by ID - for i in "${!GROUP_TITLE[@]}"; do - # show_group_title $i - IFS=',' read -ra CHECKS <<< ${GROUP_CHECKS[$i]} - for j in ${CHECKS[@]}; do - TOTAL_CHECKS+=($CHECK_ID_$j) - done - done - # Remove duplicates whilst preserving the order of checks, and store the result as an array - TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | awk '!seen[$0]++')) # Create a list that contains all checks but excluded ones for i in "${TOTAL_CHECKS[@]}"; do local COINCIDENCE=false From 8f179338d87365a6e528bcc9ba4fd0eba4094ee5 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Mon, 20 Apr 2020 01:30:37 +0100 Subject: [PATCH 077/104] Fix invalid references to $i when it should reference a local $group_index variable --- prowler | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/prowler b/prowler index 9347ae81..b4e0df88 100755 --- a/prowler +++ b/prowler @@ -251,15 +251,16 @@ show_check_title() { local check_scored=CHECK_SCORED_$1 local check_type=CHECK_TYPE_$1 local group_ids + local group_index # If requested ($2 is any non-null value) iterate all GROUP_CHECKS and produce a comma-separated list of all # the GROUP_IDs that include this particular check if [[ -n "$2" ]]; then - for i in "${!GROUP_ID[@]}"; do - if [[ "${GROUP_CHECKS[$i]}" =~ "$1" ]]; then + for group_index in "${!GROUP_ID[@]}"; do + if [[ 
"${GROUP_CHECKS[$group_index]}" =~ "$1" ]]; then if [[ -n "$group_ids" ]]; then group_ids+=", " fi - group_ids+="${GROUP_ID[$i]}" + group_ids+="${GROUP_ID[$group_index]}" fi done fi @@ -378,7 +379,7 @@ show_all_titles() { for group_index in "${!GROUP_ID[@]}"; do if [ "${GROUP_ID[$group_index]}" == "${GROUP_ID_READ}" ]; then show_group_title "$group_index" - IFS=',' read -ra checks <<< "${GROUP_CHECKS[$i]}" + IFS=',' read -ra checks <<< "${GROUP_CHECKS[$group_index]}" for check_id in ${checks[@]}; do show_check_title "$check_id" done From b42cc33a6c59665794916d57167df8d8cb83b46c Mon Sep 17 00:00:00 2001 From: "He.Longfei" Date: Mon, 20 Apr 2020 15:01:38 +0800 Subject: [PATCH 078/104] using api commands to check if macie is enabled instead of looking iam role --- checks/check_extra712 | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/checks/check_extra712 b/checks/check_extra712 index 251d93a5..d15f5bcf 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -18,12 +18,16 @@ CHECK_ALTERNATE_check712="extra712" extra712(){ # "Check if Amazon Macie is enabled (Not Scored) (Not part of CIS benchmark)" - textInfo "No API commands available to check if Macie is enabled," - textInfo "just looking if IAM Macie related permissions exist. " - MACIE_IAM_ROLES_CREATED=$($AWSCLI iam list-roles $PROFILE_OPT --query 'Roles[*].Arn'|grep AWSMacieServiceCustomer|wc -l) - if [[ $MACIE_IAM_ROLES_CREATED -eq 2 ]];then - textPass "Macie related IAM roles exist so it might be enabled. Check it out manually" - else - textFail "No Macie related IAM roles found. 
It is most likely not to be enabled" - fi -} + MACIE_NOT_SUPPORTED="Could not connect to the endpoint URL" + MACIE_NOT_ENABLED="Macie is not enabled for this AWS account" + for regx in $REGIONS; do + MACIE_MEMBER_ACCOUNTS=$($AWSCLI macie list-member-accounts $PROFILE_OPT --region $regx --output text --query 'memberAccounts[*]' 2>&1) + if [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_SUPPORTED}* ]];then + textInfo "Macie is not supported in the $regx AWS Regions" + elif [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_ENABLED}* ]];then + textFail "$regx: Amazon Macie is not enabled!" + else + textPass "$regx: Amazon Macie is enabled!" + fi + done +} From 5b8370179a136c8a44a905e5f2916e9daf599b69 Mon Sep 17 00:00:00 2001 From: Alex Gray Date: Mon, 20 Apr 2020 09:15:15 -0400 Subject: [PATCH 079/104] Get the list of families and then get latest task definition --- checks/check_extra768 | 22 +++++++++--------- .../get_latest_ecs_task_definition_version.py | 23 ------------------- 2 files changed, 11 insertions(+), 34 deletions(-) delete mode 100644 checks/get_latest_ecs_task_definition_version.py diff --git a/checks/check_extra768 b/checks/check_extra768 index 94089008..591983af 100644 --- a/checks/check_extra768 +++ b/checks/check_extra768 @@ -23,22 +23,22 @@ extra768(){ # this folder is deleted once this check is finished mkdir $SECRETS_TEMP_FOLDER fi - DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" textInfo "Looking for secrets in ECS task definitions' environment variables across all regions... 
" for regx in $REGIONS; do - # Get a list of ALL Task Definitions: - $AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx | jq -r .taskDefinitionArns[] > ALL_TASK_DEFINITIONS.txt - # Filter it down to ONLY the latest version of that task definition: - LIST_OF_TASK_DEFINITIONS=$(python ${DIR}/get_latest_ecs_task_definition_version.py -f ALL_TASK_DEFINITIONS.txt) - if [[ $LIST_OF_TASK_DEFINITIONS ]]; then - for taskDefinition in $LIST_OF_TASK_DEFINITIONS;do - IFS='/' read -r -a splitArn <<< "$taskDefinition" + # Get a list of all families first: + FAMILIES=$($AWSCLI ecs list-task-definition-families $PROFILE_OPT --region $regx --status ACTIVE | jq -r .families[]) + if [[ $FAMILIES ]]; then + for FAMILY in $FAMILIES;do + # Get the full task definition arn: + TASK_DEFINITION_TEMP=$($AWSCLI ecs list-task-definitions $PROFILE_OPT --region $regx --family-prefix $FAMILY --sort DESC --max-items 1 | jq -r .taskDefinitionArns[0]) + # We only care about the task definition name: + IFS='/' read -r -a splitArn <<< "$TASK_DEFINITION_TEMP" TASK_DEFINITION=${splitArn[1]} TASK_DEFINITION_ENV_VARIABLES_FILE="$SECRETS_TEMP_FOLDER/extra768-$TASK_DEFINITION-$regx-variables.txt" - TASK_DEFINITION_ENV_VARIABLES=$($AWSCLI ecs $PROFILE_OPT --region $regx describe-task-definition --task-definition $taskDefinition --query 'taskDefinition.containerDefinitions[*].environment' --output text > $TASK_DEFINITION_ENV_VARIABLES_FILE) + TASK_DEFINITION_ENV_VARIABLES=$($AWSCLI ecs $PROFILE_OPT --region $regx describe-task-definition --task-definition $TASK_DEFINITION --query 'taskDefinition.containerDefinitions[*].environment' --output text > $TASK_DEFINITION_ENV_VARIABLES_FILE) if [ -s $TASK_DEFINITION_ENV_VARIABLES_FILE ];then - # Implementation using https://github.com/Yelp/detect-secrets - FINDINGS=$(secretsDetector file $TASK_DEFINITION_ENV_VARIABLES_FILE) + # Implementation using https://github.com/Yelp/detect-secrets + FINDINGS=$(secretsDetector file $TASK_DEFINITION_ENV_VARIABLES_FILE) 
if [[ $FINDINGS -eq 0 ]]; then textPass "$regx: No secrets found in ECS task definition $TASK_DEFINITION variables" "$regx" # delete file if nothing interesting is there diff --git a/checks/get_latest_ecs_task_definition_version.py b/checks/get_latest_ecs_task_definition_version.py deleted file mode 100644 index d096d6fb..00000000 --- a/checks/get_latest_ecs_task_definition_version.py +++ /dev/null @@ -1,23 +0,0 @@ -import argparse - -def parseArgs(): - parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('-f', help='file containing list of ecs task definitions', required=True) - args = parser.parse_args() - return args - - -if __name__ == '__main__': - args = parseArgs() - family = {} - with open(args.f, 'r') as fd: - for line in fd: - l = line.strip() - family_name = l[:l.rfind(':')] - version_int = int(l[l.rfind(':') + 1:]) - if family_name not in family: - family[family_name] = version_int - if family[family_name] < version_int: - family[family_name] = version_int - for family, version in family.items(): - print('{}:{}'.format(family, version)) From 86ea46d77cebccffbc6fc0d963a9ad70540dbc28 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Mon, 20 Apr 2020 19:19:05 +0200 Subject: [PATCH 080/104] Update check_extra712 --- checks/check_extra712 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/checks/check_extra712 b/checks/check_extra712 index d15f5bcf..ac941c2e 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -25,9 +25,9 @@ extra712(){ if [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_SUPPORTED}* ]];then textInfo "Macie is not supported in the $regx AWS Regions" elif [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_ENABLED}* ]];then - textFail "$regx: Amazon Macie is not enabled!" + textFail "$regx: Amazon Macie is not enabled!" "$regx" else - textPass "$regx: Amazon Macie is enabled!" + textPass "$regx: Amazon Macie is enabled!" 
"$regx" fi done } From d6374f8bc8771dbf1582e73c2596292ab032cecb Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Mon, 20 Apr 2020 19:27:39 +0200 Subject: [PATCH 081/104] Updated textInfo message on extra712 --- checks/check_extra712 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check_extra712 b/checks/check_extra712 index ac941c2e..1a9891ec 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -23,7 +23,7 @@ extra712(){ for regx in $REGIONS; do MACIE_MEMBER_ACCOUNTS=$($AWSCLI macie list-member-accounts $PROFILE_OPT --region $regx --output text --query 'memberAccounts[*]' 2>&1) if [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_SUPPORTED}* ]];then - textInfo "Macie is not supported in the $regx AWS Regions" + textInfo "$regx: Amazon Macie is not supported in $regx" "$regx" elif [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_ENABLED}* ]];then textFail "$regx: Amazon Macie is not enabled!" "$regx" else From ad66254b45497dad9c784e5750621564b12910da Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Tue, 21 Apr 2020 01:21:55 +0100 Subject: [PATCH 082/104] Extend check13 to meet all CIS rules and consolidate with extra774 Create `include/check_creds_last_used` and move all logic for checking last usages of passwords and access keys there Modify check13 and extra774 to call new function, specifying time-range of last 90 days and last 30 days respectively Modify messages in check14 and check121 so that all mentions of 'access key's are consistent Fixes #496 --- checks/check121 | 8 +- checks/check13 | 22 +----- checks/check14 | 4 +- checks/check_extra774 | 29 +------ include/check_creds_last_used | 143 ++++++++++++++++++++++++++++++++++ prowler | 1 + 6 files changed, 153 insertions(+), 54 deletions(-) create mode 100644 include/check_creds_last_used diff --git a/checks/check121 b/checks/check121 index 636cae73..3fbd5535 100644 --- a/checks/check121 +++ b/checks/check121 @@ -25,10 +25,10 @@ check121(){ LIST_USERS_KEY1_ACTIVE=$(for user in 
$LIST_USERS_KEY1_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$9 }'|grep "true true$"|awk '{ print $1 }'|sed 's/[[:blank:]]+/,/g' ; done) if [[ $LIST_USERS_KEY1_ACTIVE ]]; then for user in $LIST_USERS_KEY1_ACTIVE; do - textFail "$user has never used Access Key 1" + textFail "User $user has never used access key 1" done else - textPass "No users found with Access Key 1 never used" + textPass "No users found with access key 1 never used" fi # List of USERS with KEY2 last_used_date as N/A LIST_USERS_KEY2_NA=$(for user in $LIST_USERS; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$16 }'|grep N/A |awk '{ print $1 }' ; done) @@ -36,9 +36,9 @@ check121(){ LIST_USERS_KEY2_ACTIVE=$(for user in $LIST_USERS_KEY2_NA; do grep "^${user}," $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$14 }'|grep "true true$" |awk '{ print $1 }' ; done) if [[ $LIST_USERS_KEY2_ACTIVE ]]; then for user in $LIST_USERS_KEY2_ACTIVE; do - textFail "$user has never used Access Key 2" + textFail "User $user has never used access key 2" done else - textPass "No users found with Access Key 2 never used" + textPass "No users found with access key 2 never used" fi } diff --git a/checks/check13 b/checks/check13 index 9f8f5a4c..929a6aa2 100644 --- a/checks/check13 +++ b/checks/check13 @@ -17,25 +17,5 @@ CHECK_ASFF_RESOURCE_TYPE_check13="AwsIamUser" CHECK_ALTERNATE_check103="check13" check13(){ - # "Ensure credentials unused for 90 days or greater are disabled (Scored)" - COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep true | awk '{ print $1 }') - # Only check Password last used for users with password enabled - if [[ $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED ]]; then - for i in $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED; do - DATEUSED=$($AWSCLI iam list-users --query "Users[?UserName=='$i'].PasswordLastUsed" --output text $PROFILE_OPT --region $REGION | cut -d'T' -f1) - if [ "$DATEUSED" == "" ] - then - textFail "User \"$i\" has 
not logged in during the last 90 days" - else - HOWOLDER=$(how_older_from_today $DATEUSED) - if [ $HOWOLDER -gt "90" ];then - textFail "User \"$i\" has not logged in during the last 90 days" - else - textPass "User \"$i\" found with credentials used in the last 90 days" - fi - fi - done - else - textPass "No users found with password enabled" - fi + check_creds_used_in_last_days 90 } diff --git a/checks/check14 b/checks/check14 index 21e2be49..438b8364 100644 --- a/checks/check14 +++ b/checks/check14 @@ -30,7 +30,7 @@ check14(){ HOWOLDER=$(how_older_from_today $DATEROTATED1) if [ $HOWOLDER -gt "90" ];then - textFail " $user has not rotated access key1 in over 90 days" + textFail "$user has not rotated access key 1 in over 90 days" C14_NUM_USERS1=$(expr $C14_NUM_USERS1 + 1) fi done @@ -48,7 +48,7 @@ check14(){ DATEROTATED2=$(cat $TEMP_REPORT_FILE | grep -v user_creation_time | grep "^${user},"| awk -F, '{ print $15 }' | grep -v "N/A" | awk -F"T" '{ print $1 }') HOWOLDER=$(how_older_from_today $DATEROTATED2) if [ $HOWOLDER -gt "90" ];then - textFail " $user has not rotated access key2 in over 90 days" + textFail "$user has not rotated access key 2 in over 90 days" C14_NUM_USERS2=$(expr $C14_NUM_USERS2 + 1) fi done diff --git a/checks/check_extra774 b/checks/check_extra774 index 64c096c3..b88bfad8 100644 --- a/checks/check_extra774 +++ b/checks/check_extra774 @@ -11,37 +11,12 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
CHECK_ID_extra774="7.74" -CHECK_TITLE_extra774="[extra774] Check if user have unused console login" +CHECK_TITLE_extra774="[extra774] Ensure credentials unused for 30 days or greater are disabled" CHECK_SCORED_extra774="NOT_SCORED" CHECK_TYPE_extra774="EXTRA" CHECK_ASFF_RESOURCE_TYPE_extra774="AwsIamUser" CHECK_ALTERNATE_check774="extra774" extra774(){ - MAX_DAYS=-30 - LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4,$5 }' |grep true | awk '{ print $1 }') - - for i in $LIST_USERS_WITH_PASSWORD_ENABLED; do - user=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $1 }') - last_login_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$5 }' |grep "^$i " |awk '{ print $2 }') - - # If the user has never logged into the console, their last login date is 'no_information'. See: - # https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html#id_credentials_understanding_the_report_format - if [[ "${last_login_date}" == "no_information" ]]; then - user_created_date=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$3 }' |grep "^$i " |awk '{ print $2 }') - days_since_user_created=$(how_many_days_from_today ${user_created_date%T*}) - if [ "$days_since_user_created" -lt "$MAX_DAYS" ];then - textFail "User $user has never used console login since they were created over ${MAX_DAYS#-} days ago" - else - textInfo "User $user has not used console login since they were created" - fi - else - days_not_in_use=$(how_many_days_from_today ${last_login_date%T*}) - if [ "$days_not_in_use" -lt "$MAX_DAYS" ];then - textFail "User $user has not used console login for more than ${MAX_DAYS#-} days" - else - textPass "User $user has used console login in the past ${MAX_DAYS#-} days" - fi - fi - done + check_creds_used_in_last_days 30 } diff --git a/include/check_creds_last_used b/include/check_creds_last_used new file mode 100644 index 00000000..4f8633b3 --- /dev/null +++ b/include/check_creds_last_used @@ -0,0 +1,143 @@ 
+#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +# Set of functions for checking credential usage, following CIS 1.3 "Ensure credentials unused for 90 days or greater are disabled" rules +# but support a custom time-range to allow for stricter policies, e.g. extra774 + +# CSV Report Column Numbering +# See also https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html#id_credentials_understanding_the_report_format +# 1 - user +# 2 - arn +# 3 - user_creation_time +# 4 - password_enabled +# 5 - password_last_used +# 6 - password_last_changed +# 7 - password_next_rotation +# 8 - mfa_active +# 9 - access_key_1_active +# 10 - access_key_1_last_rotated +# 11 - access_key_1_last_used_date +# 12 - access_key_1_last_used_region +# 13 - access_key_1_last_used_service +# 14 - access_key_2_active +# 15 - access_key_2_last_rotated +# 16 - access_key_2_last_used_date +# 17 - access_key_2_last_used_region +# 18 - access_key_2_last_used_service +# 19 - cert_1_active +# 20 - cert_1_last_rotated +# 21 - cert_2_active +# 22 - cert_2_last_rotated + +# Check both passwords and access keys - e.g. 
CIS rule +check_creds_used_in_last_days() { + local max_days=$1 + + check_passwords_used_in_last_days "$max_days" + check_access_keys_used_in_last_days "$max_days" +} + +check_passwords_used_in_last_days() { + local max_days=$1 + + local user + local users_with_password_enabled + local last_login_date + local days_since_password_last_changed + local days_password_not_in_use + users_with_password_enabled=$(awk -F, '{ print $1,$4 }' "$TEMP_REPORT_FILE" | grep " true$" | awk '{ print $1 }') + # Only check password last used date for users with password enabled + if [[ $users_with_password_enabled ]]; then + for user in $users_with_password_enabled; do + last_login_date=$(awk -F, '{ print $1,$5 }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + + # If the user has never logged into the console, their last login date is 'no_information'. See: + # https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html#id_credentials_understanding_the_report_format + if [[ "${last_login_date}" == "no_information" ]]; then + user_password_changed_date=$(awk -F, '{ print $1,$6 }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + days_since_password_last_changed=$(how_older_from_today "${user_password_changed_date%T*}") + + # "When password_enabled is set to TRUE and password_last_used is set to no_information, ensure password_last_changed is less than X days ago" + if [[ "$days_since_password_last_changed" -ge "$max_days" ]]; then + textFail "User $user has never logged into the console since creation and their password not changed in the past ${max_days} days" + else + textInfo "User $user has not logged into the console since creation" + fi + else + days_password_not_in_use=$(how_older_from_today "${last_login_date%T*}") + + # "For each user having password_enabled set to TRUE, ensure password_last_used_date is less than X days ago." 
+ if [[ "$days_password_not_in_use" -ge "$max_days" ]]; then + textFail "User $user has not logged into the console in the past ${max_days} days" + else + textPass "User $user has logged into the console in the past ${max_days} days" + fi + fi + done + else + textPass "No users found with password enabled" + fi +} + +check_access_keys_used_in_last_days() { + local max_days=$1 + + check_access_key_used_in_last_days "$max_days" 1 9 10 11 + check_access_key_used_in_last_days "$max_days" 2 14 15 16 +} + +check_access_key_used_in_last_days() { + local max_days=$1 + local access_key_name=$2 + local access_key_active_col=$3 + local access_key_last_rotated_col=$4 + local access_key_last_used_col=$5 + + local user + local users_with_access_key_enabled + local access_key_last_used_date + local access_key_last_rotated_date + local days_since_access_key_rotated + local days_since_access_key_used + users_with_access_key_enabled=$(awk -F, -v i="$access_key_active_col" '{ print $1,$i }' "$TEMP_REPORT_FILE" | grep " true$" | awk '{ print $1 }') + # Only check access key last used date for users with this access key enabled + if [[ $users_with_access_key_enabled ]]; then + for user in $users_with_access_key_enabled; do + access_key_last_used_date=$(awk -F, -v i="$access_key_last_used_col" '{ print $1,$i }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + + if [[ "${access_key_last_used_date}" == "N/A" ]]; then + access_key_last_rotated_date=$(awk -F, -v i="$access_key_last_rotated_col" '{ print $1,$i }' "$TEMP_REPORT_FILE" | grep "^$user " | awk '{ print $2 }') + days_since_access_key_rotated=$(how_older_from_today "${access_key_last_rotated_date%T*}") + + # "When a user having an access_key_x_active (where x is 1 or 2) to TRUE and corresponding access_key_x_last_used_date is set to N/A, + # ensure access_key_x_last_rotated is less than X days ago" + if [[ "$days_since_access_key_rotated" -ge "$max_days" ]]; then + textFail "User $user has never used access key 
$access_key_name since creation and not rotated it in the past ${max_days} days" + else + textInfo "User $user has not used access key $access_key_name since creation" + fi + else + days_since_access_key_used=$(how_older_from_today "${access_key_last_used_date%T*}") + + # "For each user having an access_key_1_active or access_key_2_active to TRUE, ensure the corresponding access_key_n_last_used_date is less than X days ago" + if [[ "$days_since_access_key_used" -ge "$max_days" ]]; then + textFail "User $user has not used access key $access_key_name in the past ${max_days} days" + else + textPass "User $user has used access key $access_key_name in the past ${max_days} days" + fi + fi + done + else + textPass "No users found with access key $access_key_name enabled" + fi +} diff --git a/prowler b/prowler index 10a02791..aa933066 100755 --- a/prowler +++ b/prowler @@ -203,6 +203,7 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT . $PROWLER_DIR/include/scoring . $PROWLER_DIR/include/python_detector . $PROWLER_DIR/include/secrets_detector +. $PROWLER_DIR/include/check_creds_last_used . $PROWLER_DIR/include/check3x . $PROWLER_DIR/include/assume_role . 
$PROWLER_DIR/include/connection_tests From 92091d9ecdd39ec50a63ec3175f9cc0105d53693 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 10:31:30 +0200 Subject: [PATCH 083/104] Rollback #562 fix issue #564 --- checks/check_extra712 | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/checks/check_extra712 b/checks/check_extra712 index 1a9891ec..d6ecfb7f 100644 --- a/checks/check_extra712 +++ b/checks/check_extra712 @@ -16,18 +16,13 @@ CHECK_SCORED_extra712="NOT_SCORED" CHECK_TYPE_extra712="EXTRA" CHECK_ALTERNATE_check712="extra712" -extra712(){ - # "Check if Amazon Macie is enabled (Not Scored) (Not part of CIS benchmark)" - MACIE_NOT_SUPPORTED="Could not connect to the endpoint URL" - MACIE_NOT_ENABLED="Macie is not enabled for this AWS account" - for regx in $REGIONS; do - MACIE_MEMBER_ACCOUNTS=$($AWSCLI macie list-member-accounts $PROFILE_OPT --region $regx --output text --query 'memberAccounts[*]' 2>&1) - if [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_SUPPORTED}* ]];then - textInfo "$regx: Amazon Macie is not supported in $regx" "$regx" - elif [[ ${MACIE_MEMBER_ACCOUNTS} == *${MACIE_NOT_ENABLED}* ]];then - textFail "$regx: Amazon Macie is not enabled!" "$regx" - else - textPass "$regx: Amazon Macie is enabled!" "$regx" - fi - done -} + extra712(){ + textInfo "No API commands available to check if Macie is enabled," + textInfo "just looking if IAM Macie related permissions exist. " + MACIE_IAM_ROLES_CREATED=$($AWSCLI iam list-roles $PROFILE_OPT --query 'Roles[*].Arn'|grep AWSMacieServiceCustomer|wc -l) + if [[ $MACIE_IAM_ROLES_CREATED -eq 2 ]];then + textPass "Macie related IAM roles exist so it might be enabled. Check it out manually" +else + textFail "No Macie related IAM roles found. 
It is most likely not to be enabled" +fi +} From c2669622cf39b1115419a245eecd3c119927b2ce Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Wed, 22 Apr 2020 09:58:33 +0100 Subject: [PATCH 084/104] Fix -E flag no longer excluding checks Remove re-declaration of TOTAL_CHECKS variable Bug introduced by #561 Fixes #566 --- prowler | 1 - 1 file changed, 1 deletion(-) diff --git a/prowler b/prowler index aa933066..42834c8a 100755 --- a/prowler +++ b/prowler @@ -425,7 +425,6 @@ show_all_group_titles() { get_all_checks_without_exclusion() { CHECKS_EXCLUDED=() local CHECKS_TO_EXCLUDE=() - local TOTAL_CHECKS=() # Get a list of checks to exclude IFS=',' read -ra E_CHECKS <<< "$1" for E_CHECK in "${E_CHECKS[@]}"; do From 9cbdefc2deb4c0499b0c4fb7a2fecedfe6d71659 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 11:27:08 +0200 Subject: [PATCH 085/104] Adds CSV header to the output file too #565 --- include/csv_header | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/csv_header b/include/csv_header index abd1b8c4..07ac75bc 100644 --- a/include/csv_header +++ b/include/csv_header @@ -15,5 +15,5 @@ printCsvHeader() { >&2 echo "" >&2 echo "Generating \"${SEP}\" delimited report on stdout for profile $PROFILE, account $ACCOUNT_NUM" - echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}RESULT${SEP}SCORED${SEP}LEVEL${SEP}TITLE_TEXT${SEP}NOTES" + echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}RESULT${SEP}SCORED${SEP}LEVEL${SEP}TITLE_TEXT${SEP}NOTES" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV } From 5805576dcea2c685ef0f23a99ae29e250ee38bd8 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Wed, 22 Apr 2020 10:35:33 +0100 Subject: [PATCH 086/104] Check if gbase64 (GNU) is available on Mac and use it in preference to BSD base64 Previously it was switching to GNU versions of base64 even if base64 was the BSD version Fixes #568 --- include/os_detector | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) 
diff --git a/include/os_detector b/include/os_detector index aa071564..2d2faf76 100644 --- a/include/os_detector +++ b/include/os_detector @@ -12,6 +12,7 @@ # specific language governing permissions and limitations under the License. DATE_CMD="date" +BASE64_CMD="base64" gnu_how_older_from_today() { DATE_TO_COMPARE=$1 @@ -44,10 +45,10 @@ bsd_timestamp_to_date() { } gnu_decode_report() { - base64 -d + "$BASE64_CMD" -d } bsd_decode_report() { - base64 -D + "$BASE64_CMD" -D } gnu_how_many_days_from_today() { @@ -146,12 +147,16 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then # It is possible that the user has installed GNU coreutils on OS X. By default, this will make GNU commands # available with a 'g' prefix, e.g. 'gdate'. Test if this is present, and use it if so, as it supports more features. # The user also may have replaced the default Mac OS X BSD tools with the GNU coreutils equivalents. - # Only GNU date allows --version as a valid argument, so use the validity of this argument + # Only GNU date/base64 allows --version as a valid argument, so use the validity of this argument # as a means to detect that coreutils is installed and is overriding the default tools GDATE=$(which gdate) if [ -n "${GDATE}" ]; then DATE_CMD="gdate" fi + GBASE64=$(which gbase64) + if [ -n "${GBASE64}" ]; then + BASE64_CMD="gbase64" + fi if "$DATE_CMD" --version >/dev/null 2>&1 ; then how_older_from_today() { gnu_how_older_from_today "$1" @@ -159,9 +164,6 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then timestamp_to_date() { gnu_timestamp_to_date "$1" } - decode_report() { - gnu_decode_report - } how_many_days_from_today() { gnu_how_many_days_from_today "$1" } @@ -181,9 +183,6 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then timestamp_to_date() { bsd_timestamp_to_date "$1" } - decode_report() { - bsd_decode_report - } how_many_days_from_today() { bsd_how_many_days_from_today "$1" } @@ -197,6 +196,15 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then bsd_get_iso8601_timestamp } fi + if "$BASE64_CMD" --version 
>/dev/null 2>&1 ; then + decode_report() { + gnu_decode_report + } + else + decode_report() { + bsd_decode_report + } + fi test_tcp_connectivity() { bsd_test_tcp_connectivity "$1" "$2" "$3" } From ef952ce9cc61d730fe436bf0bdefe9604bfd0c64 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 12:07:20 +0200 Subject: [PATCH 087/104] Simplified caller id info on outputs --- include/whoami | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/include/whoami b/include/whoami index abe59cc5..c54385f2 100644 --- a/include/whoami +++ b/include/whoami @@ -14,9 +14,11 @@ # Get whoami in AWS, who is the user running this shell script getWhoami(){ - ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output json $PROFILE_OPT --region $REGION --query "Account" | tr -d '"') + ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Account") + CALLER_ARN=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn") + USER_ID=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "UserId") + if [[ "$MODE" == "csv" ]]; then - CALLER_ARN_RAW=$($AWSCLI sts get-caller-identity --output json $PROFILE_OPT --region $REGION --query "Arn") if [[ 255 -eq $? ]]; then # Failed to get own identity ... exit echo "ERROR WITH $PROFILE CREDENTIALS - EXITING!" 
@@ -24,7 +26,6 @@ getWhoami(){ EXITCODE=2 exit $EXITCODE fi - CALLER_ARN=$(echo $CALLER_ARN_RAW | tr -d '"') printCsvHeader textTitle "0.0" "Show report generation info" "NOT_SCORED" "SUPPORT" textInfo "ARN: $CALLER_ARN TIMESTAMP: $SCRIPT_START_TIME" @@ -33,10 +34,10 @@ getWhoami(){ else echo "" echo -e " This report is being generated using credentials below:\n" - echo -e " AWS-CLI Profile: $NOTICE[$PROFILE]$NORMAL AWS API Region: $NOTICE[$REGION]$NORMAL AWS Filter Region: $NOTICE[${FILTERREGION:-all}]$NORMAL\n" + echo -e " AWS-CLI Profile: $NOTICE[$PROFILE]$NORMAL AWS API Region: $NOTICE[$REGION]$NORMAL AWS Filter Region: $NOTICE[${FILTERREGION:-all}]$NORMAL" if [[ $MONOCHROME -eq 1 ]]; then - echo -e " Caller Identity:" - $AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn" + echo -e " AWS Account: $NOTICE[$ACCOUNT_NUM]$NORMAL UserId: $NOTICE[$USER_ID]$NORMAL" + echo -e " Caller Identity ARN: $NOTICE[$CALLER_ARN]$NORMAL" if [[ 255 -eq $? ]]; then # Failed to get own identity ... exit echo "ERROR WITH $PROFILE CREDENTIALS - EXITING!" @@ -44,8 +45,8 @@ getWhoami(){ exit 2 fi else - echo -e " Caller Identity:" - $AWSCLI sts get-caller-identity --output table $PROFILE_OPT --region $REGION + echo -e " AWS Account: $NOTICE[$ACCOUNT_NUM]$NORMAL UserId: $NOTICE[$USER_ID]$NORMAL" + echo -e " Caller Identity ARN: $NOTICE[$CALLER_ARN]$NORMAL" if [[ 255 -eq $? ]]; then # Failed to get own identity ... 
exit echo variable $PROFILE_OPT From 43fb877109468e1111ff0968f1ec13a626311885 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 12:28:31 +0200 Subject: [PATCH 088/104] Added account id to the output filename --- README.md | 2 +- include/whoami | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 87ec9615..751587ed 100644 --- a/README.md +++ b/README.md @@ -188,7 +188,7 @@ This script has been written in bash using AWS-CLI and it works in Linux and OSX ./prowler -g gdpr -M csv,json,json-asff ``` - Now `-M` creates a file inside the prowler root directory named `prowler-output-YYYYMMDDHHMMSS.format`. You don't have to specify anything else, no pipes, no redirects. + Now `-M` creates a file inside the prowler root directory named `prowler-output-AWSACCOUNTID-YYYYMMDDHHMMSS.format`. You don't have to specify anything else, no pipes, no redirects. or just saving the output to a file like below: diff --git a/include/whoami b/include/whoami index c54385f2..4cc32d5d 100644 --- a/include/whoami +++ b/include/whoami @@ -13,11 +13,15 @@ # Get whoami in AWS, who is the user running this shell script -getWhoami(){ - ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Account") - CALLER_ARN=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn") - USER_ID=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "UserId") +ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Account") +CALLER_ARN=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn") +USER_ID=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "UserId") +if [[ $ACCOUNT_TO_ASSUME ]]; then + ACCOUNT_NUM=$ACCOUNT_TO_ASSUME +fi + +getWhoami(){ if [[ "$MODE" == "csv" ]]; then if [[ 255 -eq $? 
]]; then # Failed to get own identity ... exit From 2d64a1182e95975b008a94de79df066318067b6c Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 12:31:27 +0200 Subject: [PATCH 089/104] Added account id to the output filename --- include/outputs | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/include/outputs b/include/outputs index 662bee06..b150e3e0 100644 --- a/include/outputs +++ b/include/outputs @@ -13,13 +13,19 @@ # Output formatting functions +# if [[ $ACCOUNT_TO_ASSUME ]];then +# ACCOUNT_ID="$ACCOUNT_TO_ASSUME" +# else +# ACCOUNT_ID="$ACCOUNT_NUM" +# fi + + EXTENSION_CSV="csv" EXTENSION_JSON="json" EXTENSION_ASFF="asff-json" EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") -OUTPUT_FILE_NAME=prowler-output-$OUTPUT_DATE - +OUTPUT_FILE_NAME="prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}" textPass(){ if [[ "$QUIET" == 1 ]]; then @@ -33,10 +39,10 @@ textPass(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Pass" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + generateJsonOutput "$1" "Pass" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") @@ -64,10 +70,10 @@ textInfo(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a 
$OUTPUT_FILE_NAME.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Info" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + generateJsonOutput "$1" "Info" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi if is_junit_output_enabled; then output_junit_info "$1" @@ -86,14 +92,14 @@ textFail(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Fail" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_JSON + generateJsonOutput "$1" "Fail" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") - echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF + echo "${JSON_ASFF_OUTPUT}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_ASFF} if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" fi @@ -142,7 +148,7 @@ textTitle(){ fi if [[ "${MODES[@]}" =~ "csv" ]]; then - >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV + >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else From 2eb41ff91019cb8c1ab0708b4ec03d8415758360 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 12:32:05 +0200 Subject: [PATCH 090/104] Added account id to the output filename --- 
include/outputs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/include/outputs b/include/outputs index b150e3e0..cbadf8b2 100644 --- a/include/outputs +++ b/include/outputs @@ -13,13 +13,6 @@ # Output formatting functions -# if [[ $ACCOUNT_TO_ASSUME ]];then -# ACCOUNT_ID="$ACCOUNT_TO_ASSUME" -# else -# ACCOUNT_ID="$ACCOUNT_NUM" -# fi - - EXTENSION_CSV="csv" EXTENSION_JSON="json" EXTENSION_ASFF="asff-json" From 9f03bd75455a8ae4db1f036efc9f6f6e926ecbd5 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 12:58:54 +0200 Subject: [PATCH 091/104] Added txt output as mono for -M --- include/outputs | 12 +++++++++++- prowler | 2 +- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/include/outputs b/include/outputs index cbadf8b2..4e61c0c3 100644 --- a/include/outputs +++ b/include/outputs @@ -16,6 +16,7 @@ EXTENSION_CSV="csv" EXTENSION_JSON="json" EXTENSION_ASFF="asff-json" +EXTENSION_TEXT="txt" EXTENSION_HTML="html" # not implemented yet, use ansi2html as in documentation OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") OUTPUT_FILE_NAME="prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}" @@ -47,7 +48,10 @@ textPass(){ if is_junit_output_enabled; then output_junit_success "$1" fi - if [[ "${MODES[@]}" =~ "text" ]]; then + if [[ "${MODES[@]}" =~ "mono" ]]; then + echo " $OK PASS!$NORMAL $1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT + fi + if [[ "${MODES[@]}" =~ "text" || "${MODES[@]}" =~ "mono" ]]; then echo " $OK PASS!$NORMAL $1" fi } @@ -71,6 +75,9 @@ textInfo(){ if is_junit_output_enabled; then output_junit_info "$1" fi + if [[ "${MODES[@]}" =~ "mono" ]]; then + echo " $NOTICE INFO! $1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT + fi if [[ "${MODES[@]}" =~ "text" ]]; then echo " $NOTICE INFO! $1 $NORMAL" fi @@ -100,6 +107,9 @@ textFail(){ if is_junit_output_enabled; then output_junit_failure "$1" fi + if [[ "${MODES[@]}" =~ "mono" ]]; then + echo " $BAD FAIL! 
$1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT + fi if [[ "${MODES[@]}" =~ "text" ]]; then echo " $BAD FAIL! $1 $NORMAL" fi diff --git a/prowler b/prowler index 42834c8a..2e148724 100755 --- a/prowler +++ b/prowler @@ -195,10 +195,10 @@ trap "{ rm -f /tmp/prowler*.policy.*; }" EXIT . $PROWLER_DIR/include/os_detector . $PROWLER_DIR/include/aws_profile_loader . $PROWLER_DIR/include/awscli_detector +. $PROWLER_DIR/include/whoami . $PROWLER_DIR/include/outputs . $PROWLER_DIR/include/csv_header . $PROWLER_DIR/include/banner -. $PROWLER_DIR/include/whoami . $PROWLER_DIR/include/credentials_report . $PROWLER_DIR/include/scoring . $PROWLER_DIR/include/python_detector From 8c9aea1231c8043ca42ed1ca17163fdd3dbcd38d Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 13:54:17 +0200 Subject: [PATCH 092/104] Improved GetCallerIdentity handling / credentials --- include/whoami | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/include/whoami b/include/whoami index 4cc32d5d..3bd2b174 100644 --- a/include/whoami +++ b/include/whoami @@ -13,7 +13,16 @@ # Get whoami in AWS, who is the user running this shell script + ACCOUNT_NUM=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Account") + +if [[ 255 -eq $? ]]; then + # Failed to get own identity ... exit + echo -e "$RED ERROR Getting credentials to run Prowler - EXITING! 
$NORMAL" + EXITCODE=2 + exit $EXITCODE +fi + CALLER_ARN=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn") USER_ID=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "UserId") From 7dc790a3f5548cda5aab86a6b5ddd22518ec2a69 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 20:05:39 +0200 Subject: [PATCH 093/104] Fixed issue with govcloud on extra764 #536 --- checks/check_extra764 | 6 +++--- include/whoami | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/checks/check_extra764 b/checks/check_extra764 index 8ec26cd4..ede847c7 100644 --- a/checks/check_extra764 +++ b/checks/check_extra764 @@ -18,13 +18,13 @@ CHECK_ASFF_RESOURCE_TYPE_extra764="AwsS3Bucket" CHECK_ALTERNATE_check764="extra764" extra764(){ - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text|xargs -n1) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text --region $REGION|xargs -n1) if [[ $LIST_OF_BUCKETS ]]; then for bucket in $LIST_OF_BUCKETS;do TEMP_STP_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-${bucket}.policy.XXXXXXXXXX) # get bucket policy - $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --output text --query Policy > $TEMP_STP_POLICY_FILE 2>&1 + $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --output text --query Policy --region $REGION > $TEMP_STP_POLICY_FILE 2>&1 if [[ $(grep AccessDenied $TEMP_STP_POLICY_FILE) ]]; then textFail "Access Denied Trying to Get Bucket Policy for $bucket" rm -f $TEMP_STP_POLICY_FILE @@ -37,7 +37,7 @@ extra764(){ fi # https://aws.amazon.com/premiumsupport/knowledge-center/s3-bucket-policy-for-config-rule/ - CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:aws:s3:::${bucket}" '.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == 
"*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "false")') + CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:$AWS_PARTITION:s3:::${bucket}"'.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "true")') if [[ $CHECK_BUCKET_STP_POLICY_PRESENT ]]; then textPass "Bucket $bucket has S3 bucket policy to deny requests over insecure transport" else diff --git a/include/whoami b/include/whoami index 3bd2b174..4322cb96 100644 --- a/include/whoami +++ b/include/whoami @@ -25,6 +25,7 @@ fi CALLER_ARN=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "Arn") USER_ID=$($AWSCLI sts get-caller-identity --output text $PROFILE_OPT --region $REGION --query "UserId") +AWS_PARTITION=$(echo $CALLER_ARN| cut -d: -f2) if [[ $ACCOUNT_TO_ASSUME ]]; then ACCOUNT_NUM=$ACCOUNT_TO_ASSUME From 1beb483be3742a8a088542e9902fbaadfda4edc5 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 20:40:18 +0200 Subject: [PATCH 094/104] Fixed issue with govcloud on extra764 #536 --- checks/check_extra764 | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/checks/check_extra764 b/checks/check_extra764 index ede847c7..96cf9100 100644 --- a/checks/check_extra764 +++ b/checks/check_extra764 @@ -37,13 +37,12 @@ extra764(){ fi # https://aws.amazon.com/premiumsupport/knowledge-center/s3-bucket-policy-for-config-rule/ - CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:$AWS_PARTITION:s3:::${bucket}"'.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or 
((.Principal|type == "string") and .Principal == "*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "true")') + CHECK_BUCKET_STP_POLICY_PRESENT=$(cat $TEMP_STP_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}" '.Statement[]|select((((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and .Action=="s3:*" and (.Resource|type == "array") and (.Resource|map({(.):0})[]|has($arn)) and (.Resource|map({(.):0})[]|has($arn+"/*")) and .Condition.Bool."aws:SecureTransport" == "false")') if [[ $CHECK_BUCKET_STP_POLICY_PRESENT ]]; then textPass "Bucket $bucket has S3 bucket policy to deny requests over insecure transport" else textFail "Bucket $bucket allows requests over insecure transport" fi - rm -fr $TEMP_STP_POLICY_FILE done From dbb3ed96633875182971d3fcde5c39fbff23f6a4 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 22:19:21 +0200 Subject: [PATCH 095/104] Improved extra734 for GovCloud --- checks/check_extra734 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/checks/check_extra734 b/checks/check_extra734 index 02d7f4ec..35930bd0 100644 --- a/checks/check_extra734 +++ b/checks/check_extra734 @@ -18,7 +18,7 @@ CHECK_ASFF_RESOURCE_TYPE_extra734="AwsS3Bucket" CHECK_ALTERNATE_check734="extra734" extra734(){ - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text|xargs -n1) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --region $REGION --query Buckets[*].Name --output text|xargs -n1) if [[ $LIST_OF_BUCKETS ]]; then for bucket in $LIST_OF_BUCKETS;do @@ -28,7 +28,7 @@ extra734(){ # - Have bucket policy denying s3:PutObject when s3:x-amz-server-side-encryption is absent # query to get if has encryption enabled or not - RESULT=$($AWSCLI s3api get-bucket-encryption 
$PROFILE_OPT --bucket $bucket --query ServerSideEncryptionConfiguration.Rules[].ApplyServerSideEncryptionByDefault[].SSEAlgorithm --output text 2>&1) + RESULT=$($AWSCLI s3api get-bucket-encryption $PROFILE_OPT --region $REGION --bucket $bucket --query ServerSideEncryptionConfiguration.Rules[].ApplyServerSideEncryptionByDefault[].SSEAlgorithm --output text 2>&1) if [[ $(echo "$RESULT" | grep AccessDenied) ]]; then textFail "Access Denied Trying to Get Encryption for $bucket" continue @@ -43,7 +43,7 @@ extra734(){ TEMP_SSE_POLICY_FILE=$(mktemp -t prowler-${ACCOUNT_NUM}-${bucket}.policy.XXXXXXXXXX) # get bucket policy - $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --output text --query Policy > $TEMP_SSE_POLICY_FILE 2>&1 + $AWSCLI s3api get-bucket-policy $PROFILE_OPT --bucket $bucket --region $REGION --output text --query Policy > $TEMP_SSE_POLICY_FILE 2>&1 if [[ $(grep AccessDenied $TEMP_SSE_POLICY_FILE) ]]; then textFail "Access Denied Trying to Get Bucket Policy for $bucket" rm -f $TEMP_SSE_POLICY_FILE @@ -56,7 +56,7 @@ extra734(){ fi # check if the S3 policy forces SSE s3:x-amz-server-side-encryption:true - CHECK_BUCKET_SSE_POLICY_PRESENT=$(cat $TEMP_SSE_POLICY_FILE | jq --arg arn "arn:aws:s3:::${bucket}/*" '.Statement[]|select(.Effect=="Deny" and ((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*") and .Action=="s3:PutObject" and .Resource==$arn and .Condition.StringEquals."s3:x-amz-server-side-encryption" != null)') + CHECK_BUCKET_SSE_POLICY_PRESENT=$(cat $TEMP_SSE_POLICY_FILE | jq --arg arn "arn:${AWS_PARTITION}:s3:::${bucket}/*" '.Statement[]|select(.Effect=="Deny" and ((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*") and .Action=="s3:PutObject" and .Resource==$arn and .Condition.StringEquals."s3:x-amz-server-side-encryption" != null)') if [[ $CHECK_BUCKET_SSE_POLICY_PRESENT == "" ]]; then textFail "Bucket $bucket does 
not enforce encryption!" rm -f $TEMP_SSE_POLICY_FILE From 13ca147d02c186658d3918a89e312267b0ce3f83 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 22 Apr 2020 23:23:17 +0200 Subject: [PATCH 096/104] Updated checks with hardcoded arn to support GovCloud partition --- checks/check114 | 2 +- checks/check_extra71 | 6 +++--- checks/check_extra720 | 2 +- checks/check_extra725 | 6 +++--- checks/check_extra771 | 6 +++--- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/checks/check114 b/checks/check114 index fff68b72..d1777c10 100644 --- a/checks/check114 +++ b/checks/check114 @@ -19,7 +19,7 @@ check114(){ # "Ensure hardware MFA is enabled for the root account (Scored)" COMMAND113=$($AWSCLI iam get-account-summary $PROFILE_OPT --region $REGION --output json --query 'SummaryMap.AccountMFAEnabled') if [ "$COMMAND113" == "1" ]; then - COMMAND114=$($AWSCLI iam list-virtual-mfa-devices $PROFILE_OPT --region $REGION --output text --assignment-status Assigned --query 'VirtualMFADevices[*].[SerialNumber]' | grep '^arn:aws:iam::[0-9]\{12\}:mfa/root-account-mfa-device$') + COMMAND114=$($AWSCLI iam list-virtual-mfa-devices $PROFILE_OPT --region $REGION --output text --assignment-status Assigned --query 'VirtualMFADevices[*].[SerialNumber]' | grep '^arn:${AWS_PARTITION}:iam::[0-9]\{12\}:mfa/root-account-mfa-device$') if [[ "$COMMAND114" ]]; then textFail "Only Virtual MFA is enabled for root" else diff --git a/checks/check_extra71 b/checks/check_extra71 index 197eec97..19465244 100644 --- a/checks/check_extra71 +++ b/checks/check_extra71 @@ -22,15 +22,15 @@ CHECK_ALTERNATE_check701="extra71" extra71(){ # "Ensure users of groups with AdministratorAccess policy have MFA tokens enabled (Not Scored) (Not part of CIS benchmark)" ADMIN_GROUPS='' - AWS_GROUPS=$($AWSCLI $PROFILE_OPT iam list-groups --output text --query 'Groups[].GroupName') + AWS_GROUPS=$($AWSCLI $PROFILE_OPT iam list-groups --output text --region $REGION --query 'Groups[].GroupName') for grp in 
$AWS_GROUPS; do # aws --profile onlinetraining iam list-attached-group-policies --group-name Administrators --query 'AttachedPolicies[].PolicyArn' | grep 'arn:aws:iam::aws:policy/AdministratorAccess' # list-attached-group-policies - CHECK_ADMIN_GROUP=$($AWSCLI $PROFILE_OPT iam list-attached-group-policies --group-name $grp --output json --query 'AttachedPolicies[].PolicyArn' | grep 'arn:aws:iam::aws:policy/AdministratorAccess') + CHECK_ADMIN_GROUP=$($AWSCLI $PROFILE_OPT --region $REGION iam list-attached-group-policies --group-name $grp --output json --query 'AttachedPolicies[].PolicyArn' | grep 'arn:${AWS_PARTITION}:iam::aws:policy/AdministratorAccess') if [[ $CHECK_ADMIN_GROUP ]]; then ADMIN_GROUPS="$ADMIN_GROUPS $grp" textInfo "$grp group provides administrative access" - ADMIN_USERS=$($AWSCLI $PROFILE_OPT iam get-group --group-name $grp --output json --query 'Users[].UserName' | grep '"' | cut -d'"' -f2 ) + ADMIN_USERS=$($AWSCLI $PROFILE_OPT iam get-group --region $REGION --group-name $grp --output json --query 'Users[].UserName' | grep '"' | cut -d'"' -f2 ) for auser in $ADMIN_USERS; do # users in group are Administrators # users diff --git a/checks/check_extra720 b/checks/check_extra720 index 8f950bdb..f8b2a890 100644 --- a/checks/check_extra720 +++ b/checks/check_extra720 @@ -26,7 +26,7 @@ extra720(){ LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $regx --query trailList[].TrailARN --output text) if [[ $LIST_OF_TRAILS ]]; then for trail in $LIST_OF_TRAILS; do - FUNCTION_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --region $regx --query "EventSelectors[*].DataResources[?Type == \`AWS::Lambda::Function\`].Values" --output text |xargs -n1| grep -E "^arn:aws:lambda.*function:$lambdafunction$|^arn:aws:lambda$") + FUNCTION_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --region $regx --query "EventSelectors[*].DataResources[?Type == 
\`AWS::Lambda::Function\`].Values" --output text |xargs -n1| grep -E "^arn:${AWS_PARTITION}:lambda.*function:$lambdafunction$|^arn:${AWS_PARTITION}:lambda$") if [[ $FUNCTION_ENABLED_IN_TRAIL ]]; then textPass "$regx: Lambda function $lambdafunction enabled in trail $trail" "$regx" else diff --git a/checks/check_extra725 b/checks/check_extra725 index 88f43ce5..eb336b79 100644 --- a/checks/check_extra725 +++ b/checks/check_extra725 @@ -23,14 +23,14 @@ extra725(){ # "Check if S3 buckets have Object-level logging enabled in CloudTrail (Not Scored) (Not part of CIS benchmark)" textInfo "Looking for S3 Buckets Object-level logging information in all trails... " - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query 'Buckets[*].{Name:Name}' --output text) - LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --query 'trailList[].TrailARN' --output text) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --region $REGION --query 'Buckets[*].{Name:Name}' --output text) + LIST_OF_TRAILS=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region $REGION --query 'trailList[].TrailARN' --output text) if [[ $LIST_OF_BUCKETS ]]; then for bucketName in $LIST_OF_BUCKETS;do if [[ $LIST_OF_TRAILS ]]; then BUCKET_ENABLED_TRAILS=() for trail in $LIST_OF_TRAILS; do - BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:aws:s3:::$bucketName/\S*$|^arn:aws:s3$|^arn:aws:s3:::$") + BUCKET_ENABLED_IN_TRAIL=$($AWSCLI cloudtrail get-event-selectors --region $REGION $PROFILE_OPT --trail-name $trail --query "EventSelectors[*].DataResources[?Type == \`AWS::S3::Object\`].Values" --output text |xargs -n1| grep -E "^arn:${AWS_PARTITION}:s3:::$bucketName/\S*$|^arn:${AWS_PARTITION}:s3$|^arn:${AWS_PARTITION}:s3:::$") if [[ $BUCKET_ENABLED_IN_TRAIL ]]; then BUCKET_ENABLED_TRAILS+=($trail) # textPass "$regx: 
S3 bucket $bucketName has Object-level logging enabled in trail $trail" "$regx" diff --git a/checks/check_extra771 b/checks/check_extra771 index 0c940649..98d2da9b 100644 --- a/checks/check_extra771 +++ b/checks/check_extra771 @@ -18,14 +18,14 @@ CHECK_ASFF_RESOURCE_TYPE_extra771="AwsS3Bucket" CHECK_ALTERNATE_check771="extra771" extra771(){ - LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --query Buckets[*].Name --output text|xargs -n1) + LIST_OF_BUCKETS=$($AWSCLI s3api list-buckets $PROFILE_OPT --region $REGION --query Buckets[*].Name --output text|xargs -n1) if [[ $LIST_OF_BUCKETS ]]; then for bucket in $LIST_OF_BUCKETS;do - BUCKET_POLICY_STATEMENTS=$($AWSCLI s3api $PROFILE_OPT get-bucket-policy --bucket $bucket --output json --query Policy 2>&1) + BUCKET_POLICY_STATEMENTS=$($AWSCLI s3api $PROFILE_OPT get-bucket-policy --region $REGION --bucket $bucket --output json --query Policy 2>&1) if [[ $BUCKET_POLICY_STATEMENTS == *GetBucketPolicy* ]]; then textInfo "Bucket policy does not exist for bucket $bucket" else - BUCKET_POLICY_BAD_STATEMENTS=$(echo $BUCKET_POLICY_STATEMENTS | jq --arg arn "arn:aws:s3:::$bucket" 'fromjson | .Statement[]|select(.Effect=="Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and (.Action|startswith("s3:Put") or startswith("s3:*")) and .Condition == null)') + BUCKET_POLICY_BAD_STATEMENTS=$(echo $BUCKET_POLICY_STATEMENTS | jq --arg arn "arn:${AWS_PARTITION}:s3:::$bucket" 'fromjson | .Statement[]|select(.Effect=="Allow" and (((.Principal|type == "object") and .Principal.AWS == "*") or ((.Principal|type == "string") and .Principal == "*")) and (.Action|startswith("s3:Put") or startswith("s3:*")) and .Condition == null)') if [[ $BUCKET_POLICY_BAD_STATEMENTS != "" ]]; then textFail "Bucket $bucket allows public write: $BUCKET_POLICY_BAD_STATEMENTS" else From 33523885f1f10534be1482886ab057b18482c0a7 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: 
Thu, 23 Apr 2020 16:27:59 +0200 Subject: [PATCH 097/104] Delete LIST_OF_CHECKS_AND_GROUPS.md --- LIST_OF_CHECKS_AND_GROUPS.md | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 LIST_OF_CHECKS_AND_GROUPS.md diff --git a/LIST_OF_CHECKS_AND_GROUPS.md b/LIST_OF_CHECKS_AND_GROUPS.md deleted file mode 100644 index b31b3c8d..00000000 --- a/LIST_OF_CHECKS_AND_GROUPS.md +++ /dev/null @@ -1,4 +0,0 @@ -``` -./prowler -l # to see all available checks and groups. -./prowler -L # to see all available groups only. -``` From e25125fbfc2ec48f2df7a21aea361b0e4a3dd422 Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 26 Apr 2020 00:40:27 +0100 Subject: [PATCH 098/104] Ensure that hyphen is at end of tr string to prevent 'reverse collating sequence order' error in GNU tr Stop echo from adding newlines using `-n`, removing the need to stop replacing new-line characters with underscores Fixes #573 --- include/outputs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/outputs b/include/outputs index 4e61c0c3..f5455aec 100644 --- a/include/outputs +++ b/include/outputs @@ -201,7 +201,7 @@ generateJsonAsffOutput(){ --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ --arg TITLE_TEXT "$TITLE_TEXT" \ --arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \ - --arg UNIQUE_ID "$(LC_ALL=C echo -e "${message}" | tr -cs '[:alnum:]._~-\n' '_')" \ + --arg UNIQUE_ID "$(LC_ALL=C echo -e -n "${message}" | tr -cs '[:alnum:]._~-' '_')" \ --arg STATUS "$status" \ --arg SEVERITY "$severity" \ --arg TITLE_ID "$TITLE_ID" \ From f84b843388d45a4090f65d293938bb556bdc835a Mon Sep 17 00:00:00 2001 From: Marc Jay Date: Sun, 26 Apr 2020 01:02:39 +0100 Subject: [PATCH 099/104] Wrap all mode checks with whitespace, along with comparison strings, so only exact string matches are allowed, preventing clashes when output modes are named similarly, e.g. 
'json' and 'json-asff' Fixes #571 --- include/junit_integration | 2 +- include/outputs | 32 ++++++++++++++++---------------- prowler | 4 ++-- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/include/junit_integration b/include/junit_integration index 54bcd892..c52ecb42 100644 --- a/include/junit_integration +++ b/include/junit_integration @@ -16,7 +16,7 @@ JUNIT_OUTPUT_DIRECTORY="junit-reports" is_junit_output_enabled() { - if [[ ${MODES[@]} =~ "junit-xml" ]]; then + if [[ " ${MODES[@]} " =~ " junit-xml " ]]; then true else false diff --git a/include/outputs b/include/outputs index 4e61c0c3..40913044 100644 --- a/include/outputs +++ b/include/outputs @@ -32,13 +32,13 @@ textPass(){ else REPREGION=$REGION fi - if [[ "${MODES[@]}" =~ "csv" ]]; then + if [[ " ${MODES[@]} " =~ " csv " ]]; then echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi - if [[ "${MODES[@]}" =~ "json" ]]; then + if [[ " ${MODES[@]} " =~ " json " ]]; then generateJsonOutput "$1" "Pass" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON fi - if [[ "${MODES[@]}" =~ "json-asff" ]]; then + if [[ " ${MODES[@]} " =~ " json-asff " ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then @@ -48,10 +48,10 @@ textPass(){ if is_junit_output_enabled; then output_junit_success "$1" fi - if [[ "${MODES[@]}" =~ "mono" ]]; then + if [[ " ${MODES[@]} " =~ " mono " ]]; then echo " $OK PASS!$NORMAL $1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT fi - if [[ "${MODES[@]}" =~ "text" || "${MODES[@]}" =~ "mono" ]]; then + if [[ " ${MODES[@]} " =~ " text " || " ${MODES[@]} " =~ " mono " ]]; then echo " $OK PASS!$NORMAL $1" fi } @@ -66,19 +66,19 @@ textInfo(){ else REPREGION=$REGION fi - if [[ "${MODES[@]}" =~ "csv" ]]; then + if [[ " 
${MODES[@]} " =~ " csv " ]]; then echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} fi - if [[ "${MODES[@]}" =~ "json" ]]; then + if [[ " ${MODES[@]} " =~ " json " ]]; then generateJsonOutput "$1" "Info" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi if is_junit_output_enabled; then output_junit_info "$1" fi - if [[ "${MODES[@]}" =~ "mono" ]]; then + if [[ " ${MODES[@]} " =~ " mono " ]]; then echo " $NOTICE INFO! $1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT fi - if [[ "${MODES[@]}" =~ "text" ]]; then + if [[ " ${MODES[@]} " =~ " text " ]]; then echo " $NOTICE INFO! $1 $NORMAL" fi } @@ -91,13 +91,13 @@ textFail(){ else REPREGION=$REGION fi - if [[ "${MODES[@]}" =~ "csv" ]]; then + if [[ " ${MODES[@]} " =~ " csv " ]]; then echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} fi - if [[ "${MODES[@]}" =~ "json" ]]; then + if [[ " ${MODES[@]} " =~ " json " ]]; then generateJsonOutput "$1" "Fail" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi - if [[ "${MODES[@]}" =~ "json-asff" ]]; then + if [[ " ${MODES[@]} " =~ " json-asff " ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") echo "${JSON_ASFF_OUTPUT}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_ASFF} if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then @@ -107,10 +107,10 @@ textFail(){ if is_junit_output_enabled; then output_junit_failure "$1" fi - if [[ "${MODES[@]}" =~ "mono" ]]; then + if [[ " ${MODES[@]} " =~ " mono " ]]; then echo " $BAD FAIL! $1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT fi - if [[ "${MODES[@]}" =~ "text" ]]; then + if [[ " ${MODES[@]} " =~ " text " ]]; then echo " $BAD FAIL! 
$1 $NORMAL" fi } @@ -150,9 +150,9 @@ textTitle(){ group_ids="$CYAN [$5] $NORMAL" fi - if [[ "${MODES[@]}" =~ "csv" ]]; then + if [[ " ${MODES[@]} " =~ " csv " ]]; then >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} - elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then + elif [[ " ${MODES[@]} " =~ " json " || " ${MODES[@]} " =~ " json-asff " ]]; then : else if [[ "$ITEM_SCORED" == "Scored" ]]; then diff --git a/prowler b/prowler index 2e148724..3593b15f 100755 --- a/prowler +++ b/prowler @@ -445,7 +445,7 @@ get_all_checks_without_exclusion() { } ### All functions defined above ... run the workflow -if [[ ${MODES[@]} =~ "mono" || ${MODES[@]} =~ "text" ]]; then +if [[ " ${MODES[@]} " =~ " mono " || " ${MODES[@]} " =~ " text " ]]; then prowlerBanner fi @@ -462,7 +462,7 @@ if [[ $PRINTGROUPSONLY == "1" ]]; then fi # Check that jq is installed for JSON outputs -if [[ ${MODES[@]} =~ "json" || ${MODES[@]} =~ "json-asff" ]]; then +if [[ " ${MODES[@]} " =~ " json " || " ${MODES[@]} " =~ " json-asff " ]]; then . 
$PROWLER_DIR/include/jq_detector fi From 54f2b72cb6e011467321b64085e139922dac0bc1 Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Tue, 28 Apr 2020 14:12:04 +0300 Subject: [PATCH 100/104] Fix check12's grep to find users who really have password access (cherry picked from commit 4006c581a06c449b66ede8892b9ae18c735ad34c) --- checks/check12 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check12 b/checks/check12 index 800b64ce..e2f9c12a 100644 --- a/checks/check12 +++ b/checks/check12 @@ -19,7 +19,7 @@ CHECK_ALTERNATE_check102="check12" check12(){ # "Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Scored)" # List users with password enabled - COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep true | awk '{ print $1 }') + COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep -F ' true' | awk '{ print $1 }') COMMAND12=$( for i in $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED; do cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$8 }' |grep "^$i " |grep false | awk '{ print $1 }' From dbca70ef2e47ddfe0851756f83ee1883b75ed67b Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Tue, 28 Apr 2020 14:28:59 +0300 Subject: [PATCH 101/104] Add $ to end of regex --- checks/check12 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checks/check12 b/checks/check12 index e2f9c12a..adccb3c1 100644 --- a/checks/check12 +++ b/checks/check12 @@ -19,7 +19,7 @@ CHECK_ALTERNATE_check102="check12" check12(){ # "Ensure multi-factor authentication (MFA) is enabled for all IAM users that have a console password (Scored)" # List users with password enabled - COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep -F ' true' | awk '{ print $1 }') + COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED=$(cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$4 }' |grep -F ' true$' | awk '{ print $1 }') 
COMMAND12=$( for i in $COMMAND12_LIST_USERS_WITH_PASSWORD_ENABLED; do cat $TEMP_REPORT_FILE|awk -F, '{ print $1,$8 }' |grep "^$i " |grep false | awk '{ print $1 }' From 1f949b4175e1c7987c36ee49cf153279eeaaa2fb Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 29 Apr 2020 12:06:47 +0200 Subject: [PATCH 102/104] Improved AWS partition handle --- checks/check_extra788 | 1 + include/assume_role | 6 +++--- include/outputs | 34 +++++++++++++++++----------------- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/checks/check_extra788 b/checks/check_extra788 index 1afcb9db..f09a9fcc 100644 --- a/checks/check_extra788 +++ b/checks/check_extra788 @@ -40,6 +40,7 @@ extra788(){ if [[ $CHECK_ES_DOMAIN_POLICY_HAS_CONDITION ]]; then # get content of IpAddress."aws:SourceIp" and get a clean list LIST_CONDITION_IPS=$(cat $TEMP_POLICY_FILE | jq '.Statement[0] .Condition.IpAddress."aws:SourceIp"'| awk -F'"' '{print $2}' | tr -d '",^$' | sed '/^$/d') + unset CONDITION_HAS_PUBLIC_IP_ARRAY for condition_ip in "${LIST_CONDITION_IPS}";do CONDITION_HAS_PRIVATE_IP=$(echo "${condition_ip}" | grep -E '^(192\.168|10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.)') if [[ $CONDITION_HAS_PRIVATE_IP ]];then diff --git a/include/assume_role b/include/assume_role index 58b21399..25a244f4 100644 --- a/include/assume_role +++ b/include/assume_role @@ -29,18 +29,18 @@ if [[ $ACCOUNT_TO_ASSUME ]]; then #Check if external ID has bee provided if so execute with external ID if not ignore if [[ -z $ROLE_EXTERNAL_ID ]]; then # assume role command - $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ --role-session-name ProwlerAssessmentSession \ --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE else - $AWSCLI $PROFILE_OPT sts assume-role --role-arn 
arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + $AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ --role-session-name ProwlerAssessmentSession \ --duration-seconds $SESSION_DURATION_TO_ASSUME \ --external-id $ROLE_EXTERNAL_ID > $TEMP_STS_ASSUMED_FILE fi # assume role command - #$AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:aws:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ + #$AWSCLI $PROFILE_OPT sts assume-role --role-arn arn:${AWS_PARTITION}:iam::$ACCOUNT_TO_ASSUME:role/$ROLE_TO_ASSUME \ # --role-session-name ProwlerAssessmentSession \ # --duration-seconds $SESSION_DURATION_TO_ASSUME > $TEMP_STS_ASSUMED_FILE diff --git a/include/outputs b/include/outputs index 40913044..24fb83f4 100644 --- a/include/outputs +++ b/include/outputs @@ -32,13 +32,13 @@ textPass(){ else REPREGION=$REGION fi - if [[ " ${MODES[@]} " =~ " csv " ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}PASS${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi - if [[ " ${MODES[@]} " =~ " json " ]]; then + if [[ "${MODES[@]}" =~ "json" ]]; then generateJsonOutput "$1" "Pass" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON fi - if [[ " ${MODES[@]} " =~ " json-asff " ]]; then + if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL") echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then @@ -48,10 +48,10 @@ textPass(){ if is_junit_output_enabled; then output_junit_success "$1" fi - if [[ " ${MODES[@]} " =~ " mono " ]]; then + if [[ "${MODES[@]}" =~ "mono" ]]; then echo " $OK PASS!$NORMAL $1" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT fi - if [[ " ${MODES[@]} " =~ " text " || " ${MODES[@]} " =~ " mono " ]]; then + if [[ "${MODES[@]}" =~ "text" || "${MODES[@]}" =~ "mono" ]]; then 
echo " $OK PASS!$NORMAL $1" fi } @@ -66,19 +66,19 @@ textInfo(){ else REPREGION=$REGION fi - if [[ " ${MODES[@]} " =~ " csv " ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}INFO${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} fi - if [[ " ${MODES[@]} " =~ " json " ]]; then + if [[ "${MODES[@]}" =~ "json" ]]; then generateJsonOutput "$1" "Info" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi if is_junit_output_enabled; then output_junit_info "$1" fi - if [[ " ${MODES[@]} " =~ " mono " ]]; then + if [[ "${MODES[@]}" =~ "mono" ]]; then echo " $NOTICE INFO! $1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT fi - if [[ " ${MODES[@]} " =~ " text " ]]; then + if [[ "${MODES[@]}" =~ "text" ]]; then echo " $NOTICE INFO! $1 $NORMAL" fi } @@ -91,13 +91,13 @@ textFail(){ else REPREGION=$REGION fi - if [[ " ${MODES[@]} " =~ " csv " ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}FAIL${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$1" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} fi - if [[ " ${MODES[@]} " =~ " json " ]]; then + if [[ "${MODES[@]}" =~ "json" ]]; then generateJsonOutput "$1" "Fail" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi - if [[ " ${MODES[@]} " =~ " json-asff " ]]; then + if [[ "${MODES[@]}" =~ "json-asff" ]]; then JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "FAILED" "HIGH") echo "${JSON_ASFF_OUTPUT}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_ASFF} if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then @@ -107,10 +107,10 @@ textFail(){ if is_junit_output_enabled; then output_junit_failure "$1" fi - if [[ " ${MODES[@]} " =~ " mono " ]]; then + if [[ "${MODES[@]}" =~ "mono" ]]; then echo " $BAD FAIL! 
$1 $NORMAL" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_TEXT fi - if [[ " ${MODES[@]} " =~ " text " ]]; then + if [[ "${MODES[@]}" =~ "text" ]]; then echo " $BAD FAIL! $1 $NORMAL" fi } @@ -150,9 +150,9 @@ textTitle(){ group_ids="$CYAN [$5] $NORMAL" fi - if [[ " ${MODES[@]} " =~ " csv " ]]; then + if [[ "${MODES[@]}" =~ "csv" ]]; then >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} - elif [[ " ${MODES[@]} " =~ " json " || " ${MODES[@]} " =~ " json-asff " ]]; then + elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else if [[ "$ITEM_SCORED" == "Scored" ]]; then @@ -213,7 +213,7 @@ generateJsonAsffOutput(){ -n '{ "SchemaVersion": "2018-10-08", "Id": "prowler-\($TITLE_ID)-\($ACCOUNT_NUM)-\($REPREGION)-\($UNIQUE_ID)", - "ProductArn": "arn:aws:securityhub:\($REPREGION):\($ACCOUNT_NUM):product/\($ACCOUNT_NUM)/default", + "ProductArn": "arn:${AWS_PARTITION}:securityhub:\($REPREGION):\($ACCOUNT_NUM):product/\($ACCOUNT_NUM)/default", "ProductFields": { "ProviderName": "Prowler", "ProviderVersion": $PROWLER_VERSION From a2cbcc00eb25275bfa56376fa4c76014453b9e85 Mon Sep 17 00:00:00 2001 From: Toni de la Fuente Date: Wed, 29 Apr 2020 18:10:41 +0200 Subject: [PATCH 103/104] Fix issue with aws-cli v2 and timestamp on check24 #585 --- include/os_detector | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/include/os_detector b/include/os_detector index 2d2faf76..b06754e2 100644 --- a/include/os_detector +++ b/include/os_detector @@ -32,16 +32,28 @@ bsd_how_older_from_today() { # function to convert from timestamp to date # output date format %Y-%m-%d gnu_timestamp_to_date() { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$("$DATE_CMD" -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE + # if date comes from cli v2 in format like 2020-04-29T10:13:09.191000-04:00 + # we have to get only '%Y-%m-%d' + if [[ $1 = 20* ]];then + echo $1 
| cut -f1 -d"T" + else + # remove fractions of a second + TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") + OUTPUT_DATE=$("$DATE_CMD" -d @$TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + echo $OUTPUT_DATE + fi } bsd_timestamp_to_date() { - # remove fractions of a second - TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") - OUTPUT_DATE=$("$DATE_CMD" -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') - echo $OUTPUT_DATE + # if date comes from cli v2 in format like 2020-04-29T10:13:09.191000-04:00 + # we have to get only '%Y-%m-%d' + if [[ $1 = 20* ]];then + echo $1 | cut -f1 -d"T" + else + # remove fractions of a second + TIMESTAMP_TO_CONVERT=$(echo $1 | cut -f1 -d".") + OUTPUT_DATE=$("$DATE_CMD" -r $TIMESTAMP_TO_CONVERT +'%Y-%m-%d') + echo $OUTPUT_DATE + fi } gnu_decode_report() { From dd0ef8c0b4e2a6eac4b1a22742ca540b69cdd3e5 Mon Sep 17 00:00:00 2001 From: Nimrod Kor Date: Wed, 29 Apr 2020 21:39:00 +0300 Subject: [PATCH 104/104] If no local cloudtrail trail is found - check org trail --- checks/check21 | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/checks/check21 b/checks/check21 index ba297461..d354bad5 100644 --- a/checks/check21 +++ b/checks/check21 @@ -35,6 +35,11 @@ check21(){ done if [[ $trail_count == 0 ]]; then - textFail "No CloudTrail trails were found in the account" + ORG_TRAIL=$($AWSCLI cloudtrail describe-trails $PROFILE_OPT --region us-east-1 | jq '.trailList[] | select(.IsMultiRegionTrail and .IsOrganizationTrail) | .Name' | sed 's/"//g') + if [[ $ORG_TRAIL != "" ]]; then + textPass "$ORG_TRAIL trail in $regx is enabled for all regions" + else + textFail "No CloudTrail trails were found in the account" + fi fi } \ No newline at end of file