diff --git a/checks/check45 b/checks/check45 new file mode 100644 index 00000000..fd847076 --- /dev/null +++ b/checks/check45 @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_check45="4.5" +CHECK_TITLE_check45="[check45] Ensure no Network ACLs allow ingress from 0.0.0.0/0 to SSH port 22" +CHECK_SCORED_check45="SCORED" +CHECK_TYPE_check45="LEVEL2" +CHECK_SEVERITY_check45="High" +CHECK_ASFF_TYPE_check45="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check45="AwsEc2NetworkAcl" +CHECK_ALTERNATE_check401="check45" +CHECK_SERVICENAME_check45="ec2" +CHECK_RISK_check45='Even having a perimeter firewall; having network acls open allows any user or malware with vpc access to scan for well known and sensitive ports and gain access to instance.' +CHECK_REMEDIATION_check45='Apply Zero Trust approach. Implement a process to scan and remediate unrestricted or overly permissive network acls. Recommended best practices is to narrow the definition for the minimum ports required.' +CHECK_DOC_check45='https://docs.aws.amazon.com/vpc/latest/userguide/vpc-network-acls.html' +CHECK_CAF_EPIC_check45='Infrastructure Security' + +check45(){ + for regx in $REGIONS; do + NACL_LIST=$($AWSCLI ec2 describe-network-acls --query 'NetworkAcls[?Entries[?(((!PortRange) || (PortRange.From<=`22` && PortRange.To>=`22`)) && ((CidrBlock == `0.0.0.0/0`) && (Egress == `false`) && (RuleAction == `allow`)))]].{NetworkAclId:NetworkAclId}' $PROFILE_OPT --region $regx --output text) + if [[ $NACL_LIST ]];then + for NACL in $NACL_LIST;do + textInfo "$regx: Found Network ACL: $NACL open to 0.0.0.0/0 for SSH port 22" "$regx" + done + else + textPass "$regx: No Network ACL found with SSH port 22 open to 0.0.0.0/0" "$regx" + fi + done +} diff --git a/checks/check46 b/checks/check46 new file mode 100644 index 00000000..b98eb2f0 --- /dev/null +++ b/checks/check46 @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
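The JMESPath filter used by check45 above can be exercised on its own before the check is wired into a group run. A minimal sketch, assuming a configured AWS CLI; the region is only an example and is supplied by $REGIONS/$PROFILE_OPT in the real check:

    # Illustrative only (not part of the patch): list NACL IDs that have an
    # ingress allow rule from 0.0.0.0/0 covering SSH port 22, in one region.
    aws ec2 describe-network-acls --region eu-west-1 --output text \
      --query 'NetworkAcls[?Entries[?(((!PortRange) || (PortRange.From<=`22` && PortRange.To>=`22`)) && ((CidrBlock == `0.0.0.0/0`) && (Egress == `false`) && (RuleAction == `allow`)))]].{NetworkAclId:NetworkAclId}'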
+CHECK_ID_check46="4.6" +CHECK_TITLE_check46="[check46] Ensure no Network ACLs allow ingress from 0.0.0.0/0 to Microsoft RDP port 3389" +CHECK_SCORED_check46="SCORED" +CHECK_TYPE_check46="LEVEL2" +CHECK_SEVERITY_check46="High" +CHECK_ASFF_TYPE_check46="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_check46="AwsEc2NetworkAcl" +CHECK_ALTERNATE_check401="check46" +CHECK_SERVICENAME_check46="ec2" +CHECK_RISK_check46='Even having a perimeter firewall; having network acls open allows any user or malware with vpc access to scan for well known and sensitive ports and gain access to instance.' +CHECK_REMEDIATION_check46='Apply Zero Trust approach. Implement a process to scan and remediate unrestricted or overly permissive network acls. Recommended best practices is to narrow the definition for the minimum ports required.' +CHECK_DOC_check46='https://docs.aws.amazon.com/vpc/latest/userguide/vpc-network-acls.html' +CHECK_CAF_EPIC_check46='Infrastructure Security' + +check46(){ + for regx in $REGIONS; do + NACL_LIST=$($AWSCLI ec2 describe-network-acls --query 'NetworkAcls[?Entries[?(((!PortRange) || (PortRange.From<=`3389` && PortRange.To>=`3389`)) && ((CidrBlock == `0.0.0.0/0`) && (Egress == `false`) && (RuleAction == `allow`)))]].{NetworkAclId:NetworkAclId}' $PROFILE_OPT --region $regx --output text) + if [[ $NACL_LIST ]];then + for NACL in $NACL_LIST;do + textInfo "$regx: Found Network ACL: $NACL open to 0.0.0.0/0 for Microsoft RDP port 3389" "$regx" + done + else + textPass "$regx: No Network ACL found with Microsoft RDP port 3389 open to 0.0.0.0/0" "$regx" + fi + done +} diff --git a/checks/check_extra7113 b/checks/check_extra7113 index a9dcbcce..876ce7eb 100644 --- a/checks/check_extra7113 +++ b/checks/check_extra7113 @@ -36,20 +36,19 @@ CHECK_DOC_extra7113='https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER CHECK_CAF_EPIC_extra7113='Data Protection' extra7113(){ - textInfo "Looking for RDS Volumes in all regions... " for regx in $REGIONS; do - LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) + LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query "DBInstances[?Engine != 'docdb'].DBInstanceIdentifier" --output text) if [[ $LIST_OF_RDS_INSTANCES ]];then for rdsinstance in $LIST_OF_RDS_INSTANCES; do IS_DELETIONPROTECTION=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].DeletionProtection' --output text) if [[ $IS_DELETIONPROTECTION == "False" ]]; then - textFail "$regx: RDS instance $rdsinstance deletion protection is not enabled!" "$regx" + textFail "$regx: RDS instance $rdsinstance deletion protection is not enabled!" 
"$regx" "$rdsinstance" else - textPass "$regx: RDS instance $rdsinstance deletion protection is enabled" "$regx" + textPass "$regx: RDS instance $rdsinstance deletion protection is enabled" "$regx" "$rdsinstance" fi done else - textInfo "$regx: No RDS instances found" "$regx" + textInfo "$regx: No RDS instances found" "$regx" "$rdsinstance" fi done } diff --git a/checks/check_extra7126 b/checks/check_extra7126 index f1b80877..7b91e0e2 100644 --- a/checks/check_extra7126 +++ b/checks/check_extra7126 @@ -26,7 +26,7 @@ CHECK_CAF_EPIC_extra7126='Data Protection' extra7126(){ for regx in $REGIONS; do - LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --output text |grep -v :alias/aws/ |awk '{ print $4 }') + LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --query "Aliases[].[AliasName,TargetKeyId]" --output text |grep -v ^alias/aws/ |awk '{ print $2 }') if [[ $LIST_OF_CUSTOMER_KMS_KEYS ]];then for key in $LIST_OF_CUSTOMER_KMS_KEYS; do CHECK_STATUS=$($AWSCLI kms describe-key --key-id $key $PROFILE_OPT --region $regx --output json | jq -r '.KeyMetadata.KeyState') diff --git a/checks/check_extra7129 b/checks/check_extra7129 index 130c8074..a96ad6c2 100644 --- a/checks/check_extra7129 +++ b/checks/check_extra7129 @@ -28,30 +28,45 @@ extra7129(){ for regx in $REGIONS; do LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers $PROFILE_OPT --region $regx --query 'LoadBalancers[?Scheme == `internet-facing` && Type == `application`].[LoadBalancerName]' --output text) LIST_OF_WAFV2_WEBACL_ARN=$($AWSCLI wafv2 list-web-acls $PROFILE_OPT --region=$regx --scope=REGIONAL --query WebACLs[*].ARN --output text) + LIST_OF_WAFV1_WEBACL_WEBACLID=$($AWSCLI waf-regional list-web-acls $PROFILE_OPT --region $regx --query WebACLs[*].[WebACLId] --output text) + if [[ $LIST_OF_ELBSV2 ]]; then for alb in $LIST_OF_ELBSV2; do - if [[ $LIST_OF_WAFV2_WEBACL_ARN ]]; then + if [[ ${#LIST_OF_WAFV2_WEBACL_ARN[@]} -gt 0 || ${#LIST_OF_WAFV1_WEBACL_WEBACLID[@]} -gt 0 ]]; then WAF_PROTECTED_ALBS=() for wafaclarn in $LIST_OF_WAFV2_WEBACL_ARN; do ALB_RESOURCES_IN_WEBACL=$($AWSCLI wafv2 list-resources-for-web-acl $PROFILE_OPT --web-acl-arn $wafaclarn --region=$regx --resource-type APPLICATION_LOAD_BALANCER --query ResourceArns --output text | xargs -n1 | awk -F'/' '{ print $3 }'| grep $alb) - if [[ $ALB_RESOURCES_IN_WEBACL ]]; then + if [[ $ALB_RESOURCES_IN_WEBACL ]]; then WAF_PROTECTED_ALBS+=($wafaclarn) fi done - if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 ]]; then - for wafaclarn in "${WAF_PROTECTED_ALBS[@]}"; do - WAFV2_WEBACL_ARN_SHORT=$(echo $wafaclarn | awk -F'/' '{ print $3 }') - textPass "$regx: Application Load Balancer $alb is protected by WAFv2 ACL $WAFV2_WEBACL_ARN_SHORT" "$regx" - done + for wafv1aclid in $LIST_OF_WAFV1_WEBACL_WEBACLID; do + ALB_RESOURCES_IN_WEBACL=$($AWSCLI waf-regional list-resources-for-web-acl $PROFILE_OPT --web-acl-id $wafv1aclid --region=$regx --resource-type APPLICATION_LOAD_BALANCER --output text --query "[ResourceArns]"| grep $alb) + if [[ $ALB_RESOURCES_IN_WEBACL ]]; then + WAFv1_PROTECTED_ALBS+=($wafv1aclid) + fi + done + if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 || ${#WAFv1_PROTECTED_ALBS[@]} -gt 0 ]]; then + if [[ ${#WAF_PROTECTED_ALBS[@]} -gt 0 ]]; then + for wafaclarn in "${WAF_PROTECTED_ALBS[@]}"; do + WAFV2_WEBACL_ARN_SHORT=$(echo $wafaclarn | awk -F'/' '{ print $3 }') + textPass "$regx: Application Load Balancer $alb is protected by WAFv2 ACL $WAFV2_WEBACL_ARN_SHORT" "$regx" + done + fi + if [[ ${#WAFv1_PROTECTED_ALBS[@]} -gt 0 ]]; then 
+ for wafv1aclid in "${WAFv1_PROTECTED_ALBS[@]}"; do + textPass "$regx: Application Load Balancer $alb is protected by WAFv1 ACL $wafv1aclid" "$regx" + done + fi else - textFail "$regx: Application Load Balancer $alb is not protected by WAFv2 ACL" "$regx" + textFail "$regx: Application Load Balancer $alb is not protected by WAF ACL" "$regx" fi - else - textFail "$regx: Application Load Balancer $alb is not protected no WAFv2 ACL found" "$regx" + else + textFail "$regx: Application Load Balancer $alb is not protected, no WAF ACL found" "$regx" fi - done + done else textInfo "$regx: No Application Load Balancers found" "$regx" - fi + fi done } \ No newline at end of file diff --git a/checks/check_extra7131 b/checks/check_extra7131 index fc8266a1..946f1682 100644 --- a/checks/check_extra7131 +++ b/checks/check_extra7131 @@ -32,13 +32,13 @@ extra7131(){ RDS_NAME=$(echo $rds_instance | awk '{ print $1; }') RDS_AUTOMINORUPGRADE_FLAG=$(echo $rds_instance | awk '{ print $2; }') if [[ $RDS_AUTOMINORUPGRADE_FLAG == "True" ]];then - textPass "$regx: RDS instance: $RDS_NAME is has minor version upgrade enabled" "$regx" + textPass "$regx: RDS instance: $RDS_NAME has minor version upgrade enabled" "$regx" "$RDS_NAME" else - textFail "$regx: RDS instance: $RDS_NAME does not have minor version upgrade enabled" "$regx" + textFail "$regx: RDS instance: $RDS_NAME does not have minor version upgrade enabled" "$regx" "$RDS_NAME" fi done <<< "$LIST_OF_RDS_INSTANCES" else - textInfo "$regx: no RDS instances found" "$regx" + textInfo "$regx: no RDS instances found" "$regx" "$RDS_NAME" fi done } diff --git a/checks/check_extra7132 b/checks/check_extra7132 index eb64827d..5eefb58f 100644 --- a/checks/check_extra7132 +++ b/checks/check_extra7132 @@ -25,19 +25,19 @@ CHECK_CAF_EPIC_extra7132='Logging and Monitoring' extra7132(){ for regx in $REGIONS; do - RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) + RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query "DBInstances[?Engine != 'docdb'].DBInstanceIdentifier" --output text) if [[ $RDS_INSTANCES ]];then for rdsinstance in ${RDS_INSTANCES}; do RDS_NAME="$rdsinstance" MONITORING_FLAG=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].[EnhancedMonitoringResourceArn]' --output text) if [[ $MONITORING_FLAG == "None" ]];then - textFail "$regx: RDS instance: $RDS_NAME has enhanced monitoring disabled!" "$rex" + textFail "$regx: RDS instance: $RDS_NAME has enhanced monitoring disabled!" "$regx" "$RDS_NAME" else - textPass "$regx: RDS instance: $RDS_NAME has enhanced monitoring enabled." "$regx" + textPass "$regx: RDS instance: $RDS_NAME has enhanced monitoring enabled."
"$regx" "$RDS_NAME" fi done else - textInfo "$regx: no RDS instances found" "$regx" + textInfo "$regx: no RDS instances found" "$regx" "$RDS_NAME" fi done } diff --git a/checks/check_extra7133 b/checks/check_extra7133 index 2be3d662..c2eefd5e 100644 --- a/checks/check_extra7133 +++ b/checks/check_extra7133 @@ -25,19 +25,19 @@ CHECK_CAF_EPIC_extra7133='Data Protection' extra7133(){ for regx in $REGIONS; do - RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) + RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query "DBInstances[?Engine != 'docdb'].DBInstanceIdentifier" --output text) if [[ $RDS_INSTANCES ]];then for rdsinstance in ${RDS_INSTANCES}; do RDS_NAME="$rdsinstance" MULTIAZ_FLAG=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].MultiAZ' --output text) if [[ $MULTIAZ_FLAG == "True" ]];then - textPass "$regx: RDS instance: $RDS_NAME has multi-AZ enabled" "$rex" + textPass "$regx: RDS instance: $RDS_NAME has multi-AZ enabled" "$regx" "$RDS_NAME" else - textFail "$regx: RDS instance: $RDS_NAME has multi-AZ disabled!" "$regx" + textFail "$regx: RDS instance: $RDS_NAME has multi-AZ disabled!" "$regx" "$RDS_NAME" fi done else - textInfo "$regx: no RDS instances found" "$regx" + textInfo "$regx: no RDS instances found" "$regx" "$RDS_NAME" fi done } diff --git a/checks/check_extra7134 b/checks/check_extra7134 new file mode 100644 index 00000000..bb577b28 --- /dev/null +++ b/checks/check_extra7134 @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7134="7.134" +CHECK_TITLE_extra7134="[extra7134] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to FTP ports 20 or 21 (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra7134="NOT_SCORED" +CHECK_TYPE_extra7134="EXTRA" +CHECK_SEVERITY_extra7134="High" +CHECK_ASFF_RESOURCE_TYPE_extra7134="AwsEc2SecurityGroup" +CHECK_ALTERNATE_check7134="extra7134" +CHECK_SERVICENAME_extra7134="ec2" +CHECK_RISK_extra7134='If Security groups are not properly configured the attack surface is increased. ' +CHECK_REMEDIATION_extra7134='Use a Zero Trust approach. Narrow ingress traffic as much as possible. Consider north-south as well as east-west traffic.' 
+CHECK_DOC_extra7134='https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html' +CHECK_CAF_EPIC_extra7134='Infrastructure Security' + +extra7134(){ + for regx in $REGIONS; do + SG_LIST=$($AWSCLI ec2 describe-security-groups --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort==`20` && ToPort==`21`)) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}' $PROFILE_OPT --region $regx --output text) + if [[ $SG_LIST ]];then + for SG in $SG_LIST;do + textFail "$regx: Found Security Group: $SG open to 0.0.0.0/0 for FTP ports" "$regx" + done + else + textPass "$regx: No Security Groups found with any port open to 0.0.0.0/0 for FTP ports" "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7135 b/checks/check_extra7135 new file mode 100644 index 00000000..3150a2d1 --- /dev/null +++ b/checks/check_extra7135 @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7135="7.135" +CHECK_TITLE_extra7135="[extra7135] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Kafka port 9092 (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra7135="NOT_SCORED" +CHECK_TYPE_extra7135="EXTRA" +CHECK_SEVERITY_extra7135="High" +CHECK_ASFF_RESOURCE_TYPE_extra7135="AwsEc2SecurityGroup" +CHECK_ALTERNATE_check7135="extra7135" +CHECK_SERVICENAME_extra7135="ec2" +CHECK_RISK_extra7135='If Security groups are not properly configured the attack surface is increased. ' +CHECK_REMEDIATION_extra7135='Use a Zero Trust approach. Narrow ingress traffic as much as possible. Consider north-south as well as east-west traffic.' +CHECK_DOC_extra7135='https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html' +CHECK_CAF_EPIC_extra7135='Infrastructure Security' + +extra7135(){ + for regx in $REGIONS; do + SG_LIST=$($AWSCLI ec2 describe-security-groups --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort==`9092` && ToPort==`9092`)) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}' $PROFILE_OPT --region $regx --output text) + if [[ $SG_LIST ]];then + for SG in $SG_LIST;do + textFail "$regx: Found Security Group: $SG open to 0.0.0.0/0 for Kafka ports" "$regx" + done + else + textPass "$regx: No Security Groups found with any port open to 0.0.0.0/0 for Kafka ports" "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7136 b/checks/check_extra7136 new file mode 100644 index 00000000..6117d61e --- /dev/null +++ b/checks/check_extra7136 @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7136="7.136" +CHECK_TITLE_extra7136="[extra7136] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Telnet port 23 (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra7136="NOT_SCORED" +CHECK_TYPE_extra7136="EXTRA" +CHECK_SEVERITY_extra7136="High" +CHECK_ASFF_RESOURCE_TYPE_extra7136="AwsEc2SecurityGroup" +CHECK_ALTERNATE_check7136="extra7136" +CHECK_SERVICENAME_extra7136="ec2" +CHECK_RISK_extra7136='If Security groups are not properly configured the attack surface is increased. ' +CHECK_REMEDIATION_extra7136='Use a Zero Trust approach. Narrow ingress traffic as much as possible. Consider north-south as well as east-west traffic.' +CHECK_DOC_extra7136='https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html' +CHECK_CAF_EPIC_extra7136='Infrastructure Security' + +extra7136(){ + for regx in $REGIONS; do + SG_LIST=$($AWSCLI ec2 describe-security-groups --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort==`23` && ToPort==`23`)) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}' $PROFILE_OPT --region $regx --output text) + if [[ $SG_LIST ]];then + for SG in $SG_LIST;do + textFail "$regx: Found Security Group: $SG open to 0.0.0.0/0 for Telnet ports" "$regx" + done + else + textPass "$regx: No Security Groups found with any port open to 0.0.0.0/0 for Telnet ports" "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7137 b/checks/check_extra7137 new file mode 100644 index 00000000..81d45c98 --- /dev/null +++ b/checks/check_extra7137 @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7137="7.137" +CHECK_TITLE_extra7137="[extra7137] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to Windows SQL Server ports 1433 or 1434 (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra7137="NOT_SCORED" +CHECK_TYPE_extra7137="EXTRA" +CHECK_SEVERITY_extra7137="High" +CHECK_ASFF_RESOURCE_TYPE_extra7137="AwsEc2SecurityGroup" +CHECK_ALTERNATE_check7137="extra7137" +CHECK_SERVICENAME_extra7137="ec2" +CHECK_RISK_extra7137='If Security groups are not properly configured the attack surface is increased. ' +CHECK_REMEDIATION_extra7137='Use a Zero Trust approach. Narrow ingress traffic as much as possible. Consider north-south as well as east-west traffic.' 
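The extra7134-extra7137 checks in this patch all reuse the same security-group expression with only the port bounds changing; a hypothetical helper, not part of the patch, makes the shared pattern explicit:

    # Sketch only: generic form of the filter used by the extra7134-extra7137 checks.
    # Usage: open_sg_for_ports <from_port> <to_port> <region>
    open_sg_for_ports() {
      local from="$1" to="$2" region="$3"
      aws ec2 describe-security-groups --region "$region" --output text \
        --query "SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort==\`${from}\` && ToPort==\`${to}\`)) && (contains(IpRanges[].CidrIp, \`0.0.0.0/0\`) || contains(Ipv6Ranges[].CidrIpv6, \`::/0\`))]) > \`0\`].GroupId"
    }
    # open_sg_for_ports 23 23 eu-west-1    # Telnet, as checked by extra7136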
+CHECK_DOC_extra7137='https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html' +CHECK_CAF_EPIC_extra7137='Infrastructure Security' + +extra7137(){ + for regx in $REGIONS; do + SG_LIST=$($AWSCLI ec2 describe-security-groups --query 'SecurityGroups[?length(IpPermissions[?((FromPort==null && ToPort==null) || (FromPort==`1433` && ToPort==`1434`)) && (contains(IpRanges[].CidrIp, `0.0.0.0/0`) || contains(Ipv6Ranges[].CidrIpv6, `::/0`))]) > `0`].{GroupId:GroupId}' $PROFILE_OPT --region $regx --output text) + if [[ $SG_LIST ]];then + for SG in $SG_LIST;do + textFail "$regx: Found Security Group: $SG open to 0.0.0.0/0 for Microsoft SQL Server ports" "$regx" + done + else + textPass "$regx: No Security Groups found with any port open to 0.0.0.0/0 for Microsoft SQL Server ports" "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7138 b/checks/check_extra7138 new file mode 100644 index 00000000..5af3d0ba --- /dev/null +++ b/checks/check_extra7138 @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7138="7.138" +CHECK_TITLE_extra7138="[extra7138] Ensure no Network ACLs allow ingress from 0.0.0.0/0 to any port" +CHECK_SCORED_extra7138="NOT SCORED" +CHECK_TYPE_extra7138="LEVEL2" +CHECK_SEVERITY_extra7138="High" +CHECK_ASFF_TYPE_extra7138="Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" +CHECK_ASFF_RESOURCE_TYPE_extra7138="AwsEc2NetworkAcl" +CHECK_ALTERNATE_check7138="extra7138" +CHECK_SERVICENAME_extra7138="ec2" +CHECK_RISK_extra7138='Even having a perimeter firewall; having network acls open allows any user or malware with vpc access to scan for well known and sensitive ports and gain access to instance.' +CHECK_REMEDIATION_extra7138='Apply Zero Trust approach. Implement a process to scan and remediate unrestricted or overly permissive network acls. Recommended best practices is to narrow the definition for the minimum ports required.' 
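When extra7137 above flags a group, it helps to inspect the offending ingress rules before tightening them. A minimal sketch; the group ID and region are placeholders, not values from the patch:

    # Illustrative only: dump the ingress rules of a flagged security group so
    # the 1433/1434 exposure can be reviewed.
    aws ec2 describe-security-groups --region eu-west-1 \
      --group-ids sg-0123456789abcdef0 \
      --query 'SecurityGroups[].IpPermissions' --output json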
+CHECK_DOC_extra7138='https://docs.aws.amazon.com/vpc/latest/userguide/vpc-network-acls.html' +CHECK_CAF_EPIC_extra7138='Infrastructure Security' + +extra7138(){ + for regx in $REGIONS; do + NACL_LIST=$($AWSCLI ec2 describe-network-acls --query 'NetworkAcls[?Entries[?((!PortRange) && (CidrBlock == `0.0.0.0/0`) && (Egress == `false`) && (RuleAction == `allow`))]].{NetworkAclId:NetworkAclId}' $PROFILE_OPT --region $regx --output text) + if [[ $NACL_LIST ]];then + for NACL in $NACL_LIST;do + textInfo "$regx: Found Network ACL: $NACL open to 0.0.0.0/0 for any port" "$regx" + done + else + textPass "$regx: No Network ACL found with any port open to 0.0.0.0/0" "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7139 b/checks/check_extra7139 new file mode 100644 index 00000000..bfb10568 --- /dev/null +++ b/checks/check_extra7139 @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7139="7.139" +CHECK_TITLE_extra7139="[extra7139] There are High severity GuardDuty findings (Not Scored) (Not part of CIS benchmark)" +CHECK_SCORED_extra7139="NOT_SCORED" +CHECK_TYPE_extra7139="EXTRA" +CHECK_SEVERITY_extra7139="High" +CHECK_ASFF_RESOURCE_TYPE_extra7139="AwsGuardDutyDetector" +CHECK_ALTERNATE_check7139="extra7139" +CHECK_SERVICENAME_extra7139="guardduty" +CHECK_RISK_extra7139='If critical findings are not addressed threats can spread in the environment.' +CHECK_REMEDIATION_extra7139='Review and remediate critical GuardDuty findings as quickly as possible.' +CHECK_DOC_extra7139='https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_findings.html' +CHECK_CAF_EPIC_extra7139='Incident Response' +extra7139(){ + + for regx in $REGIONS; do + DETECTORS_LIST="" + DETECTORS_LIST=$($AWSCLI guardduty list-detectors --query DetectorIds $PROFILE_OPT --region $regx --output text) + if [[ $DETECTORS_LIST ]];then + for DETECTOR in $DETECTORS_LIST;do + FINDINGS_COUNT="" + FINDINGS_COUNT=$($AWSCLI $PROFILE_OPT --region $regx --output text guardduty list-findings --detector-id $DETECTOR --finding-criteria '{"Criterion":{"severity": {"Eq":["8"]}}}' 2> /dev/null | wc -l | xargs) # Severity LOW=2, MED=4, HIGH=8 + if [[ $FINDINGS_COUNT -gt 0 ]];then + textFail "$regx: GuardDuty has $FINDINGS_COUNT high severity findings." "$regx" + else + textPass "$regx: GuardDuty has no high severity findings." "$regx" + fi + done + else + textInfo "$regx: No GuardDuty detectors found." "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7140 b/checks/check_extra7140 new file mode 100644 index 00000000..1c605cdb --- /dev/null +++ b/checks/check_extra7140 @@ -0,0 +1,41 @@ +#!/usr/bin/env bash +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7140="7.140" +CHECK_TITLE_extra7140="[extra7140] Check if there are SSM Documents set as public" +CHECK_SCORED_extra7140="NOT_SCORED" +CHECK_TYPE_extra7140="EXTRA" +CHECK_SEVERITY_extra7140="High" +CHECK_ASFF_RESOURCE_TYPE_extra7140="AwsSsmDocument" +CHECK_ALTERNATE_check7140="extra7140" +CHECK_SERVICENAME_extra7140="ssm" +CHECK_RISK_extra7140='SSM Documents may contain private information or even secrets and tokens.' +CHECK_REMEDIATION_extra7140='Carefully review the contents of the document before is shared. Enable SSM Block public sharing for documents.' +CHECK_DOC_extra7140='https://docs.aws.amazon.com/systems-manager/latest/userguide/ssm-before-you-share.html' +CHECK_CAF_EPIC_extra7140='Data Protection' +extra7140(){ + + for regx in $REGIONS; do + SSM_DOCS=$($AWSCLI $PROFILE_OPT --region $regx ssm list-documents --filters Key=Owner,Values=Self --query DocumentIdentifiers[].Name --output text) + if [[ $SSM_DOCS ]];then + for ssmdoc in $SSM_DOCS; do + SSM_DOC_SHARED_ALL=$($AWSCLI $PROFILE_OPT --region $regx ssm describe-document-permission --name "$ssmdoc" --permission-type "Share" --query AccountIds[] --output text | grep all) + if [[ $SSM_DOC_SHARED_ALL ]];then + textFail "$regx: SSM Document $ssmdoc is public." "$regx" + else + textPass "$regx: SSM Document $ssmdoc is not public." "$regx" + fi + done + else + textInfo "$regx: No SSM Document found." "$regx" + fi + done +} \ No newline at end of file diff --git a/checks/check_extra7141 b/checks/check_extra7141 new file mode 100644 index 00000000..ff4ce69c --- /dev/null +++ b/checks/check_extra7141 @@ -0,0 +1,55 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2019) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7141="7.141" +CHECK_TITLE_extra7141="[extra7141] Find secrets in SSM Documents" +CHECK_SCORED_extra7141="NOT_SCORED" +CHECK_TYPE_extra7141="EXTRA" +CHECK_SEVERITY_extra7141="Critical" +CHECK_ASFF_RESOURCE_TYPE_extra7141="AwsSsmDocument" +CHECK_ALTERNATE_check7141="extra7141" +CHECK_SERVICENAME_extra7141="ssm" +CHECK_RISK_extra7141='Secrets hardcoded into SSM Documents by malware and bad actors to gain lateral access to other services.' +CHECK_REMEDIATION_extra7141='Implement automated detective control (e.g. using tools like Prowler) to scan accounts for passwords and secrets. Use Secrets Manager service to store and retrieve passwords and secrets.' 
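The extra7140 remediation above mentions blocking public sharing of SSM documents. A sketch of the corresponding account-level setting and of unsharing a flagged document; the document name and region are placeholders, and the setting ID should be confirmed against current AWS documentation:

    # Illustrative only: disable public sharing of SSM documents in one region
    # and remove the "all" share from a document reported by extra7140.
    aws ssm update-service-setting --region eu-west-1 \
      --setting-id /ssm/documents/console/public-sharing-permission \
      --setting-value Disable
    aws ssm modify-document-permission --region eu-west-1 \
      --name "MyDocument" --permission-type Share --account-ids-to-remove all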
+CHECK_DOC_extra7141='https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-secretsmanager-secret-generatesecretstring.html' +CHECK_CAF_EPIC_extra7141='IAM' + +extra7141(){ + SECRETS_TEMP_FOLDER="$PROWLER_DIR/secrets-$ACCOUNT_NUM" + if [[ ! -d $SECRETS_TEMP_FOLDER ]]; then + # this folder is deleted once this check is finished + mkdir $SECRETS_TEMP_FOLDER + fi + + for regx in $REGIONS; do + SSM_DOCS=$($AWSCLI $PROFILE_OPT --region $regx ssm list-documents --filters Key=Owner,Values=Self --query DocumentIdentifiers[].Name --output text) + if [[ $SSM_DOCS ]];then + for ssmdoc in $SSM_DOCS; do + SSM_DOC_FILE="$SECRETS_TEMP_FOLDER/extra7141-$ssmdoc-$regx-content.txt" + $AWSCLI $PROFILE_OPT --region $regx ssm get-document --name $ssmdoc --output text --document-format JSON > $SSM_DOC_FILE + FINDINGS=$(secretsDetector file $SSM_DOC_FILE) + if [[ $FINDINGS -eq 0 ]]; then + textPass "$regx: No secrets found in SSM Document $ssmdoc" "$regx" "$ssmdoc" + # delete file if nothing interesting is there + rm -f $SSM_DOC_FILE + else + textFail "$regx: Potential secret found SSM Document $ssmdoc" "$regx" "$ssmdoc" + # delete file to not leave trace, user must look at the CFN Stack + rm -f $SSM_DOC_FILE + fi + done + else + textInfo "$regx: No SSM Document found." "$regx" + fi + done + rm -rf $SECRETS_TEMP_FOLDER +} diff --git a/checks/check_extra7142 b/checks/check_extra7142 new file mode 100644 index 00000000..9822fce2 --- /dev/null +++ b/checks/check_extra7142 @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +# Prowler - the handy cloud security tool (copyright 2018) by Toni de la Fuente +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +CHECK_ID_extra7142="7.142" +CHECK_TITLE_extra7142="[extra7142] Check if Application Load Balancer is dropping invalid packets to prevent header based http request smuggling" +CHECK_SCORED_extra7142="NOT_SCORED" +CHECK_TYPE_extra7142="EXTRA" +CHECK_SEVERITY_extra7142="Medium" +CHECK_ASFF_RESOURCE_TYPE_extra7142="AwsElasticLoadBalancingV2LoadBalancer" +CHECK_ALTERNATE_check7142="extra7142" +CHECK_ASFF_COMPLIANCE_TYPE_extra7142="" +CHECK_SERVICENAME_extra7142="elb" + +extra7142(){ + for regx in $REGIONS; do + LIST_OF_ELBSV2=$($AWSCLI elbv2 describe-load-balancers $PROFILE_OPT --region $regx --query 'LoadBalancers[?Type == `application`].[LoadBalancerArn]' --output text) + if [[ $LIST_OF_ELBSV2 ]];then + for alb in $LIST_OF_ELBSV2;do + CHECK_IF_DROP_INVALID_HEADER_FIELDS=$($AWSCLI elbv2 describe-load-balancer-attributes $PROFILE_OPT --region $regx --load-balancer-arn $alb --query 'Attributes[6]' --output text|grep -i true) + if [[ $CHECK_IF_DROP_INVALID_HEADER_FIELDS ]];then + textPass "$regx: Application Load Balancer $alb is dropping invalid header fields." 
"$regx" "$alb" + else + textFail "$regx: Application Load Balancer $alb is not dropping invalid header fields" "$regx" "$alb" + fi + done + else + textInfo "$regx: no ALBs found" + fi + done +} diff --git a/checks/check_extra723 b/checks/check_extra723 index 187f50ce..3e0cbd04 100644 --- a/checks/check_extra723 +++ b/checks/check_extra723 @@ -32,13 +32,13 @@ extra723(){ for rdssnapshot in $LIST_OF_RDS_SNAPSHOTS;do SNAPSHOT_IS_PUBLIC=$($AWSCLI rds describe-db-snapshot-attributes $PROFILE_OPT --region $regx --db-snapshot-identifier $rdssnapshot --query DBSnapshotAttributesResult.DBSnapshotAttributes[*] --output text|grep ^ATTRIBUTEVALUES|cut -f2|grep all) if [[ $SNAPSHOT_IS_PUBLIC ]];then - textFail "$regx: RDS Snapshot $rdssnapshot is public!" "$regx" + textFail "$regx: RDS Snapshot $rdssnapshot is public!" "$regx" "$rdssnapshot" else - textPass "$regx: RDS Snapshot $rdssnapshot is not shared" "$regx" + textPass "$regx: RDS Snapshot $rdssnapshot is not shared" "$regx" "$rdssnapshot" fi done else - textInfo "$regx: No RDS Snapshots found" "$regx" + textInfo "$regx: No RDS Snapshots found" "$regx" "$rdssnapshot" fi # RDS cluster snapshots LIST_OF_RDS_CLUSTER_SNAPSHOTS=$($AWSCLI rds describe-db-cluster-snapshots $PROFILE_OPT --region $regx --query DBClusterSnapshots[*].DBClusterSnapshotIdentifier --output text) @@ -46,13 +46,13 @@ extra723(){ for rdsclustersnapshot in $LIST_OF_RDS_CLUSTER_SNAPSHOTS;do CLUSTER_SNAPSHOT_IS_PUBLIC=$($AWSCLI rds describe-db-cluster-snapshot-attributes $PROFILE_OPT --region $regx --db-cluster-snapshot-identifier $rdsclustersnapshot --query DBClusterSnapshotAttributesResult.DBClusterSnapshotAttributes[*] --output text|grep ^ATTRIBUTEVALUES|cut -f2|grep all) if [[ $CLUSTER_SNAPSHOT_IS_PUBLIC ]];then - textFail "$regx: RDS Cluster Snapshot $rdsclustersnapshot is public!" "$regx" + textFail "$regx: RDS Cluster Snapshot $rdsclustersnapshot is public!" "$regx" "$rdsclustersnapshot" else - textPass "$regx: RDS Cluster Snapshot $rdsclustersnapshot is not shared" "$regx" + textPass "$regx: RDS Cluster Snapshot $rdsclustersnapshot is not shared" "$regx" "$rdsclustersnapshot" fi done else - textInfo "$regx: No RDS Cluster Snapshots found" "$regx" + textInfo "$regx: No RDS Cluster Snapshots found" "$regx" "$rdsclustersnapshot" fi done } diff --git a/checks/check_extra735 b/checks/check_extra735 index 0b789f5e..f1d07aba 100644 --- a/checks/check_extra735 +++ b/checks/check_extra735 @@ -25,20 +25,19 @@ CHECK_DOC_extra735='https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overv CHECK_CAF_EPIC_extra735='Data Protection' extra735(){ - textInfo "Looking for RDS Volumes in all regions... " for regx in $REGIONS; do LIST_OF_RDS_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[*].DBInstanceIdentifier' --output text) if [[ $LIST_OF_RDS_INSTANCES ]];then for rdsinstance in $LIST_OF_RDS_INSTANCES; do IS_ENCRYPTED=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].StorageEncrypted' --output text) if [[ $IS_ENCRYPTED == "False" ]]; then - textFail "$regx: RDS instance $rdsinstance is not encrypted!" "$regx" + textFail "$regx: RDS instance $rdsinstance is not encrypted!" 
"$regx" "$rdsinstance" else - textPass "$regx: RDS instance $rdsinstance is encrypted" "$regx" + textPass "$regx: RDS instance $rdsinstance is encrypted" "$regx" "$rdsinstance" fi done else - textInfo "$regx: No RDS instances found" "$regx" + textInfo "$regx: No RDS instances found" "$regx" "$rdsinstance" fi done } diff --git a/checks/check_extra736 b/checks/check_extra736 index f9266d65..937af033 100644 --- a/checks/check_extra736 +++ b/checks/check_extra736 @@ -27,7 +27,7 @@ CHECK_CAF_EPIC_extra736='Data Protection' extra736(){ textInfo "Looking for KMS keys in all regions... " for regx in $REGIONS; do - LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --output text |grep -v :alias/aws/ |awk '{ print $4 }') + LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --query "Aliases[].[AliasName,TargetKeyId]" --output text |grep -v ^alias/aws/ |awk '{ print $2 }') if [[ $LIST_OF_CUSTOMER_KMS_KEYS ]];then for key in $LIST_OF_CUSTOMER_KMS_KEYS; do CHECK_POLICY=$($AWSCLI kms get-key-policy --key-id $key --policy-name default $PROFILE_OPT --region $regx --output text|awk '/Principal/{n=NR+1} n>=NR' |grep AWS\"\ :\ \"\\*\"$) diff --git a/checks/check_extra737 b/checks/check_extra737 index dc159378..7e6eed0d 100644 --- a/checks/check_extra737 +++ b/checks/check_extra737 @@ -27,7 +27,7 @@ CHECK_CAF_EPIC_extra737='Data Protection' extra737(){ textInfo "Looking for KMS keys in all regions... " for regx in $REGIONS; do - LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --output text |grep -v :alias/aws/ |awk '{ print $4 }') + LIST_OF_CUSTOMER_KMS_KEYS=$($AWSCLI kms list-aliases $PROFILE_OPT --region $regx --query "Aliases[].[AliasName,TargetKeyId]" --output text |grep -v ^alias/aws/ |awk '{ print $2 }') if [[ $LIST_OF_CUSTOMER_KMS_KEYS ]];then for key in $LIST_OF_CUSTOMER_KMS_KEYS; do CHECK_ROTATION=$($AWSCLI kms get-key-rotation-status --key-id $key $PROFILE_OPT --region $regx --output text) diff --git a/checks/check_extra739 b/checks/check_extra739 index e36f4ab1..0cf5eb98 100644 --- a/checks/check_extra739 +++ b/checks/check_extra739 @@ -31,13 +31,13 @@ extra739(){ # if retention is 0 then is disabled BACKUP_RETENTION=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].BackupRetentionPeriod' --output text) if [[ $BACKUP_RETENTION == "0" ]]; then - textFail "$regx: RDS instance $rdsinstance has not backup enabled!" "$regx" + textFail "$regx: RDS instance $rdsinstance has not backup enabled!" 
"$regx" "$rdsinstance" else - textPass "$regx: RDS instance $rdsinstance has backup enabled with retention period $BACKUP_RETENTION days" "$regx" + textPass "$regx: RDS instance $rdsinstance has backup enabled with retention period $BACKUP_RETENTION days" "$regx" "$rdsinstance" fi done else - textInfo "$regx: No RDS instances found" "$regx" + textInfo "$regx: No RDS instances found" "$regx" "$rdsinstance" fi done } diff --git a/checks/check_extra747 b/checks/check_extra747 index f2473563..ec6a86d8 100644 --- a/checks/check_extra747 +++ b/checks/check_extra747 @@ -31,13 +31,13 @@ extra747(){ # if retention is 0 then is disabled ENABLED_CLOUDWATCHLOGS_EXPORTS=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --db-instance-identifier $rdsinstance --query 'DBInstances[*].EnabledCloudwatchLogsExports' --output text) if [[ $ENABLED_CLOUDWATCHLOGS_EXPORTS ]]; then - textPass "$regx: RDS instance $rdsinstance is shipping $ENABLED_CLOUDWATCHLOGS_EXPORTS to CloudWatch Logs" "$regx" + textPass "$regx: RDS instance $rdsinstance is shipping $ENABLED_CLOUDWATCHLOGS_EXPORTS to CloudWatch Logs" "$regx" "$rdsinstance" else - textFail "$regx: RDS instance $rdsinstance has no CloudWatch Logs enabled!" "$regx" + textFail "$regx: RDS instance $rdsinstance has no CloudWatch Logs enabled!" "$regx" "$rdsinstance" fi done else - textInfo "$regx: No RDS instances found" "$regx" + textInfo "$regx: No RDS instances found" "$regx" "$rdsinstance" fi done } diff --git a/checks/check_extra78 b/checks/check_extra78 index 16d91ba2..a164eddb 100644 --- a/checks/check_extra78 +++ b/checks/check_extra78 @@ -27,17 +27,16 @@ CHECK_CAF_EPIC_extra78='Data Protection' extra78(){ # "Ensure there are no Public Accessible RDS instances (Not Scored) (Not part of CIS benchmark)" - textInfo "Looking for RDS instances in all regions... " for regx in $REGIONS; do LIST_OF_RDS_PUBLIC_INSTANCES=$($AWSCLI rds describe-db-instances $PROFILE_OPT --region $regx --query 'DBInstances[?PubliclyAccessible==`true` && DBInstanceStatus==`"available"`].[DBInstanceIdentifier,Endpoint.Address]' --output text) if [[ $LIST_OF_RDS_PUBLIC_INSTANCES ]];then while read -r rds_instance;do RDS_NAME=$(echo $rds_instance | awk '{ print $1; }') RDS_DNSNAME=$(echo $rds_instance | awk '{ print $2; }') - textFail "$regx: RDS instance: $RDS_NAME at $RDS_DNSNAME is set as Publicly Accessible!" "$regx" + textFail "$regx: RDS instance: $RDS_NAME at $RDS_DNSNAME is set as Publicly Accessible!" 
"$regx" "$RDS_NAME" done <<< "$LIST_OF_RDS_PUBLIC_INSTANCES" else - textPass "$regx: no Publicly Accessible RDS instances found" "$regx" + textPass "$regx: no Publicly Accessible RDS instances found" "$regx" "$RDS_NAME" fi done } diff --git a/groups/group11_secrets b/groups/group11_secrets index 52a2df02..76d46056 100644 --- a/groups/group11_secrets +++ b/groups/group11_secrets @@ -15,13 +15,8 @@ GROUP_ID[11]='secrets' GROUP_NUMBER[11]='11.0' GROUP_TITLE[11]='Look for keys secrets or passwords around resources - [secrets]' GROUP_RUN_BY_DEFAULT[11]='N' # but it runs when execute_all is called (default) -GROUP_CHECKS[11]='extra741,extra742,extra759,extra760,extra768,extra775' +GROUP_CHECKS[11]='extra741,extra742,extra759,extra760,extra768,extra775,extra7141' # requires https://github.com/Yelp/detect-secrets # `pip install detect-secrets` -# Initially: -# - EC2 UserData -# - CloudFormation Outputs -# - Lambda variables -# - Lambda code diff --git a/groups/group17_internetexposed b/groups/group17_internetexposed index 9cf2563c..33e30e18 100644 --- a/groups/group17_internetexposed +++ b/groups/group17_internetexposed @@ -15,7 +15,7 @@ GROUP_ID[17]='internet-exposed' GROUP_NUMBER[17]='17.0' GROUP_TITLE[17]='Find resources exposed to the internet - [internet-exposed] ***' GROUP_RUN_BY_DEFAULT[17]='N' # run it when execute_all is called -GROUP_CHECKS[17]='check41,check42,extra72,extra73,extra74,extra76,extra77,extra78,extra79,extra710,extra711,extra716,extra723,extra727,extra731,extra736,extra738,extra745,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra770,extra771,extra778,extra779,extra787,extra788,extra795,extra796,extra798,extra7102' +GROUP_CHECKS[17]='check41,check42,check45,check46,extra72,extra73,extra74,extra76,extra77,extra78,extra79,extra710,extra711,extra716,extra723,extra727,extra731,extra736,extra738,extra745,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra770,extra771,extra778,extra779,extra787,extra788,extra795,extra796,extra798,extra7102,extra7134,extra7135,extra7136,extra7137,extra7138' # 4.1 [check41] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 22 (Scored) [group4, cislevel1, cislevel2] # 4.2 [check42] Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to port 3389 (Scored) [group4, cislevel1, cislevel2] diff --git a/groups/group21_soc2 b/groups/group21_soc2 index 20e3f0d5..84f2cac3 100644 --- a/groups/group21_soc2 +++ b/groups/group21_soc2 @@ -13,11 +13,11 @@ GROUP_ID[21]='soc2' GROUP_NUMBER[21]='21.0' -GROUP_TITLE[21]='SOC2 Readiness - ONLY AS REFERENCE - [soc2] ***' +GROUP_TITLE[21]='SOC2 Readiness - ONLY AS REFERENCE - [soc2] *******************' GROUP_RUN_BY_DEFAULT[21]='N' # run it when execute_all is called GROUP_CHECKS[21]='check110,check111,check113,check12,check122,check13,check15,check16,check17,check18,check19,check21,check31,check310,check32,check33,check34,check35,check36,check37,check38,check39,check41,check42,check43,extra711,extra72,extra723,extra729,extra731,extra734,extra735,extra739,extra76,extra78,extra792' # References: # 1. https://www.aicpa.org/content/dam/aicpa/interestareas/frc/assuranceadvisoryservices/downloadabledocuments/trust-services-criteria.pdf # 2. https://www.aicpa.org/interestareas/frc/assuranceadvisoryservices/mappingsrelevanttothesocsuiteofservices.html -# 3. 
https://www.aicpa.org/content/dam/aicpa/interestareas/frc/assuranceadvisoryservices/downloadabledocuments/othermapping/mapping-final-2017-tsc-to-extant-2016-tspc.xlsx \ No newline at end of file +# 3. https://www.aicpa.org/content/dam/aicpa/interestareas/frc/assuranceadvisoryservices/downloadabledocuments/othermapping/mapping-final-2017-tsc-to-extant-2016-tspc.xlsx diff --git a/groups/group4_networking b/groups/group4_networking index 05b307b4..ca124465 100644 --- a/groups/group4_networking +++ b/groups/group4_networking @@ -12,4 +12,4 @@ GROUP_ID[4]='group4' GROUP_NUMBER[4]='4.0' GROUP_TITLE[4]='Networking - CIS only - [group4] *******************************' GROUP_RUN_BY_DEFAULT[4]='Y' # run it when execute_all is called -GROUP_CHECKS[4]='check41,check42,check43,check44' +GROUP_CHECKS[4]='check41,check42,check43,check44,check45,check46' diff --git a/groups/group7_extras b/groups/group7_extras index e7333d62..2ecde56a 100644 --- a/groups/group7_extras +++ b/groups/group7_extras @@ -15,7 +15,7 @@ GROUP_ID[7]='extras' GROUP_NUMBER[7]='7.0' GROUP_TITLE[7]='Extras - all non CIS specific checks - [extras] ****************' GROUP_RUN_BY_DEFAULT[7]='Y' # run it when execute_all is called -GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786,extra787,extra788,extra791,extra792,extra793,extra794,extra795,extra796,extra797,extra798,extra799,extra7100,extra7101,extra7102,extra7103,extra7104,extra7105,extra7106,extra7107,extra7108,extra7109,extra7110,extra7111,extra7112,extra7113,extra7114,extra7115,extra7116,extra7117,extra7118,extra7119,extra7120,extra7121,extra7122,extra7123,extra7124,extra7125,extra7126,extra7127,extra7128,extra7129,extra7130,extra7131,extra7132,extra7133' 
+GROUP_CHECKS[7]='extra71,extra72,extra73,extra74,extra75,extra76,extra77,extra78,extra79,extra710,extra711,extra712,extra713,extra714,extra715,extra716,extra717,extra718,extra719,extra720,extra721,extra722,extra723,extra724,extra725,extra726,extra727,extra728,extra729,extra730,extra731,extra732,extra733,extra734,extra735,extra736,extra737,extra738,extra739,extra740,extra741,extra742,extra743,extra744,extra745,extra746,extra747,extra748,extra749,extra750,extra751,extra752,extra753,extra754,extra755,extra756,extra757,extra758,extra761,extra762,extra763,extra764,extra765,extra767,extra768,extra769,extra770,extra771,extra772,extra773,extra774,extra775,extra776,extra777,extra778,extra779,extra780,extra781,extra782,extra783,extra784,extra785,extra786,extra787,extra788,extra791,extra792,extra793,extra794,extra795,extra796,extra797,extra798,extra799,extra7100,extra7101,extra7102,extra7103,extra7104,extra7105,extra7106,extra7107,extra7108,extra7109,extra7110,extra7111,extra7112,extra7113,extra7114,extra7115,extra7116,extra7117,extra7118,extra7119,extra7120,extra7121,extra7122,extra7123,extra7124,extra7125,extra7126,extra7127,extra7128,extra7129,extra7130,extra7131,extra7132,extra7133,extra7134,extra7135,extra7136,extra7137,extra7138,extra7139,extra7140,extra7141,extra7142' # Extras 759 and 760 (lambda variables and code secrets finder are not included) # to run detect-secrets use `./prowler -g secrets` diff --git a/include/csv_header b/include/csv_header index 7a867815..3ab095c3 100644 --- a/include/csv_header +++ b/include/csv_header @@ -15,6 +15,6 @@ printCsvHeader() { # >&2 echo "" # >&2 echo "Generating \"${SEP}\" delimited report on stdout for profile $PROFILE, account $ACCOUNT_NUM" - echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}CHECK_RESULT${SEP}ITEM_SCORED${SEP}ITEM_LEVEL${SEP}TITLE_TEXT${SEP}CHECK_RESULT_EXTENDED${SEP}CHECK_ASFF_COMPLIANCE_TYPE${SEP}CHECK_SEVERITY${SEP}CHECK_SERVICENAME${SEP}CHECK_ASFF_RESOURCE_TYPE${SEP}CHECK_ASFF_TYPE${SEP}CHECK_RISK${SEP}CHECK_REMEDIATION${SEP}CHECK_DOC${SEP}CHECK_CAF_EPIC" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}CHECK_RESULT${SEP}ITEM_SCORED${SEP}ITEM_LEVEL${SEP}TITLE_TEXT${SEP}CHECK_RESULT_EXTENDED${SEP}CHECK_ASFF_COMPLIANCE_TYPE${SEP}CHECK_SEVERITY${SEP}CHECK_SERVICENAME${SEP}CHECK_ASFF_RESOURCE_TYPE${SEP}CHECK_ASFF_TYPE${SEP}CHECK_RISK${SEP}CHECK_REMEDIATION${SEP}CHECK_DOC${SEP}CHECK_CAF_EPIC${SEP}CHECK_RESOURCE_ID" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV # echo "PROFILE${SEP}ACCOUNT_NUM${SEP}REGION${SEP}TITLE_ID${SEP}RESULT${SEP}SCORED${SEP}LEVEL${SEP}TITLE_TEXT${SEP}NOTES${SEP}COMPLIANCE${SEP}SEVERITY${SEP}SERVICENAME" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_CSV } diff --git a/include/html_report b/include/html_report index ec694411..561f31ea 100644 --- a/include/html_report +++ b/include/html_report @@ -137,6 +137,7 @@ addHtmlHeader() { Risk Remediation Docs + Resource ID diff --git a/include/outputs b/include/outputs index 34503944..f2130405 100644 --- a/include/outputs +++ b/include/outputs @@ -19,8 +19,24 @@ EXTENSION_ASFF="asff-json" EXTENSION_TEXT="txt" EXTENSION_HTML="html" OUTPUT_DATE=$(date -u +"%Y%m%d%H%M%S") -OUTPUT_DIR="${PROWLER_DIR}/output" # default output if none -OUTPUT_FILE_NAME="${OUTPUT_DIR}/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}" +OUTPUT_DIR="${PROWLER_DIR}/output" # default output if none +if [[ $OUTPUT_DIR_CUSTOM ]]; then + # output mode has to be set to other than text + if [[ ! 
" ${MODES[@]} " =~ " text " || ${check_id} == 7.1 || ${check_id} == 7.74 ]]; then + if [[ ! -d $OUTPUT_DIR_CUSTOM ]]; then + echo "$OPTRED ERROR!$OPTNORMAL directory \"$OUTPUT_DIR_CUSTOM\" does not exist." + exit 1 + else + OUTPUT_DIR=$OUTPUT_DIR_CUSTOM + fi + else + echo "$OPTRED ERROR!$OPTNORMAL - Mode (-M) has to be set as well. Use -h for help." + exit 1 + fi +fi +if [ -z ${OUTPUT_FILE_NAME+x} ]; then + OUTPUT_FILE_NAME="${OUTPUT_DIR}/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}" +fi HTML_LOGO_URL="https://github.com/toniblyx/prowler/" HTML_LOGO_IMG="https://github.com/toniblyx/prowler/raw/2.4/util/html/prowler-logo-new.png" TIMESTAMP=$(get_iso8601_timestamp) @@ -32,19 +48,19 @@ PROWLER_PARAMETERS=$@ # $ACCOUNT_NUM AWS Account ID # $REPREGION AWS region scanned # $TITLE_ID Numeric identifier of each check (1.2, 2.3, etc), originally based on CIS checks. -# $CHECK_RESULT values can be PASS, FAIL, INFO or WARNING if whitelisted +# $CHECK_RESULT values can be PASS, FAIL, INFO or WARNING if whitelisted # $ITEM_SCORED corresponds to CHECK_SCORED, values can be Scored/Not Scored. This is CIS only, will be deprecated in Prowler. # $ITEM_LEVEL corresponds to CHECK_TYPE_ currently only for CIS Level 1, CIS Level 2 and Extras (all checks not part of CIS) -# $TITLE_TEXT corresponds to CHECK_TITLE_ shows title of each check +# $TITLE_TEXT corresponds to CHECK_TITLE_ shows title of each check # $CHECK_RESULT_EXTENDED shows response of each check per resource like sg-123438 is open! -# $CHECK_ASFF_COMPLIANCE_TYPE specify type from taxonomy https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-type-taxonomy.html +# $CHECK_ASFF_COMPLIANCE_TYPE specify type from taxonomy https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-type-taxonomy.html # $CHECK_SEVERITY severity Low, Medium, High, Critical # $CHECK_SERVICENAME AWS service name short name # $CHECK_ASFF_RESOURCE_TYPE values from https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources # $CHECK_ASFF_TYPE generic type from taxonomy here https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-type-taxonomy.html -# $CHECK_RISK text about risk -# $CHECK_REMEDIATION text about remediation -# $CHECK_DOC link to related documentation +# $CHECK_RISK text about risk +# $CHECK_REMEDIATION text about remediation +# $CHECK_DOC link to related documentation # $CHECK_CAF_EPIC it can be Logging and Monitoring, IAM, Data Protection, Infrastructure Security. Incident Response is not included since CAF has not specific checks on it logs enablement are part of Logging and Monitoring. 
# Ensure that output directory always exists when -M is used @@ -63,6 +79,7 @@ fi textPass(){ CHECK_RESULT="PASS" CHECK_RESULT_EXTENDED="$1" + CHECK_RESOURCE_ID="$3" if [[ "$QUIET" == 1 ]]; then return @@ -75,13 +92,13 @@ textPass(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Pass" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON + generateJsonOutput "$1" "Pass" "$CHECK_RESOURCE_ID" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then - JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED") + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "$CHECK_RESOURCE_ID") echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" "${REPREGION}" @@ -104,6 +121,7 @@ textPass(){ textInfo(){ CHECK_RESULT="INFO" CHECK_RESULT_EXTENDED="$1" + CHECK_RESOURCE_ID="$3" if [[ "$QUIET" == 1 ]]; then return @@ -115,10 +133,10 @@ textInfo(){ REPREGION=$REGION fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "Info" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} + generateJsonOutput "$1" "Info" "$CHECK_RESOURCE_ID" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi if is_junit_output_enabled; then output_junit_info "$1" @@ -130,7 +148,7 @@ textInfo(){ echo " $NOTICE INFO! 
$1 $NORMAL" fi if [[ "${MODES[@]}" =~ "html" ]]; then - generateHtmlOutput "$1" "INFO" + generateHtmlOutput "$1" "INFO" "$CHECK_RESOURCE_ID" fi } @@ -162,6 +180,7 @@ textFail(){ CHECK_RESULT=$level CHECK_RESULT_EXTENDED="$1" + CHECK_RESOURCE_ID="$3" if [[ $2 ]]; then REPREGION=$2 @@ -170,13 +189,13 @@ textFail(){ fi if [[ "${MODES[@]}" =~ "csv" ]]; then - echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV + echo "$PROFILE${SEP}$ACCOUNT_NUM${SEP}$REPREGION${SEP}$TITLE_ID${SEP}$CHECK_RESULT${SEP}$ITEM_SCORED${SEP}$ITEM_LEVEL${SEP}$TITLE_TEXT${SEP}$CHECK_RESULT_EXTENDED${SEP}$CHECK_ASFF_COMPLIANCE_TYPE${SEP}$CHECK_SEVERITY${SEP}$CHECK_SERVICENAME${SEP}$CHECK_ASFF_RESOURCE_TYPE${SEP}$CHECK_ASFF_TYPE${SEP}$CHECK_RISK${SEP}$CHECK_REMEDIATION${SEP}$CHECK_DOC${SEP}$CHECK_CAF_EPIC${SEP}$CHECK_RESOURCE_ID" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_CSV fi if [[ "${MODES[@]}" =~ "json" ]]; then - generateJsonOutput "$1" "${level}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} + generateJsonOutput "$1" "${level}" "$CHECK_RESOURCE_ID"| tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON} fi if [[ "${MODES[@]}" =~ "json-asff" ]]; then - JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "${level}") + JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "${level}" "$CHECK_RESOURCE_ID") echo "${JSON_ASFF_OUTPUT}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_ASFF} if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then sendToSecurityHub "${JSON_ASFF_OUTPUT}" "${REPREGION}" @@ -196,61 +215,64 @@ textFail(){ echo " $colorcode ${level}! $1 $NORMAL" fi if [[ "${MODES[@]}" =~ "html" ]]; then - generateHtmlOutput "$1" "${level}" + generateHtmlOutput "$1" "${level}" "$CHECK_RESOURCE_ID" fi } textTitle(){ CHECKS_COUNTER=$((CHECKS_COUNTER+1)) - TITLE_ID=$1 + TITLE_ID="$BLUE$1$NORMAL" if [[ $NUMERAL ]]; then # Left-pad the check ID with zeros to simplify sorting, e.g. 1.1 -> 1.01 TITLE_ID=$(awk -F'.' 
'{ printf "%d.%02d", $1, $2 }' <<< "$TITLE_ID") fi TITLE_TEXT=$2 + CHECK_SERVICENAME="$MAGENTA$3$NORMAL" + CHECK_SEVERITY="$BROWN[$4]$NORMAL" - case "$3" in - 0|No|NOT_SCORED) - ITEM_SCORED="Not Scored" - ;; - 1|Yes|SCORED) - ITEM_SCORED="Scored" - ;; - *) - ITEM_SCORED="Unspecified" - ;; - esac + # case "$3" in + # 0|No|NOT_SCORED) + # ITEM_SCORED="Not Scored" + # ;; + # 1|Yes|SCORED) + # ITEM_SCORED="Scored" + # ;; + # *) + # ITEM_SCORED="Unspecified" + # ;; + # esac - case "$4" in - LEVEL1) ITEM_LEVEL="Level 1";; - LEVEL2) ITEM_LEVEL="Level 2";; - EXTRA) ITEM_LEVEL="Extra";; - SUPPORT) ITEM_LEVEL="Support";; - *) ITEM_LEVEL="Unspecified or Invalid";; - esac + # case "$4" in + # LEVEL1) ITEM_LEVEL="Level 1";; + # LEVEL2) ITEM_LEVEL="Level 2";; + # EXTRA) ITEM_LEVEL="Extra";; + # SUPPORT) ITEM_LEVEL="Support";; + # *) ITEM_LEVEL="Unspecified or Invalid";; + # esac local group_ids - if [[ -n "$5" ]]; then - group_ids="$CYAN [$5] $NORMAL" - fi + # if [[ -n "$4" ]]; then + group_ids="$CYAN[$5]$NORMAL" + # fi if [[ "${MODES[@]}" =~ "csv" ]]; then >&2 echo "$TITLE_ID $TITLE_TEXT" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_CSV} elif [[ "${MODES[@]}" =~ "json" || "${MODES[@]}" =~ "json-asff" ]]; then : else - if [[ "$ITEM_SCORED" == "Scored" ]]; then - echo -e "\n$BLUE $TITLE_ID $NORMAL $TITLE_TEXT $6 $group_ids " - else - echo -e "\n$PURPLE $TITLE_ID $TITLE_TEXT $6 $NORMAL $group_ids " - fi + # if [[ "$ITEM_SCORED" == "Scored" ]]; then + echo -e "$TITLE_ID $CHECK_SERVICENAME $TITLE_TEXT $CHECK_SEVERITY $group_ids " + # else + # echo -e "\n$PURPLE $TITLE_ID $TITLE_TEXT $6 $NORMAL $group_ids " + # fi fi } generateJsonOutput(){ local message=$1 local status=$2 + local resource_id=$3 jq -M -c \ --arg PROFILE "$PROFILE" \ --arg ACCOUNT_NUM "$ACCOUNT_NUM" \ @@ -265,6 +287,11 @@ generateJsonOutput(){ --arg TYPE "$CHECK_ASFF_COMPLIANCE_TYPE" \ --arg TIMESTAMP "$(get_iso8601_timestamp)" \ --arg SERVICENAME "$CHECK_SERVICENAME" \ + --arg CHECK_CAF_EPIC "$CHECK_CAF_EPIC" \ + --arg CHECK_RISK "$CHECK_RISK" \ + --arg CHECK_REMEDIATION "$CHECK_REMEDIATION" \ + --arg CHECK_DOC "$CHECK_DOC" \ + --arg CHECK_RESOURCE_ID "$resource_id" \ -n '{ "Profile": $PROFILE, "Account Number": $ACCOUNT_NUM, @@ -278,7 +305,12 @@ generateJsonOutput(){ "Region": $REPREGION, "Timestamp": $TIMESTAMP, "Compliance": $TYPE, - "Service": $SERVICENAME + "Service": $SERVICENAME, + "CAF Epic": $CHECK_CAF_EPIC, + "Risk": $CHECK_RISK, + "Remediation": $CHECK_REMEDIATION, + "Doc link": $CHECK_DOC, + "Resource ID": $CHECK_RESOURCE_ID }' } @@ -375,7 +407,7 @@ generateHtmlOutput(){ echo '
@@ -375,7 +407,7 @@ generateHtmlOutput(){
         echo ''$CHECK_RISK'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
         echo ''$CHECK_REMEDIATION'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
         echo ' ' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
+        echo ' '$CHECK_RESOURCE_ID'' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
         echo '' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
-}
\ No newline at end of file
diff --git a/include/outputs_bucket b/include/outputs_bucket
new file mode 100644
index 00000000..41cbefe1
--- /dev/null
+++ b/include/outputs_bucket
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+
+# Prowler - the handy cloud security tool (copyright 2021) by Toni de la Fuente
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+if [[ $OUTPUT_BUCKET ]]; then
+  # Output mode has to be set to something other than text
+  if [[ "${MODES[@]}" =~ "html" ]] || [[ "${MODES[@]}" =~ "csv" ]] || [[ "${MODES[@]}" =~ "json" ]] || [[ "${MODES[@]}" =~ "json-asff" ]]; then
+    OUTPUT_BUCKET_WITHOUT_FOLDERS=$(echo $OUTPUT_BUCKET | awk -F'/' '{ print $1 }')
+    OUTPUT_BUCKET_STATUS=$($AWSCLI s3api head-bucket --bucket "$OUTPUT_BUCKET_WITHOUT_FOLDERS" 2>&1 || true)
+    if [[ ! -z $OUTPUT_BUCKET_STATUS ]]; then
+      echo "$OPTRED ERROR!$OPTNORMAL wrong bucket name or insufficient permissions."
+      exit 1
+    else
+      # Make sure there is no trailing / to avoid // in the S3 path
+      if [[ $OUTPUT_BUCKET != *"/" ]]; then
+        OUTPUT_BUCKET="$OUTPUT_BUCKET"
+      else
+        OUTPUT_BUCKET=${OUTPUT_BUCKET::-1}
+      fi
+    fi
+  else
+    echo "$OPTRED ERROR!$OPTNORMAL - Mode (-M) has to be set as well. Use -h for help."
+    exit 1
+  fi
+fi
+
+copyToS3(){
+  # Prowler copies each format to its own folder in S3 for easier handling
+  # and processing by QuickSight or other tools.
+  if [[ $OUTPUT_BUCKET ]]; then
+    if [[ "${MODES[@]}" =~ "csv" ]]; then
+      $AWSCLI s3 cp $OUTPUT_DIR/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}.$EXTENSION_CSV s3://$OUTPUT_BUCKET/csv/
+    fi
+    if [[ "${MODES[@]}" =~ "html" ]]; then
+      $AWSCLI s3 cp $OUTPUT_DIR/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}.$EXTENSION_HTML s3://$OUTPUT_BUCKET/html/
+    fi
+    if [[ "${MODES[@]}" =~ "json" ]]; then
+      $AWSCLI s3 cp $OUTPUT_DIR/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}.$EXTENSION_JSON s3://$OUTPUT_BUCKET/json/
+    fi
+    if [[ "${MODES[@]}" =~ "json-asff" ]]; then
+      $AWSCLI s3 cp $OUTPUT_DIR/prowler-output-${ACCOUNT_NUM}-${OUTPUT_DATE}.$EXTENSION_ASFF s3://$OUTPUT_BUCKET/json-asff/
+    fi
+  fi
+}
\ No newline at end of file
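Note: copyToS3 uploads each enabled format under its own prefix in the bucket given with -B. A usage sketch (bucket name, account ID and date are illustrative, not from this patch):

    ./prowler -M csv,json -B my-prowler-bucket/reports
    # expected objects after the run:
    #   s3://my-prowler-bucket/reports/csv/prowler-output-123456789012-<date>.csv
    #   s3://my-prowler-bucket/reports/json/prowler-output-123456789012-<date>.json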
diff --git a/include/whoami b/include/whoami
index a2fa3ce2..a7c6256e 100644
--- a/include/whoami
+++ b/include/whoami
@@ -29,8 +29,9 @@ case "$REGION" in
     ;;
 esac

-GETCALLER=$($AWSCLI sts get-caller-identity $PROFILE_OPT --region $REGION_FOR_STS 2>&1)
-if [[ $(echo "$GETCALLER" | grep 'Unable') ]]; then
+GETCALLER=$($AWSCLI sts get-caller-identity $PROFILE_OPT --output json --region $REGION_FOR_STS 2>&1)
+ret=$?
+if [[ $ret -ne 0 ]]; then
   if [[ $PRINTCHECKSONLY || $PRINTGROUPSONLY ]]; then
     echo Listing...
   else
diff --git a/prowler b/prowler
index 8d129916..b3cc2e09 100755
--- a/prowler
+++ b/prowler
@@ -32,7 +32,7 @@ OPTRED=""
 OPTNORMAL=""

 # Set the defaults variables
-PROWLER_VERSION=2.4.0-07042021
+PROWLER_VERSION=2.5.0-15042021
 PROWLER_DIR=$(dirname "$0")

 REGION=""
@@ -74,7 +74,7 @@ USAGE:
   -g specify a group of checks by id, to see all available group of checks use "-L"
      (i.e.: "group3" for entire section 3, "cislevel1" for CIS Level 1 Profile Definitions or "forensics-ready")
   -f specify an AWS region to run checks against
-     (i.e.: us-west-1)
+     (i.e.: us-west-1, or quote a space-separated list for multiple regions, like 'us-west-1 us-west-2')
   -m specify the maximum number of items to return for long-running requests (default: 100)
   -M output mode: text (default), mono, html, json, json-asff, junit-xml, csv. They can be combined, comma separated.
      (separator is ","; data is on stdout; progress on stderr).
@@ -100,13 +100,18 @@ USAGE:
   -w whitelist file. See whitelist_sample.txt for reference and format
      (i.e.: whitelist_sample.txt)
   -N Shodan API key used by check extra7102.
+  -o custom output directory; if not specified, the default prowler/output is used. Requires -M
+     (i.e.: -M csv -o /tmp/reports/)
+  -B custom output bucket. Requires -M and can also be combined with the -o flag.
+     (i.e.: -M csv -B my-bucket or -M csv -B my-bucket/folder/)
+  -F custom output report name; if not specified, the default output/prowler-output-ACCOUNT_NUM-OUTPUT_DATE is used
   -V show version number & exit
   -h this help
 "
 exit
}

-while getopts ":hlLkqp:r:c:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:" OPTION; do
+while getopts ":hlLkqp:r:c:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:o:B:F:" OPTION; do
   case $OPTION in
     h )
       usage
@@ -190,6 +195,15 @@ while getopts ":hlLkqp:r:c:g:f:m:M:E:x:enbVsSI:A:R:T:w:N:" OPTION; do
     N )
       SHODAN_API_KEY=$OPTARG
       ;;
+    o )
+      OUTPUT_DIR_CUSTOM=$OPTARG
+      ;;
+    B )
+      OUTPUT_BUCKET=$OPTARG
+      ;;
+    F )
+      OUTPUT_FILE_NAME=$OPTARG
+      ;;
     : )
       echo ""
       echo "$OPTRED ERROR!$OPTNORMAL -$OPTARG requires an argument"
@@ -243,6 +257,7 @@ unset AWS_DEFAULT_OUTPUT
 . $PROWLER_DIR/include/csv_header
 . $PROWLER_DIR/include/banner
 . $PROWLER_DIR/include/html_report
+. $PROWLER_DIR/include/outputs_bucket
 . $PROWLER_DIR/include/outputs
 . $PROWLER_DIR/include/credentials_report
 . $PROWLER_DIR/include/scoring
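Note: the new -o, -B and -F flags all require an output mode set via -M and can be combined. A usage sketch (directory and bucket names are hypothetical):

    ./prowler -M csv,html -o /tmp/reports -B my-prowler-bucket/nightly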
@@ -295,7 +310,8 @@ TOTAL_CHECKS=($(echo "${TOTAL_CHECKS[*]}" | tr ' ' '\n' | awk '!seen[$0]++' | so
 get_regions() {
   # Get list of regions based on include/whoami
   REGIONS=$($AWSCLI ec2 describe-regions --query 'Regions[].RegionName' --output text $PROFILE_OPT --region $REGION_FOR_STS --region-names $FILTERREGION 2>&1)
-  if [[ $(echo "$REGIONS" | grep 'AccessDenied\|UnauthorizedOperation') ]]; then
+  ret=$?
+  if [[ $ret -ne 0 ]]; then
     echo "$OPTRED Access Denied trying to describe regions! Review permissions as described here: https://github.com/toniblyx/prowler/#requirements-and-installation $OPTNORMAL"
     EXITCODE=1
     exit $EXITCODE
@@ -310,25 +326,30 @@ show_check_title() {
   local check_scored=CHECK_SCORED_$1
   local check_type=CHECK_TYPE_$1
   local check_asff_compliance_type=CHECK_ASFF_COMPLIANCE_TYPE_$1
+  local check_severity=CHECK_SEVERITY_$1
+  local check_servicename=CHECK_SERVICENAME_$1
   local group_ids
   local group_index
+  local check_name

   # If requested ($2 is any non-null value) iterate all GROUP_CHECKS and produce a comma-separated list of all
   # the GROUP_IDs that include this particular check
   if [[ -n "$2" ]]; then
     for group_index in "${!GROUP_ID[@]}"; do
-      if [[ "${GROUP_CHECKS[$group_index]}" =~ "$1" ]]; then
-        if [[ -n "$group_ids" ]]; then
-          group_ids+=", "
+      for check_name in $(echo "${GROUP_CHECKS[$group_index]}" | sed "s/,/ /g");do
+        if [[ "$check_name" == "$1" ]]; then
+          if [[ -n "$group_ids" ]]; then
+            group_ids+=", "
+          fi
+          group_ids+="${GROUP_ID[$group_index]}"
         fi
-        group_ids+="${GROUP_ID[$group_index]}"
-      fi
+      done
     done
   fi
   # This shows ASFF_COMPLIANCE_TYPE if the group used is ens; this is used to show the ENS compliance ID control and can be used for other compliance groups as well.
   if [[ ${GROUP_ID_READ} == "ens" ]];then
     textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" "$group_ids" "(${!check_asff_compliance_type})"
   else
-    textTitle "${!check_id}" "${!check_title}" "${!check_scored}" "${!check_type}" "$group_ids"
+    textTitle "${!check_id}" "${!check_title}" "${!check_servicename}" "${!check_severity}" "$group_ids"
   fi
 }

@@ -344,7 +365,7 @@ show_group_title() {
 execute_check() {
   if [[ $ACCOUNT_TO_ASSUME ]]; then
-    # Following logic looks for time remaining in the session and review it
+    # Following logic looks for time remaining in the session and review it
     # if it is less than 600 seconds, 10 minutes.
     CURRENT_TIMESTAMP=$(date -u "+%s")
     SESSION_TIME_REMAINING=$(expr $AWS_SESSION_EXPIRATION - $CURRENT_TIMESTAMP)
@@ -357,7 +378,7 @@ execute_check() {
     fi
   fi

-  CHECK_ID="$1"
+  CHECK_ID="$1"

   # See if this is an alternate name for a check
   # for example, we might have been passed 1.01 which is another name for 1.1
@@ -370,7 +391,7 @@ execute_check() {
   local asff_compliance_type_var=CHECK_ASFF_COMPLIANCE_TYPE_$1
   CHECK_ASFF_COMPLIANCE_TYPE="${!asff_compliance_type_var:-Software and Configuration Checks}"
-
+
   # See if this check defines an ASFF Resource Type, if so, use this, falling back to a sane default
   # For a list of Resource Types, see: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html#asff-resources
   local asff_resource_type_var=CHECK_ASFF_RESOURCE_TYPE_$1
@@ -607,6 +628,7 @@ if [[ $GROUP_ID_READ ]];then
     fi
     cleanTemp
     scoring
+    copyToS3
     exit $EXITCODE
   else
     textFail "Use a valid check group ID i.e.: group1, extras, forensics-ready, etc."
@@ -634,6 +656,7 @@ if [[ $CHECK_ID ]];then
   if [[ "${MODES[@]}" =~ "html" ]]; then
     addHtmlFooter >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
   fi
+  copyToS3
   cleanTemp
   exit $EXITCODE
 fi
@@ -643,8 +666,10 @@ execute_all
 if [[ "${MODES[@]}" =~ "html" ]]; then
   addHtmlFooter >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
 fi
+
 scoring
 cleanTemp
+copyToS3

 if [[ $ACCOUNT_TO_ASSUME ]]; then
   # unset env variables with assumed role credentials
diff --git a/util/Dockerfile b/util/Dockerfile
index f7a5ee67..87911e46 100644
--- a/util/Dockerfile
+++ b/util/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.9
+FROM alpine:3.13

 ARG USERNAME=prowler
 ARG USERID=34000
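Note: after the base-image bump the container is rebuilt the same way as before. A sketch of a build-and-run round trip (the image tag is arbitrary, and the run line assumes the image's entrypoint invokes prowler and that AWS credentials are supplied via the environment):

    docker build -f util/Dockerfile -t prowler:2.5.0 .
    docker run --rm -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN prowler:2.5.0 -M csv -B my-prowler-bucket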