Security Hub integration improvement and adding severity for checks @xeroxnir

Toni de la Fuente
2020-09-16 23:19:06 +02:00
committed by GitHub
153 changed files with 216 additions and 16 deletions


@@ -103,10 +103,19 @@ bsd_get_time_in_milliseconds() {
gnu_get_iso8601_timestamp() {
"$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ"
}
gnu_get_iso8601_hundred_days_ago() {
"$DATE_CMD" -d "100 days ago" -u +"%Y-%m-%dT%H:%M:%SZ"
}
bsd_get_iso8601_timestamp() {
"$DATE_CMD" -u +"%Y-%m-%dT%H:%M:%SZ"
}
bsd_get_iso8601_hundred_days_ago() {
"$DATE_CMD" -v-100d -u +"%Y-%m-%dT%H:%M:%SZ"
}
gnu_test_tcp_connectivity() {
HOST=$1
PORT=$2
@@ -150,6 +159,9 @@ if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == "linux-musl" ]; then
get_iso8601_timestamp() {
gnu_get_iso8601_timestamp
}
get_iso8601_hundred_days_ago() {
gnu_get_iso8601_hundred_days_ago
}
test_tcp_connectivity() {
gnu_test_tcp_connectivity "$1" "$2" "$3"
}
@@ -207,6 +219,9 @@ elif [[ "$OSTYPE" == "darwin"* ]]; then
get_iso8601_timestamp() {
bsd_get_iso8601_timestamp
}
get_iso8601_hundred_days_ago() {
bsd_get_iso8601_hundred_days_ago
}
fi
if "$BASE64_CMD" --version >/dev/null 2>&1 ; then
decode_report() {
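
For reference, a quick sketch (not part of the commit) of what the new hundred-days-ago helpers print, assuming GNU date on Linux and BSD date on macOS; the script abstracts the variant behind $DATE_CMD and picks the wrapper based on $OSTYPE:

date -u +"%Y-%m-%dT%H:%M:%SZ"                     # current UTC timestamp, e.g. 2020-09-16T21:19:06Z
date -d "100 days ago" -u +"%Y-%m-%dT%H:%M:%SZ"   # GNU date: e.g. 2020-06-08T21:19:06Z
date -v-100d -u +"%Y-%m-%dT%H:%M:%SZ"             # BSD/macOS date equivalent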


@@ -57,7 +57,7 @@ textPass(){
generateJsonOutput "$1" "Pass" | tee -a ${OUTPUT_FILE_NAME}.$EXTENSION_JSON
fi
if [[ "${MODES[@]}" =~ "json-asff" ]]; then
JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED" "INFORMATIONAL")
JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "PASSED")
echo "${JSON_ASFF_OUTPUT}" | tee -a $OUTPUT_FILE_NAME.$EXTENSION_ASFF
if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then
sendToSecurityHub "${JSON_ASFF_OUTPUT}" "${REPREGION}"
@@ -144,7 +144,7 @@ textFail(){
generateJsonOutput "$1" "${level}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_JSON}
fi
if [[ "${MODES[@]}" =~ "json-asff" ]]; then
JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "${level}" "HIGH")
JSON_ASFF_OUTPUT=$(generateJsonAsffOutput "$1" "${level}")
echo "${JSON_ASFF_OUTPUT}" | tee -a ${OUTPUT_FILE_NAME}.${EXTENSION_ASFF}
if [[ "${SEND_TO_SECURITY_HUB}" -eq 1 ]]; then
sendToSecurityHub "${JSON_ASFF_OUTPUT}" "${REPREGION}"
@@ -225,6 +225,7 @@ generateJsonOutput(){
--arg TITLE_TEXT "$TITLE_TEXT" \
--arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \
--arg STATUS "$status" \
--arg SEVERITY "$CHECK_SEVERITY" \
--arg SCORED "$ITEM_SCORED" \
--arg ITEM_LEVEL "$ITEM_LEVEL" \
--arg TITLE_ID "$TITLE_ID" \
@@ -235,6 +236,7 @@ generateJsonOutput(){
"Account Number": $ACCOUNT_NUM,
"Control": $TITLE_TEXT,
"Message": $MESSAGE,
"Severity": $SEVERITY,
"Status": $STATUS,
"Scored": $SCORED,
"Level": $ITEM_LEVEL,
@@ -253,23 +255,22 @@ generateJsonAsffOutput(){
if [[ "$status" == "FAIL" ]]; then
status="FAILED"
fi
local severity=$3
jq -M -c \
--arg ACCOUNT_NUM "$ACCOUNT_NUM" \
--arg TITLE_TEXT "$TITLE_TEXT" \
--arg MESSAGE "$(echo -e "${message}" | sed -e 's/^[[:space:]]*//')" \
--arg UNIQUE_ID "$(LC_ALL=C echo -e -n "${message}" | tr -cs '[:alnum:]._~-' '_')" \
--arg STATUS "$status" \
--arg SEVERITY "$severity" \
--arg SEVERITY "$(echo $CHECK_SEVERITY| awk '{ print toupper($0) }')" \
--arg TITLE_ID "$TITLE_ID" \
--arg CHECK_ID "$CHECK_ID" \
--arg TYPE "$ASFF_TYPE" \
--arg RESOURCE_TYPE "$ASFF_RESOURCE_TYPE" \
--arg REPREGION "$REPREGION" \
--arg TIMESTAMP "$(get_iso8601_timestamp)" \
--arg PROWLER_VERSION "$PROWLER_VERSION" \
--arg AWS_PARTITION "$AWS_PARTITION" \
-n '{
"SchemaVersion": "2018-10-08",
"Id": "prowler-\($TITLE_ID)-\($ACCOUNT_NUM)-\($REPREGION)-\($UNIQUE_ID)",
"ProductArn": "arn:\($AWS_PARTITION):securityhub:\($REPREGION):\($ACCOUNT_NUM):product/\($ACCOUNT_NUM)/default",
@@ -277,7 +278,7 @@ generateJsonAsffOutput(){
"ProviderName": "Prowler",
"ProviderVersion": $PROWLER_VERSION
},
"GeneratorId": "prowler-\($PROWLER_VERSION)",
"GeneratorId": "prowler-\($CHECK_ID)",
"AwsAccountId": $ACCOUNT_NUM,
"Types": [
$TYPE
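
ASFF's Severity.Label only accepts uppercase values (INFORMATIONAL, LOW, MEDIUM, HIGH, CRITICAL), so generateJsonAsffOutput now derives the label from the per-check severity; a minimal sketch of that pipeline (value assumed):

CHECK_SEVERITY="High"                                 # hypothetical value set by an individual check
echo "$CHECK_SEVERITY" | awk '{ print toupper($0) }'  # -> HIGH, the form Severity.Label expects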


@@ -30,13 +30,39 @@ checkSecurityHubCompatibility(){
done
}
resolveSecurityHubPreviousFails(){
# Mark this check's previously FAILED findings as PASSED, since Prowler did not re-detect them in this run
for regx in $REGIONS; do
local check="$1"
NEW_TIMESTAMP=$(get_iso8601_timestamp)
PREVIOUS_DATE=$(get_iso8601_hundred_days_ago)
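# Filter: this check's findings (GeneratorId prefix "prowler-$check") still marked FAILED and updated within the last 100 days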
FILTER="{\"UpdatedAt\":[{\"Start\":\"$PREVIOUS_DATE\",\"End\":\"$NEW_TIMESTAMP\"}],\"GeneratorId\":[{\"Value\": \"prowler-$check\",\"Comparison\":\"PREFIX\"}],\"ComplianceStatus\":[{\"Value\": \"FAILED\",\"Comparison\":\"EQUALS\"}]}"
SECURITY_HUB_PREVIOUS_FINDINGS=$($AWSCLI securityhub --region "$regx" $PROFILE_OPT get-findings --filters "${FILTER}" | jq -c --arg updated_at "$NEW_TIMESTAMP" '[ .Findings[] | .Compliance = {"Status":"PASSED"} | .UpdatedAt = $updated_at ]')
if [[ $SECURITY_HUB_PREVIOUS_FINDINGS != "[]" ]]; then
BATCH_IMPORT_RESULT=$($AWSCLI securityhub --region "$regx" $PROFILE_OPT batch-import-findings --findings "${SECURITY_HUB_PREVIOUS_FINDINGS}")
# Check that the import succeeded
if [[ -z "${BATCH_IMPORT_RESULT}" ]] || ! jq -e '.SuccessCount == 1' <<< "${BATCH_IMPORT_RESULT}" > /dev/null 2>&1; then
echo -e "\n$RED ERROR!$NORMAL Failed to send check output to AWS Security Hub\n"
fi
fi
done
}
sendToSecurityHub(){
local findings="$1"
local region="$2"
BATCH_IMPORT_RESULT=$($AWSCLI securityhub --region "$region" $PROFILE_OPT batch-import-findings --findings "${findings}")
# A successful CLI response looks like: {"SuccessCount": 1, "FailedFindings": [], "FailedCount": 0}
# so verify that SuccessCount is 1
if [[ -z "${BATCH_IMPORT_RESULT}" ]] || ! jq -e '.SuccessCount == 1' <<< "${BATCH_IMPORT_RESULT}" > /dev/null 2>&1; then
echo -e "\n$RED ERROR!$NORMAL Failed to send check output to AWS Security Hub\n"
fi
}
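
A self-contained sketch (sample finding assumed) of the jq transform that resolveSecurityHubPreviousFails applies before re-importing the findings:

# Hypothetical single finding standing in for the `aws securityhub get-findings` response
echo '{"Findings":[{"Id":"prowler-1.2-123456789012-eu-west-1-x","Compliance":{"Status":"FAILED"},"UpdatedAt":"2020-06-10T00:00:00Z"}]}' \
| jq -c --arg updated_at "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
    '[ .Findings[] | .Compliance = {"Status":"PASSED"} | .UpdatedAt = $updated_at ]'
# -> [{"Id":"prowler-1.2-123456789012-eu-west-1-x","Compliance":{"Status":"PASSED"},"UpdatedAt":"<now>"}]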