fix(asff): handle empty Recommendation Url (#2496)

Co-authored-by: Pepe Fagoaga <pepe@verica.io>
Sergio Garcia authored on 2023-06-16 12:17:09 +02:00, committed by GitHub
parent af2b19436f
commit 0d81bd457c
12 changed files with 130 additions and 30 deletions
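
In short: the inline PASS/FAIL/WARNING mapping used for the ASFF Compliance.Status field moves into a reusable generate_json_asff_status helper, and a blank Remediation.Recommendation.Url is now filled with the Security Hub documentation URL before the ASFF Remediation block is built; the check metadata files below additionally fill in blank RelatedUrl and Recommendation.Url values. A minimal standalone sketch of the two behaviors follows, assuming only the standard library; the names SECURITY_HUB_DOCS_URL and recommendation_url_or_default are illustrative only, and the commit's actual helper uses an if/elif chain rather than a dict lookup.

```python
# Sketch of the behavior introduced by this commit (simplified; not the real
# fill_json_asff implementation, which operates on Prowler's ASFF output model).
SECURITY_HUB_DOCS_URL = (
    "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
)


def generate_json_asff_status(status: str) -> str:
    # Map Prowler statuses onto the values ASFF allows for Compliance.Status
    return {"PASS": "PASSED", "FAIL": "FAILED", "WARNING": "WARNING"}.get(
        status, "NOT_AVAILABLE"
    )


def recommendation_url_or_default(url: str) -> str:
    # A blank Recommendation.Url falls back to the Security Hub docs URL
    return url or SECURITY_HUB_DOCS_URL


print(generate_json_asff_status("PASS"))            # PASSED
print(generate_json_asff_status("SOMETHING ELSE"))  # NOT_AVAILABLE
print(recommendation_url_or_default(""))            # https://docs.aws.amazon.com/securityhub/...
```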

View File

@@ -73,21 +73,16 @@ def fill_json_asff(finding_output, audit_info, finding, output_options):
compliance_summary.append(item)
# Ensures finding_status matches allowed values in ASFF
- finding_status = ""
- if finding.status == "PASS":
- finding_status = "PASSED"
- elif finding.status == "FAIL":
- finding_status = "FAILED"
- elif finding.status == "WARNING":
- finding_status = "WARNING"
- else:
- finding_status = "NOT_AVAILABLE"
+ finding_status = generate_json_asff_status(finding.status)
finding_output.Compliance = Compliance(
Status=finding_status,
AssociatedStandards=associated_standards,
RelatedRequirements=compliance_summary,
)
+ # Fill Recommendation Url if it is blank
+ if not finding.check_metadata.Remediation.Recommendation.Url:
+ finding.check_metadata.Remediation.Recommendation.Url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
finding_output.Remediation = {
"Recommendation": finding.check_metadata.Remediation.Recommendation
}
@@ -95,6 +90,20 @@ def fill_json_asff(finding_output, audit_info, finding, output_options):
return finding_output
+ def generate_json_asff_status(status: str) -> str:
+ json_asff_status = ""
+ if status == "PASS":
+ json_asff_status = "PASSED"
+ elif status == "FAIL":
+ json_asff_status = "FAILED"
+ elif status == "WARNING":
+ json_asff_status = "WARNING"
+ else:
+ json_asff_status = "NOT_AVAILABLE"
+ return json_asff_status
def fill_json_ocsf(
finding_output: Check_Output_JSON_OCSF, audit_info, finding, output_options
):

View File

@@ -14,7 +14,7 @@
"ResourceType": "AwsBackupBackupPlan",
"Description": "This check ensures that there is at least one backup plan in place.",
"Risk": "Without a backup plan, an organization may be at risk of losing important data due to accidental deletion, system failures, or natural disasters. This can result in significant financial and reputational damage for the organization.",
- "RelatedUrl": "",
+ "RelatedUrl": "https://docs.aws.amazon.com/aws-backup/latest/devguide/about-backup-plans.html",
"Remediation": {
"Code": {
"CLI": "aws backup create-backup-plan --backup-plan <backup_plan_name> --backup-plan-rule <backup_rule_name>",
@@ -24,7 +24,7 @@
},
"Recommendation": {
"Text": "Use AWS Backup to create backup plans for your critical data and services.",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/aws-backup/latest/devguide/about-backup-plans.html"
}
},
"Categories": [],

View File

@@ -14,7 +14,7 @@
"ResourceType": "Other",
"Description": "This check ensures that there is at least one backup report plan in place.",
"Risk": "Without a backup report plan, an organization may lack visibility into the success or failure of backup operations.",
- "RelatedUrl": "",
+ "RelatedUrl": "https://docs.aws.amazon.com/aws-backup/latest/devguide/create-report-plan-console.html",
"Remediation": {
"Code": {
"CLI": "aws backup create-report-plan --report-plan-name <report-plan-name> --report-delivery-channel <value> --report-setting <value>",
@@ -24,7 +24,7 @@
},
"Recommendation": {
"Text": "Use AWS Backup to create backup report plans that provide visibility into the success or failure of backup operations.",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/aws-backup/latest/devguide/create-report-plan-console.html"
}
},
"Categories": [],

View File

@@ -15,17 +15,17 @@
"ResourceType": "AwsBackupBackupVault",
"Description": "This check ensures that AWS Backup vaults are encrypted with AWS KMS.",
"Risk": "Without encryption using AWS KMS, an organization's backup data may be at risk of unauthorized access, which can lead to data breaches and other security incidents.",
- "RelatedUrl": "",
+ "RelatedUrl": "https://docs.aws.amazon.com/aws-backup/latest/devguide/encryption.html",
"Remediation": {
"Code": {
"CLI": "aws backup update-backup-vault --backup-vault-name <backup_vault_name> --encryption-key-arn <kms_key_arn>",
"NativeIaC": "",
- "Other": "",
+ "Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Athena/encrypted-with-cmk.html",
"Terraform": ""
},
"Recommendation": {
"Text": "Use AWS KMS to encrypt your AWS Backup vaults and backup data.",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/aws-backup/latest/devguide/encryption.html"
}
},
"Categories": [],

View File

@@ -14,7 +14,7 @@
"ResourceType": "AwsBackupBackupVault",
"Description": "This check ensures that AWS Backup vaults exist to provide a secure and durable storage location for backup data.",
"Risk": "Without an AWS Backup vault, an organization's critical data may be at risk of being lost in the event of an accidental deletion, system failures, or natural disasters.",
- "RelatedUrl": "",
+ "RelatedUrl": "https://docs.aws.amazon.com/aws-backup/latest/devguide/vaults.html",
"Remediation": {
"Code": {
"CLI": "aws backup create-backup-vault --backup-vault-name <backup_vault_name>",
@@ -24,7 +24,7 @@
},
"Recommendation": {
"Text": "Use AWS Backup to create backup vaults for your critical data and services.",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/aws-backup/latest/devguide/vaults.html"
}
},
"Categories": [],

View File

@@ -20,7 +20,7 @@
},
"Recommendation": {
"Text": "Ensure FMS is enabled and all the policies are compliant across your AWS accounts",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/waf/latest/developerguide/getting-started-fms-intro.html"
}
},
"Categories": [],

View File

@@ -10,17 +10,17 @@
"ResourceType": "AwsRdsDbInstance",
"Description": "Check if RDS is using a supported engine version for MariaDB, MySQL and PostgreSQL",
"Risk": "If not enabled RDS instances may be vulnerable to security issues",
- "RelatedUrl": "",
+ "RelatedUrl": "https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html",
"Remediation": {
"Code": {
"CLI": "aws rds describe-db-engine-versions --engine <my_engine>'",
"NativeIaC": "",
- "Other": "https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html",
+ "Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html"
}
},
"Categories": [],

View File

@@ -20,7 +20,7 @@
},
"Recommendation": {
"Text": "Enable SSM Incidents and create response plans",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/incident-manager/latest/userguide/response-plans.html"
}
},
"Categories": [],

View File

@@ -22,7 +22,7 @@
},
"Recommendation": {
"Text": "Ensure there are vpcs in more than one region",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/vpc/latest/userguide/vpc-example-private-subnets-nat.html"
}
},
"Categories": [],

View File

@@ -12,7 +12,7 @@
"ResourceType": "AwsEc2Vpc",
"Description": "Ensure all vpc has subnets in more than one availability zone",
"Risk": "",
- "RelatedUrl": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Scenario2.html",
+ "RelatedUrl": "https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html",
"Remediation": {
"Code": {
"CLI": "aws ec2 create-subnet",
@@ -22,7 +22,7 @@
},
"Recommendation": {
"Text": "Ensure all vpc has subnets in more than one availability zone",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html"
}
},
"Categories": [],

View File

@@ -22,7 +22,7 @@
},
"Recommendation": {
"Text": "Ensure all vpc has public and private subnets defined",
- "Url": ""
+ "Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Scenario2.html"
}
},
"Categories": [],

View File

@@ -24,7 +24,7 @@ from prowler.lib.check.compliance_models import (
)
from prowler.lib.check.models import Check_Report, load_check_metadata
from prowler.lib.outputs.file_descriptors import fill_file_descriptors
- from prowler.lib.outputs.json import fill_json_asff
+ from prowler.lib.outputs.json import fill_json_asff, generate_json_asff_status
from prowler.lib.outputs.models import (
Check_Output_CSV,
Check_Output_JSON_ASFF,
@@ -428,8 +428,6 @@ class Test_Outputs:
finding.status = "PASS"
finding.status_extended = "This is a test"
- input = Check_Output_JSON_ASFF()
expected = Check_Output_JSON_ASFF()
expected.Id = f"prowler-{finding.check_metadata.CheckID}-123456789012-eu-west-1-{hash_sha512('test-resource')}"
expected.ProductArn = "arn:aws:securityhub:eu-west-1::product/prowler/prowler"
@@ -462,6 +460,93 @@ class Test_Outputs:
expected.Remediation = {
"Recommendation": finding.check_metadata.Remediation.Recommendation
}
+ input = Check_Output_JSON_ASFF()
+ output_options = mock.MagicMock()
assert (
fill_json_asff(input, input_audit_info, finding, output_options) == expected
)
+ def test_fill_json_asff_without_remediation_recommendation_url(self):
+ input_audit_info = AWS_Audit_Info(
+ session_config=None,
+ original_session=None,
+ audit_session=None,
+ audited_account=AWS_ACCOUNT_ID,
+ audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
+ audited_identity_arn="test-arn",
+ audited_user_id="test",
+ audited_partition="aws",
+ profile="default",
+ profile_region="eu-west-1",
+ credentials=None,
+ assumed_role_info=None,
+ audited_regions=["eu-west-2", "eu-west-1"],
+ organizations_metadata=None,
+ audit_resources=None,
+ mfa_enabled=False,
+ )
+ finding = Check_Report(
+ load_check_metadata(
+ f"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json"
+ ).json()
+ )
+ # Empty the Remediation.Recommendation.Url
+ finding.check_metadata.Remediation.Recommendation.Url = ""
+ finding.resource_details = "Test resource details"
+ finding.resource_id = "test-resource"
+ finding.resource_arn = "test-arn"
+ finding.region = "eu-west-1"
+ finding.status = "PASS"
+ finding.status_extended = "This is a test"
+ expected = Check_Output_JSON_ASFF()
+ expected.Id = f"prowler-{finding.check_metadata.CheckID}-123456789012-eu-west-1-{hash_sha512('test-resource')}"
+ expected.ProductArn = "arn:aws:securityhub:eu-west-1::product/prowler/prowler"
+ expected.ProductFields = ProductFields(
+ ProviderVersion=prowler_version, ProwlerResourceName="test-arn"
+ )
+ expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
+ expected.AwsAccountId = AWS_ACCOUNT_ID
+ expected.Types = finding.check_metadata.CheckType
+ expected.FirstObservedAt = (
+ expected.UpdatedAt
+ ) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+ expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
+ expected.Title = finding.check_metadata.CheckTitle
+ expected.Description = finding.status_extended
+ expected.Resources = [
+ Resource(
+ Id="test-arn",
+ Type=finding.check_metadata.ResourceType,
+ Partition="aws",
+ Region="eu-west-1",
+ )
+ ]
+ expected.Compliance = Compliance(
+ Status="PASS" + "ED",
+ RelatedRequirements=[],
+ AssociatedStandards=[],
+ )
+ # Set the check's remediation
+ expected.Remediation = {
+ "Recommendation": finding.check_metadata.Remediation.Recommendation,
+ # "Code": finding.check_metadata.Remediation.Code,
+ }
+ expected.Remediation[
+ "Recommendation"
+ ].Text = finding.check_metadata.Remediation.Recommendation.Text
+ expected.Remediation[
+ "Recommendation"
+ ].Url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
+ input = Check_Output_JSON_ASFF()
+ output_options = mock.MagicMock()
+ assert (
+ fill_json_asff(input, input_audit_info, finding, output_options) == expected
+ )
@@ -832,3 +917,9 @@ class Test_Outputs:
"CIS-1.4": ["2.1.3"],
"CIS-1.5": ["2.1.3"],
}
+ def test_generate_json_asff_status(self):
+ assert generate_json_asff_status("PASS") == "PASSED"
+ assert generate_json_asff_status("FAIL") == "FAILED"
+ assert generate_json_asff_status("WARNING") == "WARNING"
+ assert generate_json_asff_status("SOMETHING ELSE") == "NOT_AVAILABLE"
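
For reference, the table-driven assertions above could also be written with pytest parametrization. This is only an illustrative alternative sketch, not part of the commit; it assumes the helper is importable from prowler.lib.outputs.json as in the import change above.

```python
import pytest

from prowler.lib.outputs.json import generate_json_asff_status


@pytest.mark.parametrize(
    "status, expected",
    [
        ("PASS", "PASSED"),
        ("FAIL", "FAILED"),
        ("WARNING", "WARNING"),
        ("SOMETHING ELSE", "NOT_AVAILABLE"),
    ],
)
def test_generate_json_asff_status_parametrized(status, expected):
    # Every Prowler status must map to a value ASFF accepts for Compliance.Status
    assert generate_json_asff_status(status) == expected
```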