fix(aws): Include missing ARNs (#2880)

Pepe Fagoaga
2023-10-02 08:45:06 +02:00
committed by GitHub
parent 3a6c93dd37
commit a7980a202d
18 changed files with 184 additions and 108 deletions
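The pattern across these 18 files: findings that previously carried an empty arn="" now carry a real ARN. Placeholder resources created when a service is not enabled or not in use (GuardDuty, Organizations, Security Hub) point at the audited account's root ARN, while Redshift clusters, WorkSpaces and GuardDuty detectors use their own resource ARN, which becomes a required model field. A minimal sketch of the account-root fallback used throughout, assuming only an account ID and the standard "aws" partition (the helper name is illustrative, not part of this commit):

def account_root_arn(account_id: str, partition: str = "aws") -> str:
    # Placeholder findings reference the account root instead of an empty string.
    return f"arn:{partition}:iam::{account_id}:root"

# Matches the AWS_ACCOUNT_ARN constant introduced in the tests below.
assert account_root_arn("123456789012") == "arn:aws:iam::123456789012:root"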

View File

@@ -13,7 +13,7 @@ class guardduty_is_enabled(Check):
             report.resource_tags = detector.tags
             report.status = "PASS"
             report.status_extended = f"GuardDuty detector {detector.id} enabled."
-            if not detector.id:
+            if detector.arn == guardduty_client.audited_account_arn:
                 report.status = "FAIL"
                 report.status_extended = "GuardDuty is not enabled."
             elif detector.status is None:

View File

@@ -39,7 +39,11 @@ class GuardDuty(AWSService):
                         )
             if not detectors:
                 self.detectors.append(
-                    Detector(id="", arn="", region=regional_client.region)
+                    Detector(
+                        id=self.audited_account,
+                        arn=self.audited_account_arn,
+                        region=regional_client.region,
+                    )
                 )
         except Exception as error:
             logger.error(
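Taken together with the check change above, the service now stores a placeholder detector keyed to the audited account whenever a region returns no detectors, and the check reports FAIL by comparing ARNs instead of testing for an empty detector ID. A rough sketch of that interaction, assuming a pydantic Detector model with the id/arn/region fields used in this file (simplified, not the full service class):

from pydantic import BaseModel

class Detector(BaseModel):
    id: str
    arn: str
    region: str

audited_account = "123456789012"
audited_account_arn = f"arn:aws:iam::{audited_account}:root"

# No detectors in the region: store a placeholder tied to the audited account.
placeholder = Detector(id=audited_account, arn=audited_account_arn, region="eu-west-1")

# The check's new FAIL condition: the "detector" is really the account placeholder.
if placeholder.arn == audited_account_arn:
    print("GuardDuty is not enabled.")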

View File

@@ -49,7 +49,7 @@ class Organizations(AWSService):
             ):
                 self.organizations.append(
                     Organization(
-                        arn="",
+                        arn=self.audited_account_arn,
                         id="AWS Organization",
                         status="NOT_AVAILABLE",
                         master_id="",
@@ -77,7 +77,7 @@ class Organizations(AWSService):
                 # is filtered
                 self.organizations.append(
                     Organization(
-                        arn="",
+                        arn=self.audited_account_arn,
                         id="AWS Organization",
                         status="NOT_AVAILABLE",
                         master_id="",

View File

@@ -93,7 +93,7 @@ class Redshift(AWSService):
 class Cluster(BaseModel):
     id: str
-    arn: str = ""
+    arn: str
     region: str
     public_access: bool = None
     endpoint_address: str = None
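Dropping the empty-string default makes arn a required field, so the service and the tests below must always supply a real cluster ARN. A quick sketch of the effect, assuming pydantic models as used in this file (trimmed to the fields relevant to the change):

from pydantic import BaseModel, ValidationError

class Cluster(BaseModel):
    id: str
    arn: str  # no default any more
    region: str

try:
    Cluster(id="my-cluster", region="eu-west-1")  # missing arn
except ValidationError as error:
    print(f"arn is now mandatory: {error}")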

View File

@@ -25,7 +25,7 @@ class SecurityHub(AWSService):
             if e.response["Error"]["Code"] == "InvalidAccessException":
                 self.securityhubs.append(
                     SecurityHubHub(
-                        arn="",
+                        arn=self.audited_account_arn,
                         id="Security Hub",
                         status="NOT_AVAILABLE",
                         standards="",
@@ -71,7 +71,7 @@ class SecurityHub(AWSService):
                 # SecurityHub is filtered
                 self.securityhubs.append(
                     SecurityHubHub(
-                        arn="",
+                        arn=self.audited_account_arn,
                         id="Security Hub",
                         status="NOT_AVAILABLE",
                         standards="",

View File

@@ -68,7 +68,7 @@ class WorkSpaces(AWSService):
 class WorkSpace(BaseModel):
     id: str
-    arn: str = ""
+    arn: str
     region: str
     user_volume_encryption_enabled: bool = None
     root_volume_encryption_enabled: bool = None

View File

@@ -4,13 +4,12 @@ from uuid import uuid4
 from prowler.providers.aws.services.guardduty.guardduty_service import Detector
-AWS_REGION = "eu-west-1"
-AWS_ACCOUNT_NUMBER = "123456789012"
+AWS_REGION = "us-east-1"
+AWS_ACCOUNT_ID = "123456789012"
+AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_ID}:root"
 detector_id = str(uuid4())
-detector_arn = (
-    f"arn:aws:guardduty:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:detector/{detector_id}"
-)
+detector_arn = f"arn:aws:guardduty:{AWS_REGION}:{AWS_ACCOUNT_ID}:detector/{detector_id}"
 class Test_guardduty_is_enabled:
@@ -19,11 +18,12 @@ class Test_guardduty_is_enabled:
         guardduty_client.detectors = []
         guardduty_client.detectors.append(
             Detector(
-                id="",
+                id=AWS_ACCOUNT_ID,
                 region=AWS_REGION,
-                arn="",
+                arn=AWS_ACCOUNT_ARN,
             )
         )
+        guardduty_client.audited_account_arn = AWS_ACCOUNT_ARN
         with mock.patch(
             "prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
             guardduty_client,
@@ -37,8 +37,9 @@ class Test_guardduty_is_enabled:
             assert len(result) == 1
             assert result[0].status == "FAIL"
             assert search("is not enabled", result[0].status_extended)
-            assert result[0].resource_id == ""
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == AWS_ACCOUNT_ID
+            assert result[0].resource_arn == AWS_ACCOUNT_ARN
+            assert result[0].region == AWS_REGION
     def test_guardduty_enabled(self):
         guardduty_client = mock.MagicMock
@@ -66,6 +67,7 @@ class Test_guardduty_is_enabled:
             assert search("enabled", result[0].status_extended)
             assert result[0].resource_id == detector_id
             assert result[0].resource_arn == detector_arn
+            assert result[0].region == AWS_REGION
     def test_guardduty_configured_but_suspended(self):
         guardduty_client = mock.MagicMock
@@ -93,6 +95,7 @@ class Test_guardduty_is_enabled:
             assert search("configured but suspended", result[0].status_extended)
             assert result[0].resource_id == detector_id
             assert result[0].resource_arn == detector_arn
+            assert result[0].region == AWS_REGION
     def test_guardduty_not_configured(self):
         guardduty_client = mock.MagicMock
@@ -119,3 +122,4 @@ class Test_guardduty_is_enabled:
             assert search("not configured", result[0].status_extended)
             assert result[0].resource_id == detector_id
             assert result[0].resource_arn == detector_arn
+            assert result[0].region == AWS_REGION

View File

@@ -7,7 +7,10 @@ from prowler.providers.aws.services.guardduty.guardduty_service import Detector
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-detector_id = str(uuid4())
+DETECTOR_ID = str(uuid4())
+DETECTOR_ARN = (
+    f"arn:aws:guardduty:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:detector/{DETECTOR_ID}"
+)
 class Test_guardduty_no_high_severity_findings:
@@ -31,8 +34,8 @@ class Test_guardduty_no_high_severity_findings:
         guardduty_client.detectors = []
         guardduty_client.detectors.append(
             Detector(
-                id=detector_id,
-                arn="",
+                id=DETECTOR_ID,
+                arn=DETECTOR_ARN,
                 region=AWS_REGION,
             )
         )
@@ -51,17 +54,18 @@ class Test_guardduty_no_high_severity_findings:
             assert search(
                 "does not have high severity findings.", result[0].status_extended
             )
-            assert result[0].resource_id == detector_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == DETECTOR_ID
+            assert result[0].resource_arn == DETECTOR_ARN
+            assert result[0].region == AWS_REGION
     def test_high_findings(self):
         guardduty_client = mock.MagicMock
         guardduty_client.detectors = []
         guardduty_client.detectors.append(
             Detector(
-                id=detector_id,
+                id=DETECTOR_ID,
                 region=AWS_REGION,
-                arn="",
+                arn=DETECTOR_ARN,
                 status=False,
                 findings=[str(uuid4())],
             )
@@ -79,5 +83,6 @@ class Test_guardduty_no_high_severity_findings:
             assert len(result) == 1
             assert result[0].status == "FAIL"
             assert search("has 1 high severity findings", result[0].status_extended)
-            assert result[0].resource_id == detector_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == DETECTOR_ID
+            assert result[0].resource_arn == DETECTOR_ARN
+            assert result[0].region == AWS_REGION

View File

@@ -11,6 +11,8 @@ from prowler.providers.aws.services.organizations.organizations_service import (
 from prowler.providers.common.models import Audit_Metadata
 AWS_REGION = "us-east-1"
+AWS_ACCOUNT_ID = "123456789012"
+AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_ID}:root"
 class Test_organizations_account_part_of_organizations:
@@ -23,8 +25,8 @@ class Test_organizations_account_part_of_organizations:
                 profile_name=None,
                 botocore_session=None,
             ),
-            audited_account=None,
-            audited_account_arn=None,
+            audited_account=AWS_ACCOUNT_ID,
+            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
             audited_user_id=None,
             audited_partition="aws",
             audited_identity_arn=None,
@@ -72,7 +74,7 @@ class Test_organizations_account_part_of_organizations:
                 result[0].status_extended,
             )
             assert result[0].resource_id == "AWS Organization"
-            assert result[0].resource_arn == ""
+            assert result[0].resource_arn == AWS_ACCOUNT_ARN
             assert result[0].region == AWS_REGION
     @mock_organizations

View File

@@ -11,6 +11,8 @@ from prowler.providers.aws.services.organizations.organizations_service import (
 from prowler.providers.common.models import Audit_Metadata
 AWS_REGION = "us-east-1"
+AWS_ACCOUNT_ID = "123456789012"
+AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_ID}:root"
 def scp_restrict_regions_with_deny():
@@ -27,8 +29,8 @@ class Test_organizations_scp_check_deny_regions:
                 profile_name=None,
                 botocore_session=None,
             ),
-            audited_account=None,
-            audited_account_arn=None,
+            audited_account=AWS_ACCOUNT_ID,
+            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
             audited_user_id=None,
             audited_partition="aws",
             audited_identity_arn=None,
@@ -76,7 +78,7 @@ class Test_organizations_scp_check_deny_regions:
                 result[0].status_extended,
            )
             assert result[0].resource_id == "AWS Organization"
-            assert result[0].resource_arn == ""
+            assert result[0].resource_arn == AWS_ACCOUNT_ARN
             assert result[0].region == AWS_REGION
     @mock_organizations

View File

@@ -10,7 +10,8 @@ from prowler.providers.aws.services.organizations.organizations_service import (
 from prowler.providers.common.models import Audit_Metadata
 AWS_REGION = "us-east-1"
-AWS_ACCOUNT_NUMBER = "123456789012"
+AWS_ACCOUNT_ID = "123456789012"
+AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_ID}:root"
 # Moto: NotImplementedError: The TAG_POLICY policy type has not been implemented
 # Needs to Mock manually
@@ -26,8 +27,8 @@ class Test_organizations_tags_policies_enabled_and_attached:
                 botocore_session=None,
                 region_name=AWS_REGION,
             ),
-            audited_account=AWS_ACCOUNT_NUMBER,
-            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
+            audited_account=AWS_ACCOUNT_ID,
+            audited_account_arn=AWS_ACCOUNT_ARN,
             audited_user_id=None,
             audited_partition="aws",
             audited_identity_arn=None,
@@ -53,7 +54,7 @@ class Test_organizations_tags_policies_enabled_and_attached:
         organizations_client.region = AWS_REGION
         organizations_client.organizations = [
             Organization(
-                arn="",
+                arn=AWS_ACCOUNT_ARN,
                 id="AWS Organization",
                 status="NOT_AVAILABLE",
                 master_id="",
@@ -85,7 +86,7 @@ class Test_organizations_tags_policies_enabled_and_attached:
                 == "AWS Organizations is not in-use for this AWS Account."
             )
             assert result[0].resource_id == "AWS Organization"
-            assert result[0].resource_arn == ""
+            assert result[0].resource_arn == AWS_ACCOUNT_ARN
             assert result[0].region == AWS_REGION
     def test_organization_with_tag_policies_not_attached(self):

View File

@@ -6,8 +6,8 @@ from prowler.providers.aws.services.redshift.redshift_service import Cluster
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-cluster_id = str(uuid4())
+CLUSTER_ID = str(uuid4())
+CLUSTER_ARN = f"arn:aws:redshift:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:cluster:{CLUSTER_ID}"
 class Test_redshift_cluster_audit_logging:
@@ -31,7 +31,8 @@ class Test_redshift_cluster_audit_logging:
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 logging_enabled=False,
             )
@@ -48,15 +49,16 @@ class Test_redshift_cluster_audit_logging:
             result = check.execute()
             assert result[0].status == "FAIL"
             assert search("has audit logging disabled", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN
     def test_cluster_is_audit_logging(self):
         redshift_client = mock.MagicMock
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 logging_enabled=True,
                 endpoint_address="192.192.192.192",
@@ -74,5 +76,5 @@ class Test_redshift_cluster_audit_logging:
             result = check.execute()
             assert result[0].status == "PASS"
             assert search("has audit logging enabled", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN

View File

@@ -7,7 +7,8 @@ from prowler.providers.aws.services.redshift.redshift_service import Cluster
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-cluster_id = str(uuid4())
+CLUSTER_ID = str(uuid4())
+CLUSTER_ARN = f"arn:aws:redshift:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:cluster:{CLUSTER_ID}"
 class Test_redshift_cluster_automated_snapshot:
@@ -31,7 +32,8 @@ class Test_redshift_cluster_automated_snapshot:
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 cluster_snapshots=False,
             )
@@ -48,15 +50,16 @@ class Test_redshift_cluster_automated_snapshot:
             result = check.execute()
             assert result[0].status == "FAIL"
             assert search("has automated snapshots disabled", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN
     def test_cluster_is_audit_logging(self):
         redshift_client = mock.MagicMock
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 cluster_snapshots=True,
             )
@@ -73,5 +76,5 @@ class Test_redshift_cluster_automated_snapshot:
             result = check.execute()
             assert result[0].status == "PASS"
             assert search("has automated snapshots", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN

View File

@@ -6,8 +6,8 @@ from prowler.providers.aws.services.redshift.redshift_service import Cluster
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-cluster_id = str(uuid4())
+CLUSTER_ID = str(uuid4())
+CLUSTER_ARN = f"arn:aws:redshift:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:cluster:{CLUSTER_ID}"
 class Test_redshift_cluster_automatic_upgrades:
@@ -31,7 +31,8 @@ class Test_redshift_cluster_automatic_upgrades:
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 allow_version_upgrade=False,
             )
@@ -48,14 +49,19 @@ class Test_redshift_cluster_automatic_upgrades:
             result = check.execute()
             assert result[0].status == "FAIL"
             assert search("has AllowVersionUpgrade disabled", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN
     def test_cluster_automatic_upgrades(self):
         redshift_client = mock.MagicMock
         redshift_client.clusters = []
         redshift_client.clusters.append(
-            Cluster(id=cluster_id, region=AWS_REGION, allow_version_upgrade=True)
+            Cluster(
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
+                region=AWS_REGION,
+                allow_version_upgrade=True,
+            )
         )
         with mock.patch(
             "prowler.providers.aws.services.redshift.redshift_service.Redshift",
@@ -69,5 +75,5 @@ class Test_redshift_cluster_automatic_upgrades:
             result = check.execute()
             assert result[0].status == "PASS"
             assert search("has AllowVersionUpgrade enabled", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN

View File

@@ -6,8 +6,8 @@ from prowler.providers.aws.services.redshift.redshift_service import Cluster
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-cluster_id = str(uuid4())
+CLUSTER_ID = str(uuid4())
+CLUSTER_ARN = f"arn:aws:redshift:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:cluster:{CLUSTER_ID}"
 class Test_redshift_cluster_public_access:
@@ -31,7 +31,8 @@ class Test_redshift_cluster_public_access:
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 public_access=True,
                 endpoint_address="192.192.192.192",
@@ -49,15 +50,16 @@ class Test_redshift_cluster_public_access:
             result = check.execute()
             assert result[0].status == "FAIL"
             assert search("is publicly accessible", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN
     def test_cluster_is_not_public1(self):
         redshift_client = mock.MagicMock
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 public_access=False,
                 endpoint_address="192.192.192.192",
@@ -75,15 +77,16 @@ class Test_redshift_cluster_public_access:
             result = check.execute()
             assert result[0].status == "PASS"
             assert search("is not publicly accessible", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN
     def test_cluster_is_not_public2(self):
         redshift_client = mock.MagicMock
         redshift_client.clusters = []
         redshift_client.clusters.append(
             Cluster(
-                id=cluster_id,
+                id=CLUSTER_ID,
+                arn=CLUSTER_ARN,
                 region=AWS_REGION,
                 public_access=True,
             )
@@ -100,5 +103,5 @@ class Test_redshift_cluster_public_access:
             result = check.execute()
             assert result[0].status == "PASS"
             assert search("is not publicly accessible", result[0].status_extended)
-            assert result[0].resource_id == cluster_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == CLUSTER_ID
+            assert result[0].resource_arn == CLUSTER_ARN

View File

@@ -4,18 +4,22 @@ from prowler.providers.aws.services.securityhub.securityhub_service import (
     SecurityHubHub,
 )
 AWS_REGION = "eu-west-1"
+AWS_ACCOUNT_ID = "123456789012"
+AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_ID}:root"
 class Test_securityhub_enabled:
     def test_securityhub_hub_inactive(self):
         securityhub_client = mock.MagicMock
         securityhub_client.securityhubs = [
             SecurityHubHub(
-                arn="",
+                arn=AWS_ACCOUNT_ARN,
                 id="Security Hub",
                 status="NOT_AVAILABLE",
                 standards="",
                 integrations="",
-                region="eu-west-1",
+                region=AWS_REGION,
             )
         ]
         with mock.patch(
@@ -33,6 +37,8 @@ class Test_securityhub_enabled:
             assert result[0].status == "FAIL"
             assert result[0].status_extended == "Security Hub is not enabled."
             assert result[0].resource_id == "Security Hub"
+            assert result[0].resource_arn == AWS_ACCOUNT_ARN
+            assert result[0].region == AWS_REGION
     def test_securityhub_hub_active_with_standards(self):
         securityhub_client = mock.MagicMock
@@ -64,6 +70,11 @@ class Test_securityhub_enabled:
                 == "Security Hub is enabled with standards: cis-aws-foundations-benchmark/v/1.2.0."
             )
             assert result[0].resource_id == "default"
+            assert (
+                result[0].resource_arn
+                == "arn:aws:securityhub:us-east-1:0123456789012:hub/default"
+            )
+            assert result[0].region == AWS_REGION
     def test_securityhub_hub_active_with_integrations(self):
         securityhub_client = mock.MagicMock
@@ -95,6 +106,11 @@ class Test_securityhub_enabled:
                 == "Security Hub is enabled without standards but with integrations: prowler."
             )
             assert result[0].resource_id == "default"
+            assert (
+                result[0].resource_arn
+                == "arn:aws:securityhub:us-east-1:0123456789012:hub/default"
+            )
+            assert result[0].region == AWS_REGION
     def test_securityhub_hub_active_without_integrations_or_standards(self):
         securityhub_client = mock.MagicMock
@@ -126,3 +142,8 @@ class Test_securityhub_enabled:
                 == "Security Hub is enabled but without any standard or integration."
             )
             assert result[0].resource_id == "default"
+            assert (
+                result[0].resource_arn
+                == "arn:aws:securityhub:us-east-1:0123456789012:hub/default"
+            )
+            assert result[0].region == AWS_REGION

View File

@@ -6,8 +6,10 @@ from prowler.providers.aws.services.workspaces.workspaces_service import WorkSpa
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-workspace_id = str(uuid4())
+WORKSPACE_ID = str(uuid4())
+WORKSPACE_ARN = (
+    f"arn:aws:workspaces:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:workspace/{WORKSPACE_ID}"
+)
 class Test_workspaces_volume_encryption_enabled:
@@ -34,7 +36,8 @@ class Test_workspaces_volume_encryption_enabled:
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=True,
@@ -59,15 +62,17 @@ class Test_workspaces_volume_encryption_enabled:
             assert search(
                 "without root or user unencrypted volumes", result[0].status_extended
             )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     def test_workspaces_user_not_encrypted(self):
         workspaces_client = mock.MagicMock
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=False,
                 root_volume_encryption_enabled=True,
@@ -90,15 +95,17 @@ class Test_workspaces_volume_encryption_enabled:
             assert len(result) == 1
             assert result[0].status == "FAIL"
             assert search("user unencrypted volumes", result[0].status_extended)
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     def test_workspaces_root_not_encrypted(self):
         workspaces_client = mock.MagicMock
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=False,
@@ -121,15 +128,17 @@ class Test_workspaces_volume_encryption_enabled:
             assert len(result) == 1
             assert result[0].status == "FAIL"
             assert search("root unencrypted volumes", result[0].status_extended)
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     def test_workspaces_user_and_root_not_encrypted(self):
         workspaces_client = mock.MagicMock
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=False,
                 root_volume_encryption_enabled=False,
@@ -154,5 +163,6 @@ class Test_workspaces_volume_encryption_enabled:
             assert search(
                 "with root and user unencrypted volumes", result[0].status_extended
             )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION

View File

@@ -11,7 +11,10 @@ from prowler.providers.common.models import Audit_Metadata
 AWS_REGION = "eu-west-1"
 AWS_ACCOUNT_NUMBER = "123456789012"
-workspace_id = str(uuid4())
+WORKSPACE_ID = str(uuid4())
+WORKSPACE_ARN = (
+    f"arn:aws:workspaces:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:workspace/{WORKSPACE_ID}"
+)
 class Test_workspaces_vpc_2private_1public_subnets_nat:
@@ -73,7 +76,8 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=True,
@@ -104,10 +108,11 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
             assert result[0].status == "FAIL"
             assert (
                 result[0].status_extended
-                == f"Workspace {workspace_id} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
+                == f"Workspace {WORKSPACE_ID} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
             )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     @mock_ec2
     def test_workspaces_vpc_one_private_subnet(self):
@@ -138,7 +143,8 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=True,
@@ -170,10 +176,11 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
             assert result[0].status == "FAIL"
             assert (
                 result[0].status_extended
-                == f"Workspace {workspace_id} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
+                == f"Workspace {WORKSPACE_ID} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
             )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     @mock_ec2
     def test_workspaces_vpc_two_private_subnet(self):
@@ -221,7 +228,8 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=True,
@@ -253,10 +261,11 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
             assert result[0].status == "FAIL"
             assert (
                 result[0].status_extended
-                == f"Workspace {workspace_id} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
+                == f"Workspace {WORKSPACE_ID} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
             )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     @mock_ec2
     def test_workspaces_vpc_two_private_subnet_one_public(self):
@@ -323,7 +332,8 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=True,
@@ -355,10 +365,11 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
             assert result[0].status == "FAIL"
             assert (
                 result[0].status_extended
-                == f"Workspace {workspace_id} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
+                == f"Workspace {WORKSPACE_ID} is not in a private subnet or its VPC does not have 1 public subnet and 2 private subnets with a NAT Gateway attached."
            )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION
     @mock_ec2
     def test_workspaces_vpc_two_private_subnet_one_public_and_nat(self):
@@ -433,7 +444,8 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
         workspaces_client.workspaces = []
         workspaces_client.workspaces.append(
             WorkSpace(
-                id=workspace_id,
+                id=WORKSPACE_ID,
+                arn=WORKSPACE_ARN,
                 region=AWS_REGION,
                 user_volume_encryption_enabled=True,
                 root_volume_encryption_enabled=True,
@@ -465,7 +477,8 @@ class Test_workspaces_vpc_2private_1public_subnets_nat:
             assert result[0].status == "PASS"
             assert (
                 result[0].status_extended
-                == f"Workspace {workspace_id} is in a private subnet within a VPC which has 1 public subnet 2 private subnets with a NAT Gateway attached."
+                == f"Workspace {WORKSPACE_ID} is in a private subnet within a VPC which has 1 public subnet 2 private subnets with a NAT Gateway attached."
             )
-            assert result[0].resource_id == workspace_id
-            assert result[0].resource_arn == ""
+            assert result[0].resource_id == WORKSPACE_ID
+            assert result[0].resource_arn == WORKSPACE_ARN
+            assert result[0].region == AWS_REGION