chore: Move shared to lib/ for AWS (#1321)

* chore: Move shared to lib/

* chore: Move shared to lib/ for AWS

Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
This commit is contained in:
Pepe Fagoaga
2022-08-22 11:41:09 +02:00
committed by GitHub
parent d18b430c16
commit c7a43b09ce
21 changed files with 215 additions and 198 deletions

View File

@@ -1,7 +1,4 @@
import json
import sys
from itertools import groupby
from operator import itemgetter
from arnparse import arnparse
from boto3 import client, session
@@ -9,11 +6,10 @@ from botocore.credentials import RefreshableCredentials
from botocore.session import get_session
from colorama import Fore, Style
from config.config import aws_services_json_file, json_asff_file_suffix, timestamp_utc
from lib.arn.arn import arn_parsing
from config.config import aws_services_json_file
from lib.logger import logger
from lib.outputs.models import Check_Output_JSON_ASFF
from lib.utils.utils import open_file, parse_json_file
from providers.aws.lib.arn.arn import arn_parsing
from providers.aws.models import (
AWS_Assume_Role,
AWS_Audit_Info,
@@ -326,105 +322,3 @@ def generate_regional_clients(service, audit_info):
regional_clients[region] = regional_client
# regional_clients.append(regional_client)
return regional_clients
def send_to_security_hub(
    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
):
    """Send a single Prowler finding to AWS Security Hub in the given region.

    Best-effort: every error (including Security Hub not being enabled in the
    region, which makes describe_hub() raise) is logged and swallowed.

    Args:
        region: AWS region to deliver the finding to.
        finding_output: finding in ASFF shape; serialized with .dict().
        session: boto3 session used to build the securityhub client.
    """
    try:
        logger.info("Sending findings to Security Hub.")
        # Check if security hub is enabled in current region
        security_hub_client = session.client("securityhub", region_name=region)
        security_hub_client.describe_hub()
        # Check if Prowler integration is enabled in Security Hub
        # NOTE(review): we only log here and still attempt the import below --
        # confirm that is intended rather than an early return.
        if "prowler/prowler" not in str(
            security_hub_client.list_enabled_products_for_import()
        ):
            logger.error(
                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
            )
        # Send finding to Security Hub
        batch_import = security_hub_client.batch_import_findings(
            Findings=[finding_output.dict()]
        )
        if batch_import["FailedCount"] > 0:
            failed_import = batch_import["FailedFindings"][0]
            # BUGFIX: this path sends new findings, not archived ones; the old
            # message ("archived findings") was copy-pasted from the archiver.
            logger.error(
                f"Failed to send findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
            )
    except Exception as error:
        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
def resolve_security_hub_previous_findings(
    output_directory: str, audit_info: AWS_Audit_Info
) -> None:
    """Archive stale Prowler findings in Security Hub.

    Reads this run's json-asff output file and, for every region (one group
    per ProductArn), flags as ARCHIVED any ACTIVE Prowler finding already in
    Security Hub whose Id was not re-detected in this execution.

    Note: annotated ``-> None`` because nothing is ever returned (the previous
    ``-> list`` annotation was inaccurate). Per-region errors are logged and
    swallowed, so a region without Security Hub enabled is skipped silently.
    """
    logger.info("Checking previous findings in Security Hub to archive them.")
    # Read current findings from json-asff file
    with open(
        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
    ) as f:
        json_asff_file = json.load(f)
    # Sort by ProductArn first: groupby() only groups consecutive equal keys
    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
    # Group by region
    for product_arn, current_findings in groupby(
        json_asff_file, key=itemgetter("ProductArn")
    ):
        # ProductArn is "arn:partition:service:region:...", so field 3 is the region
        region = product_arn.split(":")[3]
        try:
            # Check if security hub is enabled in current region
            security_hub_client = audit_info.audit_session.client(
                "securityhub", region_name=region
            )
            security_hub_client.describe_hub()
            # Get current findings IDs
            current_findings_ids = []
            for finding in current_findings:
                current_findings_ids.append(finding["Id"])
            # Get findings of that region
            security_hub_client = audit_info.audit_session.client(
                "securityhub", region_name=region
            )
            findings_filter = {
                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
                "AwsAccountId": [
                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
                ],
                "Region": [{"Value": region, "Comparison": "EQUALS"}],
            }
            get_findings_paginator = security_hub_client.get_paginator("get_findings")
            findings_to_archive = []
            for page in get_findings_paginator.paginate(Filters=findings_filter):
                # Archive findings that did not appear in this execution
                for finding in page["Findings"]:
                    if finding["Id"] not in current_findings_ids:
                        finding["RecordState"] = "ARCHIVED"
                        finding["UpdatedAt"] = timestamp_utc.strftime(
                            "%Y-%m-%dT%H:%M:%SZ"
                        )
                        findings_to_archive.append(finding)
            logger.info(f"Archiving {len(findings_to_archive)} findings.")
            # Send archive findings to SHub in chunks of 100
            # (batch_import_findings accepts at most 100 findings per call)
            list_chunked = [
                findings_to_archive[i : i + 100]
                for i in range(0, len(findings_to_archive), 100)
            ]
            for findings in list_chunked:
                batch_import = security_hub_client.batch_import_findings(
                    Findings=findings
                )
                if batch_import["FailedCount"] > 0:
                    failed_import = batch_import["FailedFindings"][0]
                    logger.error(
                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
                    )
        except Exception as error:
            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")

View File

View File

View File

@@ -0,0 +1,45 @@
from arnparse import arnparse
from providers.aws.lib.arn.error import (
RoleArnParsingEmptyResource,
RoleArnParsingFailedMissingFields,
RoleArnParsingIAMRegionNotEmpty,
RoleArnParsingInvalidAccountID,
RoleArnParsingInvalidResourceType,
RoleArnParsingPartitionEmpty,
RoleArnParsingServiceNotIAM,
)
def arn_parsing(arn):
    """Parse and validate an IAM role ARN.

    Args:
        arn: the role ARN string, e.g. "arn:aws:iam::123456789012:role/name".

    Returns:
        The parsed ARN object produced by arnparse.

    Raises:
        RoleArnParsingFailedMissingFields: not exactly six ':'-separated fields.
        RoleArnParsingIAMRegionNotEmpty: region field is non-empty (IAM ARNs
            never carry a region).
        RoleArnParsingPartitionEmpty: partition field is empty.
        RoleArnParsingServiceNotIAM: service field is not "iam".
        RoleArnParsingInvalidAccountID: account id is missing or not 12 digits.
        RoleArnParsingInvalidResourceType: resource type is not "role".
        RoleArnParsingEmptyResource: resource (role name) is empty.
    """
    # An ARN must contain exactly six colon-separated fields
    if len(arn.split(":")) != 6:
        raise RoleArnParsingFailedMissingFields

    arn_parsed = arnparse(arn)

    # Guard clauses replace the original deeply nested if/else chain; the
    # validation order (and therefore which exception wins) is unchanged.
    # In IAM ARNs the region is always empty.
    if arn_parsed.region is not None:
        raise RoleArnParsingIAMRegionNotEmpty
    if arn_parsed.partition is None:
        raise RoleArnParsingPartitionEmpty
    if arn_parsed.service != "iam":
        raise RoleArnParsingServiceNotIAM
    # A valid account id is exactly 12 numeric characters
    if (
        arn_parsed.account_id is None
        or len(arn_parsed.account_id) != 12
        or not arn_parsed.account_id.isnumeric()
    ):
        raise RoleArnParsingInvalidAccountID
    if arn_parsed.resource_type != "role":
        raise RoleArnParsingInvalidResourceType
    if arn_parsed.resource == "":
        raise RoleArnParsingEmptyResource
    return arn_parsed

View File

@@ -0,0 +1,33 @@
import sure # noqa
from providers.aws.lib.arn.arn import arn_parsing
# Fixtures for the ARN parsing test below
ACCOUNT_ID = "123456789012"  # well-formed 12-digit AWS account id
RESOURCE_TYPE = "role"  # arn_parsing only accepts resource type "role"
IAM_ROLE = "test-role"
class Test_ARN_Parsing:
    """Tests for providers.aws.lib.arn.arn.arn_parsing."""

    def test_arn_parsing(self):
        # Each case maps an input ARN to the attribute values expected on the
        # object returned by arn_parsing.
        test_cases = [
            {
                "input_arn": f"arn:aws:iam::{ACCOUNT_ID}:{RESOURCE_TYPE}/{IAM_ROLE}",
                "expected": {
                    "partition": "aws",
                    "service": "iam",
                    "region": None,
                    "account_id": ACCOUNT_ID,
                    "resource_type": RESOURCE_TYPE,
                    "resource": IAM_ROLE,
                },
            }
        ]
        for case in test_cases:
            parsed = arn_parsing(case["input_arn"])
            for attribute, expected_value in case["expected"].items():
                getattr(parsed, attribute).should.equal(expected_value)

View File

@@ -0,0 +1,43 @@
class RoleArnParsingFailedMissingFields(Exception):
    """Raised when the ARN does not split into exactly six ':'-separated fields."""

    def __init__(self):
        detail = (
            "The assumed role arn contains a number of fields different than six "
            "separated by :, please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)
class RoleArnParsingIAMRegionNotEmpty(Exception):
    """Raised when an IAM ARN carries a region (IAM ARNs must have it empty)."""

    def __init__(self):
        detail = (
            "The assumed role arn contains a non-empty value for region, "
            "since it is an IAM arn is not valid, please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)
class RoleArnParsingPartitionEmpty(Exception):
    """Raised when the ARN's partition field is empty."""

    def __init__(self):
        detail = (
            "The assumed role arn does not contain a value for partition, "
            "please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)
class RoleArnParsingServiceNotIAM(Exception):
    """Raised when the ARN's service field is anything other than 'iam'."""

    def __init__(self):
        detail = (
            "The assumed role arn contains a value for service distinct than iam, "
            "please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)
class RoleArnParsingInvalidAccountID(Exception):
    """Raised when the ARN's account id is missing or not a 12-digit number."""

    def __init__(self):
        detail = (
            "The assumed role arn contains a value for account id empty or invalid, "
            "a valid account id must be composed of 12 numbers, please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)
class RoleArnParsingInvalidResourceType(Exception):
    """Raised when the ARN's resource type is anything other than 'role'."""

    def __init__(self):
        detail = (
            "The assumed role arn contains a value for resource type different "
            "than role, please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)
class RoleArnParsingEmptyResource(Exception):
    """Raised when the ARN's resource (the role name) is empty."""

    def __init__(self):
        detail = (
            "The assumed role arn does not contain a value for resource, "
            "please input a valid arn"
        )
        self.message = detail
        super().__init__(detail)

View File

@@ -0,0 +1,112 @@
import json
from itertools import groupby
from operator import itemgetter
from boto3 import session
from config.config import json_asff_file_suffix, timestamp_utc
from lib.logger import logger
from lib.outputs.models import Check_Output_JSON_ASFF
from providers.aws.models import AWS_Audit_Info
def send_to_security_hub(
    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
):
    """Send a single Prowler finding to AWS Security Hub in the given region.

    Best-effort: every error (including Security Hub not being enabled in the
    region, which makes describe_hub() raise) is logged and swallowed.

    Args:
        region: AWS region to deliver the finding to.
        finding_output: finding in ASFF shape; serialized with .dict().
        session: boto3 session used to build the securityhub client.
    """
    try:
        logger.info("Sending findings to Security Hub.")
        # Check if security hub is enabled in current region
        security_hub_client = session.client("securityhub", region_name=region)
        security_hub_client.describe_hub()
        # Check if Prowler integration is enabled in Security Hub
        # NOTE(review): we only log here and still attempt the import below --
        # confirm that is intended rather than an early return.
        if "prowler/prowler" not in str(
            security_hub_client.list_enabled_products_for_import()
        ):
            logger.error(
                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
            )
        # Send finding to Security Hub
        batch_import = security_hub_client.batch_import_findings(
            Findings=[finding_output.dict()]
        )
        if batch_import["FailedCount"] > 0:
            failed_import = batch_import["FailedFindings"][0]
            # BUGFIX: this path sends new findings, not archived ones; the old
            # message ("archived findings") was copy-pasted from the archiver.
            logger.error(
                f"Failed to send findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
            )
    except Exception as error:
        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
def resolve_security_hub_previous_findings(
    output_directory: str, audit_info: AWS_Audit_Info
) -> None:
    """Archive stale Prowler findings in Security Hub.

    Reads this run's json-asff output file and, for every region (one group
    per ProductArn), flags as ARCHIVED any ACTIVE Prowler finding already in
    Security Hub whose Id was not re-detected in this execution.

    Note: annotated ``-> None`` because nothing is ever returned (the previous
    ``-> list`` annotation was inaccurate). Per-region errors are logged and
    swallowed, so a region without Security Hub enabled is skipped silently.
    """
    logger.info("Checking previous findings in Security Hub to archive them.")
    # Read current findings from json-asff file
    with open(
        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
    ) as f:
        json_asff_file = json.load(f)
    # Sort by ProductArn first: groupby() only groups consecutive equal keys
    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
    # Group by region
    for product_arn, current_findings in groupby(
        json_asff_file, key=itemgetter("ProductArn")
    ):
        # ProductArn is "arn:partition:service:region:...", so field 3 is the region
        region = product_arn.split(":")[3]
        try:
            # Check if security hub is enabled in current region
            security_hub_client = audit_info.audit_session.client(
                "securityhub", region_name=region
            )
            security_hub_client.describe_hub()
            # Get current findings IDs
            current_findings_ids = []
            for finding in current_findings:
                current_findings_ids.append(finding["Id"])
            # Get findings of that region
            security_hub_client = audit_info.audit_session.client(
                "securityhub", region_name=region
            )
            findings_filter = {
                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
                "AwsAccountId": [
                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
                ],
                "Region": [{"Value": region, "Comparison": "EQUALS"}],
            }
            get_findings_paginator = security_hub_client.get_paginator("get_findings")
            findings_to_archive = []
            for page in get_findings_paginator.paginate(Filters=findings_filter):
                # Archive findings that did not appear in this execution
                for finding in page["Findings"]:
                    if finding["Id"] not in current_findings_ids:
                        finding["RecordState"] = "ARCHIVED"
                        finding["UpdatedAt"] = timestamp_utc.strftime(
                            "%Y-%m-%dT%H:%M:%SZ"
                        )
                        findings_to_archive.append(finding)
            logger.info(f"Archiving {len(findings_to_archive)} findings.")
            # Send archive findings to SHub in chunks of 100
            # (batch_import_findings accepts at most 100 findings per call)
            list_chunked = [
                findings_to_archive[i : i + 100]
                for i in range(0, len(findings_to_archive), 100)
            ]
            for findings in list_chunked:
                batch_import = security_hub_client.batch_import_findings(
                    Findings=findings
                )
                if batch_import["FailedCount"] > 0:
                    failed_import = batch_import["FailedFindings"][0]
                    logger.error(
                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
                    )
        except Exception as error:
            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")

View File

@@ -1,5 +1,6 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_network_acl, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.network_acls import check_network_acl
class ec2_networkacl_allow_ingress_tcp_port_22(Check):

View File

@@ -1,5 +1,6 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_network_acl, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.network_acls import check_network_acl
class ec2_networkacl_allow_ingress_tcp_port_3389(Check):

View File

@@ -1,5 +1,6 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.security_groups import check_security_group
class ec2_securitygroup_allow_ingress_from_internet_to_any_port(Check):

View File

@@ -1,5 +1,6 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.security_groups import check_security_group
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22(Check):

View File

@@ -1,5 +1,7 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.security_groups import check_security_group
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389(Check):
def execute(self):

View File

@@ -1,5 +1,7 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.security_groups import check_security_group
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306(Check):
def execute(self):

View File

@@ -1,5 +1,7 @@
from lib.check.models import Check, Check_Report
from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
from providers.aws.services.ec2.ec2_service import ec2_client
from providers.aws.services.ec2.lib.security_groups import check_security_group
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483(Check):
def execute(self):
@@ -23,4 +25,4 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483
report.resource_id = security_group.id
findings.append(report)
return findings
return findings

View File

@@ -1,6 +1,5 @@
import threading
from dataclasses import dataclass
from typing import Any
from lib.logger import logger
from providers.aws.aws_provider import current_audit_info, generate_regional_clients
@@ -238,76 +237,3 @@ class NetworkACL:
ec2_client = EC2(current_audit_info)
################## Security Groups
# Check if the security group ingress rule has public access to the check_ports using the protocol
def check_security_group(ingress_rule: Any, protocol: str, ports: list = None) -> bool:
    """Return True if the ingress rule exposes any of ``ports`` to the Internet.

    Args:
        ingress_rule: one entry of a security group's IpPermissions (a dict
            with IpProtocol, IpRanges, Ipv6Ranges and optionally From/ToPort).
        protocol: IP protocol to match ("tcp", "udp", ...; "-1" means all).
        ports: ports to test for public exposure (defaults to no ports).

    Returns:
        True if the rule is open to 0.0.0.0/0 or ::/0 for the requested
        protocol/ports (or is an all-traffic / all-ports rule), else False.
    """
    # Replaces the mutable default argument `ports: list = []` of the original;
    # behavior for callers that omit the argument is unchanged.
    if ports is None:
        ports = []
    public_IPv4 = "0.0.0.0/0"
    public_IPv6 = "::/0"
    # Check for all traffic ingress rules regardless of the protocol
    if ingress_rule["IpProtocol"] == "-1" and (
        public_IPv4 in str(ingress_rule["IpRanges"])
        or public_IPv6 in str(ingress_rule["Ipv6Ranges"])
    ):
        return True
    # Check for specific ports in ingress rules
    if "FromPort" in ingress_rule:
        # All ports
        # NOTE(review): this reports True without checking the CIDR is public,
        # mirroring the original behavior -- confirm this is intended.
        if ingress_rule["FromPort"] == 0 and ingress_rule["ToPort"] == 65535:
            return True
        # Inclusive port range covered by the rule; when FromPort == ToPort
        # this is a single port (replaces the original manual list building)
        ingress_port_range = range(
            int(ingress_rule["FromPort"]), int(ingress_rule["ToPort"]) + 1
        )
        # Test Security Group: any requested port publicly reachable over `protocol`
        for port in ports:
            if (
                (
                    public_IPv4 in str(ingress_rule["IpRanges"])
                    or public_IPv6 in str(ingress_rule["Ipv6Ranges"])
                )
                and port in ingress_port_range
                and ingress_rule["IpProtocol"] == protocol
            ):
                return True
    return False
################## Network ACLs
# Check if the network acls ingress rule has public access to the check_ports using the protocol
def check_network_acl(entry: Any, protocol: str, port: str, ip_version: str) -> bool:
    """Return True if the NACL entry allows public ingress to ``port``.

    Args:
        entry: one Network ACL entry (dict with CidrBlock/Ipv6CidrBlock,
            RuleAction, Egress, Protocol and optionally PortRange).
        protocol: protocol number as a string; entries with Protocol "-1"
            (all traffic) match regardless of protocol/port.
        port: port that the entry's PortRange must equal exactly.
        ip_version: "IPv4" or "IPv6".

    Raises:
        ValueError: if ip_version is neither "IPv4" nor "IPv6" (the original
            code failed with UnboundLocalError in that case).
    """
    # For IPv4
    if ip_version == "IPv4":
        entry_value = "CidrBlock"
        public_ip = "0.0.0.0/0"
    # For IPv6
    elif ip_version == "IPv6":
        entry_value = "Ipv6CidrBlock"
        public_ip = "::/0"
    else:
        # BUGFIX: previously fell through with entry_value unbound
        raise ValueError(f"ip_version must be 'IPv4' or 'IPv6', got {ip_version!r}")
    if (
        entry[entry_value] == public_ip
        and entry["RuleAction"] == "allow"
        and not entry["Egress"]
    ):
        # NOTE(review): only single-port entries (From == To == port) match;
        # an entry covering a wider range that includes `port` does not --
        # preserved from the original, confirm this is intended.
        if entry["Protocol"] == "-1" or (
            entry["PortRange"]["From"] == port
            and entry["PortRange"]["To"] == port
            and entry["Protocol"] == protocol
        ):
            return True
    return False

View File

@@ -0,0 +1,28 @@
from typing import Any
################## Network ACLs
# Check if the network acls ingress rule has public access to the check_ports using the protocol
def check_network_acl(entry: Any, protocol: str, port: str, ip_version: str) -> bool:
    """Return True if the NACL entry allows public ingress to ``port``.

    Args:
        entry: one Network ACL entry (dict with CidrBlock/Ipv6CidrBlock,
            RuleAction, Egress, Protocol and optionally PortRange).
        protocol: protocol number as a string; entries with Protocol "-1"
            (all traffic) match regardless of protocol/port.
        port: port that the entry's PortRange must equal exactly.
        ip_version: "IPv4" or "IPv6".

    Raises:
        ValueError: if ip_version is neither "IPv4" nor "IPv6" (the original
            code failed with UnboundLocalError in that case).
    """
    # For IPv4
    if ip_version == "IPv4":
        entry_value = "CidrBlock"
        public_ip = "0.0.0.0/0"
    # For IPv6
    elif ip_version == "IPv6":
        entry_value = "Ipv6CidrBlock"
        public_ip = "::/0"
    else:
        # BUGFIX: previously fell through with entry_value unbound
        raise ValueError(f"ip_version must be 'IPv4' or 'IPv6', got {ip_version!r}")
    if (
        entry[entry_value] == public_ip
        and entry["RuleAction"] == "allow"
        and not entry["Egress"]
    ):
        # NOTE(review): only single-port entries (From == To == port) match;
        # an entry covering a wider range that includes `port` does not --
        # preserved from the original, confirm this is intended.
        if entry["Protocol"] == "-1" or (
            entry["PortRange"]["From"] == port
            and entry["PortRange"]["To"] == port
            and entry["Protocol"] == protocol
        ):
            return True
    return False

View File

@@ -0,0 +1,48 @@
from typing import Any
################## Security Groups
# Check if the security group ingress rule has public access to the check_ports using the protocol
def check_security_group(ingress_rule: Any, protocol: str, ports: list = None) -> bool:
    """Return True if the ingress rule exposes any of ``ports`` to the Internet.

    Args:
        ingress_rule: one entry of a security group's IpPermissions (a dict
            with IpProtocol, IpRanges, Ipv6Ranges and optionally From/ToPort).
        protocol: IP protocol to match ("tcp", "udp", ...; "-1" means all).
        ports: ports to test for public exposure (defaults to no ports).

    Returns:
        True if the rule is open to 0.0.0.0/0 or ::/0 for the requested
        protocol/ports (or is an all-traffic / all-ports rule), else False.
    """
    # Replaces the mutable default argument `ports: list = []` of the original;
    # behavior for callers that omit the argument is unchanged.
    if ports is None:
        ports = []
    public_IPv4 = "0.0.0.0/0"
    public_IPv6 = "::/0"
    # Check for all traffic ingress rules regardless of the protocol
    if ingress_rule["IpProtocol"] == "-1" and (
        public_IPv4 in str(ingress_rule["IpRanges"])
        or public_IPv6 in str(ingress_rule["Ipv6Ranges"])
    ):
        return True
    # Check for specific ports in ingress rules
    if "FromPort" in ingress_rule:
        # All ports
        # NOTE(review): this reports True without checking the CIDR is public,
        # mirroring the original behavior -- confirm this is intended.
        if ingress_rule["FromPort"] == 0 and ingress_rule["ToPort"] == 65535:
            return True
        # Inclusive port range covered by the rule; when FromPort == ToPort
        # this is a single port (replaces the original manual list building)
        ingress_port_range = range(
            int(ingress_rule["FromPort"]), int(ingress_rule["ToPort"]) + 1
        )
        # Test Security Group: any requested port publicly reachable over `protocol`
        for port in ports:
            if (
                (
                    public_IPv4 in str(ingress_rule["IpRanges"])
                    or public_IPv6 in str(ingress_rule["Ipv6Ranges"])
                )
                and port in ingress_port_range
                and ingress_rule["IpProtocol"] == protocol
            ):
                return True
    return False