mirror of https://github.com/ghndrx/prowler.git (synced 2026-02-10 14:55:00 +00:00)

chore: Move shared to lib/ for AWS (#1321)

* chore: Move shared to lib/
* chore: Move shared to lib/ for AWS

Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
@@ -150,7 +150,8 @@ def recover_checks_from_provider(provider: str, service: str = None) -> list:
     for module_name in modules:
         # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
         check_name = module_name.name
-        if check_name.count(".") == 5:
+        # We need to exclude common shared libraries in services
+        if check_name.count(".") == 5 and "lib" not in check_name:
             checks.append(check_name)
     return checks

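As a quick illustration of the new filter, a minimal sketch (the module names below are hypothetical, not taken from the repository) showing that real check modules keep their five-dot pattern while shared "lib" modules are skipped:

    # Hypothetical module names; the real list comes from package discovery in recover_checks_from_provider.
    module_names = [
        "providers.aws.services.ec2.ec2_networkacl_allow_ingress_tcp_port_22.ec2_networkacl_allow_ingress_tcp_port_22",
        "providers.aws.services.ec2.lib.security_groups",
    ]
    checks = [m for m in module_names if m.count(".") == 5 and "lib" not in m]
    print(checks)  # only the first entry survives; the shared library module is excluded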
@@ -22,7 +22,7 @@ from lib.outputs.models import (
     Severity,
 )
 from lib.utils.utils import file_exists, hash_sha512, open_file
-from providers.aws.aws_provider import send_to_security_hub
+from providers.aws.lib.security_hub import send_to_security_hub


 def report(check_findings, output_options, audit_info):
@@ -1,7 +1,4 @@
-import json
 import sys
-from itertools import groupby
-from operator import itemgetter

 from arnparse import arnparse
 from boto3 import client, session
@@ -9,11 +6,10 @@ from botocore.credentials import RefreshableCredentials
 from botocore.session import get_session
 from colorama import Fore, Style

-from config.config import aws_services_json_file, json_asff_file_suffix, timestamp_utc
-from lib.arn.arn import arn_parsing
+from config.config import aws_services_json_file
 from lib.logger import logger
-from lib.outputs.models import Check_Output_JSON_ASFF
 from lib.utils.utils import open_file, parse_json_file
+from providers.aws.lib.arn.arn import arn_parsing
 from providers.aws.models import (
     AWS_Assume_Role,
     AWS_Audit_Info,
@@ -326,105 +322,3 @@ def generate_regional_clients(service, audit_info):
             regional_clients[region] = regional_client
             # regional_clients.append(regional_client)
     return regional_clients
-
-
-def send_to_security_hub(
-    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
-):
-    try:
-        logger.info("Sending findings to Security Hub.")
-        # Check if security hub is enabled in current region
-        security_hub_client = session.client("securityhub", region_name=region)
-        security_hub_client.describe_hub()
-
-        # Check if Prowler integration is enabled in Security Hub
-        if "prowler/prowler" not in str(
-            security_hub_client.list_enabled_products_for_import()
-        ):
-            logger.error(
-                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
-            )
-
-        # Send finding to Security Hub
-        batch_import = security_hub_client.batch_import_findings(
-            Findings=[finding_output.dict()]
-        )
-        if batch_import["FailedCount"] > 0:
-            failed_import = batch_import["FailedFindings"][0]
-            logger.error(
-                f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
-            )
-
-    except Exception as error:
-        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
-
-
-# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
-def resolve_security_hub_previous_findings(
-    output_directory: str, audit_info: AWS_Audit_Info
-) -> list:
-    logger.info("Checking previous findings in Security Hub to archive them.")
-    # Read current findings from json-asff file
-    with open(
-        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
-    ) as f:
-        json_asff_file = json.load(f)
-
-    # Sort by region
-    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
-    # Group by region
-    for product_arn, current_findings in groupby(
-        json_asff_file, key=itemgetter("ProductArn")
-    ):
-        region = product_arn.split(":")[3]
-        try:
-            # Check if security hub is enabled in current region
-            security_hub_client = audit_info.audit_session.client(
-                "securityhub", region_name=region
-            )
-            security_hub_client.describe_hub()
-            # Get current findings IDs
-            current_findings_ids = []
-            for finding in current_findings:
-                current_findings_ids.append(finding["Id"])
-            # Get findings of that region
-            security_hub_client = audit_info.audit_session.client(
-                "securityhub", region_name=region
-            )
-            findings_filter = {
-                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
-                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
-                "AwsAccountId": [
-                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
-                ],
-                "Region": [{"Value": region, "Comparison": "EQUALS"}],
-            }
-            get_findings_paginator = security_hub_client.get_paginator("get_findings")
-            findings_to_archive = []
-            for page in get_findings_paginator.paginate(Filters=findings_filter):
-                # Archive findings that have not appear in this execution
-                for finding in page["Findings"]:
-                    if finding["Id"] not in current_findings_ids:
-                        finding["RecordState"] = "ARCHIVED"
-                        finding["UpdatedAt"] = timestamp_utc.strftime(
-                            "%Y-%m-%dT%H:%M:%SZ"
-                        )
-
-                        findings_to_archive.append(finding)
-            logger.info(f"Archiving {len(findings_to_archive)} findings.")
-            # Send archive findings to SHub
-            list_chunked = [
-                findings_to_archive[i : i + 100]
-                for i in range(0, len(findings_to_archive), 100)
-            ]
-            for findings in list_chunked:
-                batch_import = security_hub_client.batch_import_findings(
-                    Findings=findings
-                )
-                if batch_import["FailedCount"] > 0:
-                    failed_import = batch_import["FailedFindings"][0]
-                    logger.error(
-                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
-                    )
-        except Exception as error:
-            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
New file: providers/aws/lib/arn/__init__.py (0 lines)
@@ -1,6 +1,6 @@
 from arnparse import arnparse

-from lib.arn.error import (
+from providers.aws.lib.arn.error import (
     RoleArnParsingEmptyResource,
     RoleArnParsingFailedMissingFields,
     RoleArnParsingIAMRegionNotEmpty,
@@ -1,6 +1,6 @@
 import sure  # noqa

-from lib.arn.arn import arn_parsing
+from providers.aws.lib.arn.arn import arn_parsing

 ACCOUNT_ID = "123456789012"
 RESOURCE_TYPE = "role"
New file: providers/aws/lib/security_hub.py (112 lines)
@@ -0,0 +1,112 @@
+import json
+from itertools import groupby
+from operator import itemgetter
+
+from boto3 import session
+
+from config.config import json_asff_file_suffix, timestamp_utc
+from lib.logger import logger
+from lib.outputs.models import Check_Output_JSON_ASFF
+from providers.aws.models import AWS_Audit_Info
+
+
+def send_to_security_hub(
+    region: str, finding_output: Check_Output_JSON_ASFF, session: session.Session
+):
+    try:
+        logger.info("Sending findings to Security Hub.")
+        # Check if security hub is enabled in current region
+        security_hub_client = session.client("securityhub", region_name=region)
+        security_hub_client.describe_hub()
+
+        # Check if Prowler integration is enabled in Security Hub
+        if "prowler/prowler" not in str(
+            security_hub_client.list_enabled_products_for_import()
+        ):
+            logger.error(
+                f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://github.com/prowler-cloud/prowler/#security-hub-integration"
+            )
+
+        # Send finding to Security Hub
+        batch_import = security_hub_client.batch_import_findings(
+            Findings=[finding_output.dict()]
+        )
+        if batch_import["FailedCount"] > 0:
+            failed_import = batch_import["FailedFindings"][0]
+            logger.error(
+                f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
+            )
+
+    except Exception as error:
+        logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
+
+
+# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)
+def resolve_security_hub_previous_findings(
+    output_directory: str, audit_info: AWS_Audit_Info
+) -> list:
+    logger.info("Checking previous findings in Security Hub to archive them.")
+    # Read current findings from json-asff file
+    with open(
+        f"{output_directory}/prowler-output-{audit_info.audited_account}-{json_asff_file_suffix}"
+    ) as f:
+        json_asff_file = json.load(f)
+
+    # Sort by region
+    json_asff_file = sorted(json_asff_file, key=itemgetter("ProductArn"))
+    # Group by region
+    for product_arn, current_findings in groupby(
+        json_asff_file, key=itemgetter("ProductArn")
+    ):
+        region = product_arn.split(":")[3]
+        try:
+            # Check if security hub is enabled in current region
+            security_hub_client = audit_info.audit_session.client(
+                "securityhub", region_name=region
+            )
+            security_hub_client.describe_hub()
+            # Get current findings IDs
+            current_findings_ids = []
+            for finding in current_findings:
+                current_findings_ids.append(finding["Id"])
+            # Get findings of that region
+            security_hub_client = audit_info.audit_session.client(
+                "securityhub", region_name=region
+            )
+            findings_filter = {
+                "ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}],
+                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
+                "AwsAccountId": [
+                    {"Value": audit_info.audited_account, "Comparison": "EQUALS"}
+                ],
+                "Region": [{"Value": region, "Comparison": "EQUALS"}],
+            }
+            get_findings_paginator = security_hub_client.get_paginator("get_findings")
+            findings_to_archive = []
+            for page in get_findings_paginator.paginate(Filters=findings_filter):
+                # Archive findings that have not appear in this execution
+                for finding in page["Findings"]:
+                    if finding["Id"] not in current_findings_ids:
+                        finding["RecordState"] = "ARCHIVED"
+                        finding["UpdatedAt"] = timestamp_utc.strftime(
+                            "%Y-%m-%dT%H:%M:%SZ"
+                        )
+
+                        findings_to_archive.append(finding)
+            logger.info(f"Archiving {len(findings_to_archive)} findings.")
+            # Send archive findings to SHub
+            list_chunked = [
+                findings_to_archive[i : i + 100]
+                for i in range(0, len(findings_to_archive), 100)
+            ]
+            for findings in list_chunked:
+                batch_import = security_hub_client.batch_import_findings(
+                    Findings=findings
+                )
+                if batch_import["FailedCount"] > 0:
+                    failed_import = batch_import["FailedFindings"][0]
+                    logger.error(
+                        f"Failed to send archived findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}"
+                    )
+        except Exception as error:
+            logger.error(f"{error.__class__.__name__} -- {error} in region {region}")
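Security Hub's BatchImportFindings API accepts at most 100 findings per request, which is why the function above slices findings_to_archive into chunks of 100 before calling batch_import_findings. A standalone sketch of that chunking pattern (the sample data is hypothetical):

    # Hypothetical findings; real entries are ASFF finding dicts read from the JSON-ASFF output file.
    findings_to_archive = [{"Id": f"finding-{i}"} for i in range(250)]
    list_chunked = [
        findings_to_archive[i : i + 100]
        for i in range(0, len(findings_to_archive), 100)
    ]
    print([len(chunk) for chunk in list_chunked])  # [100, 100, 50]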
@@ -1,5 +1,6 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_network_acl, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.network_acls import check_network_acl


 class ec2_networkacl_allow_ingress_tcp_port_22(Check):
@@ -1,5 +1,6 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_network_acl, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.network_acls import check_network_acl


 class ec2_networkacl_allow_ingress_tcp_port_3389(Check):
@@ -1,5 +1,6 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.security_groups import check_security_group


 class ec2_securitygroup_allow_ingress_from_internet_to_any_port(Check):
@@ -1,5 +1,6 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.security_groups import check_security_group


 class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22(Check):
@@ -1,5 +1,7 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.security_groups import check_security_group


 class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389(Check):
     def execute(self):
@@ -1,5 +1,7 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.security_groups import check_security_group


 class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306(Check):
     def execute(self):
@@ -1,5 +1,7 @@
 from lib.check.models import Check, Check_Report
-from providers.aws.services.ec2.ec2_service import check_security_group, ec2_client
+from providers.aws.services.ec2.ec2_service import ec2_client
+from providers.aws.services.ec2.lib.security_groups import check_security_group


 class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483(Check):
     def execute(self):

@@ -23,4 +25,4 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483
                 report.resource_id = security_group.id
             findings.append(report)

-        return findings
+        return findings
@@ -1,6 +1,5 @@
 import threading
 from dataclasses import dataclass
-from typing import Any

 from lib.logger import logger
 from providers.aws.aws_provider import current_audit_info, generate_regional_clients
@@ -238,76 +237,3 @@ class NetworkACL:


 ec2_client = EC2(current_audit_info)
-
-
-################## Security Groups
-# Check if the security group ingress rule has public access to the check_ports using the protocol
-def check_security_group(ingress_rule: Any, protocol: str, ports: list = []) -> bool:
-    public_IPv4 = "0.0.0.0/0"
-    public_IPv6 = "::/0"
-
-    # Check for all traffic ingress rules regardless of the protocol
-    if ingress_rule["IpProtocol"] == "-1" and (
-        (
-            "0.0.0.0/0" in str(ingress_rule["IpRanges"])
-            or "::/0" in str(ingress_rule["Ipv6Ranges"])
-        )
-    ):
-        return True
-
-    # Check for specific ports in ingress rules
-    if "FromPort" in ingress_rule:
-        # All ports
-        if ingress_rule["FromPort"] == 0 and ingress_rule["ToPort"] == 65535:
-            return True
-
-        # If there is a port range
-        if ingress_rule["FromPort"] != ingress_rule["ToPort"]:
-            # Calculate port range, adding 1
-            diff = (ingress_rule["ToPort"] - ingress_rule["FromPort"]) + 1
-            ingress_port_range = []
-            for x in range(diff):
-                ingress_port_range.append(int(ingress_rule["FromPort"]) + x)
-        # If FromPort and ToPort are the same
-        else:
-            ingress_port_range = []
-            ingress_port_range.append(int(ingress_rule["FromPort"]))
-
-        # Test Security Group
-        for port in ports:
-            if (
-                (
-                    public_IPv4 in str(ingress_rule["IpRanges"])
-                    or public_IPv6 in str(ingress_rule["Ipv6Ranges"])
-                )
-                and port in ingress_port_range
-                and ingress_rule["IpProtocol"] == protocol
-            ):
-                return True
-    return False
-
-
-################## Network ACLs
-# Check if the network acls ingress rule has public access to the check_ports using the protocol
-def check_network_acl(entry: Any, protocol: str, port: str, ip_version: str) -> bool:
-    # For IPv4
-    if ip_version == "IPv4":
-        entry_value = "CidrBlock"
-        public_ip = "0.0.0.0/0"
-    # For IPv6
-    elif ip_version == "IPv6":
-        entry_value = "Ipv6CidrBlock"
-        public_ip = "::/0"
-
-    if (
-        entry[entry_value] == public_ip
-        and entry["RuleAction"] == "allow"
-        and not entry["Egress"]
-    ):
-        if entry["Protocol"] == "-1" or (
-            entry["PortRange"]["From"] == port
-            and entry["PortRange"]["To"] == port
-            and entry["Protocol"] == protocol
-        ):
-            return True
-
-    return False
New file: providers/aws/services/ec2/lib/__init__.py (0 lines)
New file: providers/aws/services/ec2/lib/network_acls.py (28 lines)
@@ -0,0 +1,28 @@
+from typing import Any
+
+
+################## Network ACLs
+# Check if the network acls ingress rule has public access to the check_ports using the protocol
+def check_network_acl(entry: Any, protocol: str, port: str, ip_version: str) -> bool:
+    # For IPv4
+    if ip_version == "IPv4":
+        entry_value = "CidrBlock"
+        public_ip = "0.0.0.0/0"
+    # For IPv6
+    elif ip_version == "IPv6":
+        entry_value = "Ipv6CidrBlock"
+        public_ip = "::/0"
+
+    if (
+        entry[entry_value] == public_ip
+        and entry["RuleAction"] == "allow"
+        and not entry["Egress"]
+    ):
+        if entry["Protocol"] == "-1" or (
+            entry["PortRange"]["From"] == port
+            and entry["PortRange"]["To"] == port
+            and entry["Protocol"] == protocol
+        ):
+            return True
+
+    return False
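For reference, a minimal usage sketch of the relocated helper, using a hypothetical NACL entry shaped like the fields check_network_acl reads above (CidrBlock, RuleAction, Egress, Protocol, PortRange):

    from providers.aws.services.ec2.lib.network_acls import check_network_acl

    # Hypothetical entry: an open ingress rule allowing TCP port 22 from anywhere.
    entry = {
        "CidrBlock": "0.0.0.0/0",
        "RuleAction": "allow",
        "Egress": False,
        "Protocol": "6",
        "PortRange": {"From": 22, "To": 22},
    }
    print(check_network_acl(entry, protocol="6", port=22, ip_version="IPv4"))  # True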
New file: providers/aws/services/ec2/lib/security_groups.py (48 lines)
@@ -0,0 +1,48 @@
+from typing import Any
+
+
+################## Security Groups
+# Check if the security group ingress rule has public access to the check_ports using the protocol
+def check_security_group(ingress_rule: Any, protocol: str, ports: list = []) -> bool:
+    public_IPv4 = "0.0.0.0/0"
+    public_IPv6 = "::/0"
+
+    # Check for all traffic ingress rules regardless of the protocol
+    if ingress_rule["IpProtocol"] == "-1" and (
+        (
+            "0.0.0.0/0" in str(ingress_rule["IpRanges"])
+            or "::/0" in str(ingress_rule["Ipv6Ranges"])
+        )
+    ):
+        return True
+
+    # Check for specific ports in ingress rules
+    if "FromPort" in ingress_rule:
+        # All ports
+        if ingress_rule["FromPort"] == 0 and ingress_rule["ToPort"] == 65535:
+            return True
+
+        # If there is a port range
+        if ingress_rule["FromPort"] != ingress_rule["ToPort"]:
+            # Calculate port range, adding 1
+            diff = (ingress_rule["ToPort"] - ingress_rule["FromPort"]) + 1
+            ingress_port_range = []
+            for x in range(diff):
+                ingress_port_range.append(int(ingress_rule["FromPort"]) + x)
+        # If FromPort and ToPort are the same
+        else:
+            ingress_port_range = []
+            ingress_port_range.append(int(ingress_rule["FromPort"]))
+
+        # Test Security Group
+        for port in ports:
+            if (
+                (
+                    public_IPv4 in str(ingress_rule["IpRanges"])
+                    or public_IPv6 in str(ingress_rule["Ipv6Ranges"])
+                )
+                and port in ingress_port_range
+                and ingress_rule["IpProtocol"] == protocol
+            ):
+                return True
+    return False
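Similarly, a minimal usage sketch of check_security_group with a hypothetical ingress rule dict matching the keys the helper reads above (IpProtocol, FromPort, ToPort, IpRanges, Ipv6Ranges):

    from providers.aws.services.ec2.lib.security_groups import check_security_group

    # Hypothetical ingress rule: SSH open to the world over IPv4.
    ingress_rule = {
        "IpProtocol": "tcp",
        "FromPort": 22,
        "ToPort": 22,
        "IpRanges": [{"CidrIp": "0.0.0.0/0"}],
        "Ipv6Ranges": [],
    }
    print(check_security_group(ingress_rule, protocol="tcp", ports=[22]))  # True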
prowler (6 lines changed)
@@ -24,10 +24,8 @@ from lib.check.check import (
 from lib.check.checks_loader import load_checks_to_execute
 from lib.logger import logger, set_logging_config
 from lib.outputs.outputs import close_json
-from providers.aws.aws_provider import (
-    provider_set_session,
-    resolve_security_hub_previous_findings,
-)
+from providers.aws.aws_provider import provider_set_session
+from providers.aws.lib.security_hub import resolve_security_hub_previous_findings

 if __name__ == "__main__":
     # CLI Arguments