feat(quick_inventory): add quick inventory (#1533)

Co-authored-by: sergargar <sergio@verica.io>
This commit is contained in:
Sergio Garcia
2022-12-07 19:02:05 +01:00
committed by GitHub
parent 7bffe6b2d5
commit fbb39a364e
4 changed files with 276 additions and 50 deletions

View File

@@ -340,5 +340,5 @@ def execute_checks(
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
bar.title = f"-> {Fore.GREEN}Scan is completed!{Style.RESET_ALL}"
bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"
return all_findings

View File

@@ -0,0 +1,216 @@
import csv
import json
from alive_progress import alive_bar
from colorama import Fore, Style
from tabulate import tabulate
from config.config import (
csv_file_suffix,
json_file_suffix,
orange_color,
output_file_timestamp,
)
from lib.logger import logger
from providers.aws.lib.audit_info.models import AWS_Audit_Info
def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
    """Enumerate resources in every audited region and print/store an inventory.

    Collects resource ARNs per region through the Resource Groups Tagging API
    (plus IAM resources, listed once alongside us-east-1 since IAM is global),
    prints a per-service summary table and writes CSV/JSON inventory files.

    Args:
        audit_info: AWS audit session/metadata; ``audited_regions`` is filled
            in with all available regions when the user supplied none.
        output_directory: Directory where the CSV/JSON inventory files go.
    """
    print(
        f"-=- Running Quick Inventory for AWS Account {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL} -=-\n"
    )
    resources = []
    # If no regions were given as input, scan all of them
    if not audit_info.audited_regions:
        # EC2 client for describing all regions
        ec2_client = audit_info.audit_session.client("ec2", region_name="us-east-1")
        # Get all the available regions
        audit_info.audited_regions = [
            region["RegionName"] for region in ec2_client.describe_regions()["Regions"]
        ]

    with alive_bar(
        total=len(audit_info.audited_regions),
        ctrl_c=False,
        bar="blocks",
        spinner="classic",
        stats=False,
        enrich_print=False,
    ) as bar:
        for region in sorted(audit_info.audited_regions):
            bar.title = f"-> Scanning {orange_color}{region}{Style.RESET_ALL} region"
            resources_in_region = []
            try:
                # IAM is a global service: list its resources only once, when
                # processing us-east-1. Client creation is hoisted into the
                # branch so every other region skips it entirely.
                if region == "us-east-1":
                    iam_client = audit_info.audit_session.client("iam")
                    get_roles_paginator = iam_client.get_paginator("list_roles")
                    for page in get_roles_paginator.paginate():
                        for role in page["Roles"]:
                            # Avoid aws-service-role roles
                            if "aws-service-role" not in role["Arn"]:
                                resources_in_region.append(role["Arn"])
                    get_users_paginator = iam_client.get_paginator("list_users")
                    for page in get_users_paginator.paginate():
                        for user in page["Users"]:
                            resources_in_region.append(user["Arn"])
                    get_groups_paginator = iam_client.get_paginator("list_groups")
                    for page in get_groups_paginator.paginate():
                        for group in page["Groups"]:
                            resources_in_region.append(group["Arn"])
                    get_policies_paginator = iam_client.get_paginator("list_policies")
                    # Scope="Local" restricts the listing to customer-managed policies
                    for page in get_policies_paginator.paginate(Scope="Local"):
                        for policy in page["Policies"]:
                            resources_in_region.append(policy["Arn"])
                    for saml_provider in iam_client.list_saml_providers()[
                        "SAMLProviderList"
                    ]:
                        resources_in_region.append(saml_provider["Arn"])

                client = audit_info.audit_session.client(
                    "resourcegroupstaggingapi", region_name=region
                )
                # Get all the resources in the region
                # (removed the unused `resources_count` tally)
                get_resources_paginator = client.get_paginator("get_resources")
                for page in get_resources_paginator.paginate():
                    for resource in page["ResourceTagMappingList"]:
                        resources_in_region.append(resource["ResourceARN"])
                bar()
                print(
                    f"Found {Fore.GREEN}{len(resources_in_region)}{Style.RESET_ALL} resources in region {Fore.YELLOW}{region}{Style.RESET_ALL}"
                )
                print("\n")
            except Exception as error:
                logger.error(
                    f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
                # Still advance the bar so it can reach completion after an error
                bar()
            resources.extend(resources_in_region)
        bar.title = f"-> {Fore.GREEN}Quick Inventory completed!{Style.RESET_ALL}"

    inventory_table = create_inventory_table(resources)
    print(
        f"\nQuick Inventory of AWS Account {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL}:"
    )
    print(tabulate(inventory_table, headers="keys", tablefmt="rounded_grid"))
    print(f"\nTotal resources found: {Fore.GREEN}{len(resources)}{Style.RESET_ALL}")
    create_output(resources, audit_info, output_directory)
def create_inventory_table(resources: list) -> dict:
    """Build a tabulate-ready dict summarising resources per service.

    Returns a dict of three parallel columns: "Service", "Total" (coloured
    count per service) and "Count per resource types" (one newline-separated
    entry per resource type within the service).
    """
    # Total resource count per service, e.g. {"s3": 123, "iam": 239}
    services = {}
    # Nested counts per service and resource type,
    # e.g. {"iam": {"role": 143, "user": 22}}
    resources_type = {}
    for arn in sorted(resources):
        arn_fields = arn.split(":")
        service = arn_fields[2]
        services[service] = services.get(service, 0) + 1
        # S3 ARNs carry no explicit type segment — they are always buckets
        kind = "bucket" if service == "s3" else arn_fields[5].split("/")[0]
        per_service = resources_type.setdefault(service, {})
        per_service[kind] = per_service.get(kind, 0) + 1

    # Assemble the parallel columns expected by tabulate(headers="keys")
    inventory_table = {
        "Service": [],
        "Total": [],
        "Count per resource types": [],
    }
    for service, total in services.items():
        inventory_table["Service"].append(f"{service}")
        inventory_table["Total"].append(
            f"{Fore.GREEN}{total}{Style.RESET_ALL}"
        )
        per_type_summary = "".join(
            f"{kind} {Fore.GREEN}{count}{Style.RESET_ALL}\n"
            for kind, count in resources_type[service].items()
        )
        inventory_table["Count per resource types"].append(per_type_summary)
    return inventory_table
def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory: str):
    """Write the inventory as JSON and CSV files and print their paths.

    Each ARN in ``resources`` is decomposed into account / region / partition /
    service / resource-type / resource-id fields, with special-casing for the
    S3, WAFv2 and Config ARN formats.

    Args:
        resources: List of resource ARN strings.
        audit_info: Audit metadata (account id is embedded in the file names).
        output_directory: Directory where the output files are created.
    """
    json_output = []
    output_file = f"{output_directory}/prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
    for item in sorted(resources):
        # Hoist the colon split: every field below reads from the same parts
        arn_parts = item.split(":")
        resource = {}
        resource["AWS_AccountID"] = audit_info.audited_account
        resource["AWS_Region"] = arn_parts[3]
        resource["AWS_Partition"] = arn_parts[1]
        resource["AWS_Service"] = arn_parts[2]
        resource["AWS_ResourceType"] = arn_parts[5].split("/")[0]
        resource["AWS_ResourceID"] = ""
        if len(item.split("/")) > 1:
            resource["AWS_ResourceID"] = item.split("/")[-1]
        elif len(arn_parts) > 6:
            resource["AWS_ResourceID"] = arn_parts[-1]
        resource["AWS_ResourceARN"] = item
        # Cover S3 case: the ARN suffix is the bucket name, no type segment
        if resource["AWS_Service"] == "s3":
            resource["AWS_ResourceType"] = "bucket"
            resource["AWS_ResourceID"] = arn_parts[-1]
        # Cover WAFv2 case: the type spans the scope/resource path segments
        if resource["AWS_Service"] == "wafv2":
            resource["AWS_ResourceType"] = "/".join(arn_parts[-1].split("/")[:-2])
            resource["AWS_ResourceID"] = "/".join(arn_parts[-1].split("/")[2:])
        # Cover Config case: drop the leading type segment from the id
        if resource["AWS_Service"] == "config":
            resource["AWS_ResourceID"] = "/".join(arn_parts[-1].split("/")[1:])
        json_output.append(resource)

    # Write the JSON inventory
    with open(output_file + json_file_suffix, "w") as outfile:
        outfile.write(json.dumps(json_output, indent=4))

    # Write the CSV inventory. A context manager guarantees the handle is
    # closed even on error (the original left it open on exceptions), and
    # DictWriter replaces the manual first-row header bookkeeping.
    with open(output_file + csv_file_suffix, "w", newline="") as csv_file:
        if json_output:
            csv_writer = csv.DictWriter(csv_file, fieldnames=json_output[0].keys())
            csv_writer.writeheader()
            csv_writer.writerows(json_output)

    print("\nMore details in files:")
    print(f" - CSV: {Fore.GREEN}{output_file+csv_file_suffix}{Style.RESET_ALL}")
    print(f" - JSON: {Fore.GREEN}{output_file+json_file_suffix}{Style.RESET_ALL}")

108
prowler
View File

@@ -39,6 +39,7 @@ from lib.outputs.outputs import (
)
from providers.aws.aws_provider import aws_provider_set_session
from providers.aws.lib.allowlist.allowlist import parse_allowlist_file
from providers.aws.lib.quick_inventory.quick_inventory import quick_inventory
from providers.aws.lib.security_hub.security_hub import (
resolve_security_hub_previous_findings,
)
@@ -204,6 +205,12 @@ if __name__ == "__main__":
action="store_true",
help="Send check output to AWS Security Hub",
)
parser.add_argument(
"-i",
"--quick-inventory",
action="store_true",
help="Run Prowler Quick Inventory. The inventory will be stored in an output csv by default",
)
bucket = parser.add_mutually_exclusive_group()
bucket.add_argument(
"-B",
@@ -452,58 +459,61 @@ if __name__ == "__main__":
args.verbose,
)
# Execute checks
findings = []
if len(checks_to_execute):
findings = execute_checks(
checks_to_execute, provider, audit_info, audit_output_options
)
if args.quick_inventory and provider == "aws":
quick_inventory(audit_info, output_directory)
else:
logger.error(
"There are no checks to execute. Please, check your input arguments"
)
# Execute checks
findings = []
if len(checks_to_execute):
findings = execute_checks(
checks_to_execute, provider, audit_info, audit_output_options
)
else:
logger.error(
"There are no checks to execute. Please, check your input arguments"
)
if output_modes:
for mode in output_modes:
# Close json file if exists
if mode == "json" or mode == "json-asff":
close_json(output_filename, output_directory, mode)
if mode == "html":
add_html_footer(output_filename, output_directory)
# Send output to S3 if needed (-B / -D)
if args.output_bucket or args.output_bucket_no_assume:
output_bucket = args.output_bucket
bucket_session = audit_info.audit_session
# Check if -D was input
if args.output_bucket_no_assume:
output_bucket = args.output_bucket_no_assume
bucket_session = audit_info.original_session
send_to_s3_bucket(
output_filename,
output_directory,
mode,
output_bucket,
bucket_session,
)
if output_modes:
for mode in output_modes:
# Close json file if exists
if mode == "json" or mode == "json-asff":
close_json(output_filename, output_directory, mode)
if mode == "html":
add_html_footer(output_filename, output_directory)
# Send output to S3 if needed (-B / -D)
if args.output_bucket or args.output_bucket_no_assume:
output_bucket = args.output_bucket
bucket_session = audit_info.audit_session
# Check if -D was input
if args.output_bucket_no_assume:
output_bucket = args.output_bucket_no_assume
bucket_session = audit_info.original_session
send_to_s3_bucket(
output_filename,
output_directory,
mode,
output_bucket,
bucket_session,
)
# Resolve previous fails of Security Hub
if args.security_hub:
resolve_security_hub_previous_findings(output_directory, audit_info)
# Resolve previous fails of Security Hub
if args.security_hub:
resolve_security_hub_previous_findings(output_directory, audit_info)
# Display summary table
display_summary_table(
findings,
audit_info,
audit_output_options,
provider,
)
if compliance_framework and findings:
# Display compliance table
display_compliance_table(
# Display summary table
display_summary_table(
findings,
bulk_checks_metadata,
compliance_framework,
audit_output_options.output_filename,
audit_output_options.output_directory,
audit_info,
audit_output_options,
provider,
)
if compliance_framework and findings:
# Display compliance table
display_compliance_table(
findings,
bulk_checks_metadata,
compliance_framework,
audit_output_options.output_filename,
audit_output_options.output_directory,
)