chore(s3): Move lib to the AWS provider and include tests (#2664)

Pepe Fagoaga
2023-08-23 16:12:48 +02:00
committed by GitHub
parent b17cc563ff
commit cb76e5a23c
12 changed files with 197 additions and 254 deletions

docs/tutorials/aws/s3.md Normal file
View File

@@ -0,0 +1,26 @@
# Send report to AWS S3 Bucket
To save your report in an S3 bucket, use `-B`/`--output-bucket`.
```sh
prowler <provider> -B my-bucket
```
If you want to use a custom folder and/or filename, use `-o`/`--output-directory` and/or `-F`/`--output-filename`.
```sh
prowler <provider> \
-B my-bucket \
--output-directory test-folder \
--output-filename output-filename
```
By default, Prowler sends the HTML, JSON and CSV output formats. If you want to send a custom output format, or only one of the defaults, you can specify it with the `-M`/`--output-modes` flag.
```sh
prowler <provider> -M csv -B my-bucket
```
> If you want to upload the reports to the S3 bucket using the initial credentials instead of the assumed-role credentials, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`.
> Make sure that the credentials used have the `s3:PutObject` permission on the S3 path where the reports will be uploaded.
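For example, to upload the reports with the initial credentials (assuming `-D` takes the same bucket argument as `-B`):
```sh
prowler <provider> -D my-bucket
```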

View File

@@ -23,23 +23,6 @@ prowler <provider> -M csv json json-asff html -o <custom_report_directory>
```console
prowler <provider> -M csv json json-asff html -F <custom_report_name> -o <custom_report_directory>
```
## Send report to AWS S3 Bucket
To save your report in an S3 bucket, use `-B`/`--output-bucket`.
```sh
prowler <provider> -B my-bucket/folder/
```
By default, Prowler sends the HTML, JSON and CSV output formats. If you want to send a custom output format, or only one of the defaults, you can specify it with the `-M` flag.
```sh
prowler <provider> -M csv -B my-bucket/folder/
```
> If you want to upload the reports to the S3 bucket using the initial credentials instead of the assumed-role credentials, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`.
> Make sure that the credentials used have the `s3:PutObject` permission on the S3 path where the reports will be uploaded.
## Output Formats

View File

@@ -46,6 +46,7 @@ nav:
- AWS Organizations: tutorials/aws/organizations.md
- AWS Regions and Partitions: tutorials/aws/regions-and-partitions.md
- Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
- Send reports to AWS S3: tutorials/aws/s3.md
- AWS CloudShell: tutorials/aws/cloudshell.md
- Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
- Tag-based Scan: tutorials/aws/tag-based-scan.md

View File

@@ -29,9 +29,10 @@ from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.compliance import display_compliance_table
from prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics, send_to_s3_bucket
from prowler.lib.outputs.outputs import extract_findings_statistics
from prowler.lib.outputs.slack import send_slack_message
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket
from prowler.providers.aws.lib.security_hub.security_hub import (
resolve_security_hub_previous_findings,
)

View File

@@ -1,17 +1,8 @@
import json
import sys
from colorama import Fore, Style
from prowler.config.config import (
    available_compliance_frameworks,
    csv_file_suffix,
    html_file_suffix,
    json_asff_file_suffix,
    json_file_suffix,
    json_ocsf_file_suffix,
    orange_color,
)
from prowler.config.config import available_compliance_frameworks, orange_color
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance import add_manual_controls, fill_compliance
from prowler.lib.outputs.file_descriptors import fill_file_descriptors
@@ -209,41 +200,6 @@ def set_report_color(status: str) -> str:
    return color


def send_to_s3_bucket(
    output_filename, output_directory, output_mode, output_bucket, audit_session
):
    try:
        filename = ""
        # Get only last part of the path
        if output_mode == "csv":
            filename = f"{output_filename}{csv_file_suffix}"
        elif output_mode == "json":
            filename = f"{output_filename}{json_file_suffix}"
        elif output_mode == "json-asff":
            filename = f"{output_filename}{json_asff_file_suffix}"
        elif output_mode == "json-ocsf":
            filename = f"{output_filename}{json_ocsf_file_suffix}"
        elif output_mode == "html":
            filename = f"{output_filename}{html_file_suffix}"
        else:  # Compliance output mode
            filename = f"{output_filename}_{output_mode}{csv_file_suffix}"
        logger.info(f"Sending outputs to S3 bucket {output_bucket}")
        bucket_remote_dir = output_directory
        while "prowler/" in bucket_remote_dir:  # Check if it is not a custom directory
            bucket_remote_dir = bucket_remote_dir.partition("prowler/")[-1]
        file_name = output_directory + "/" + filename
        bucket_name = output_bucket
        object_name = bucket_remote_dir + "/" + output_mode + "/" + filename
        s3_client = audit_session.client("s3")
        s3_client.upload_file(file_name, bucket_name, object_name)
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit(1)


def extract_findings_statistics(findings: list) -> dict:
    """
    extract_findings_statistics takes a list of findings and returns the following dict with the aggregated statistics

View File

@@ -14,9 +14,9 @@ from prowler.config.config import (
    output_file_timestamp,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.outputs import send_to_s3_bucket
from prowler.providers.aws.lib.arn.models import get_arn_resource_type
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket
def quick_inventory(audit_info: AWS_Audit_Info, args):

View File

@@ -0,0 +1,55 @@
import sys
from prowler.config.config import (
csv_file_suffix,
html_file_suffix,
json_asff_file_suffix,
json_file_suffix,
json_ocsf_file_suffix,
)
from prowler.lib.logger import logger
def send_to_s3_bucket(
output_filename, output_directory, output_mode, output_bucket_name, audit_session
):
try:
filename = ""
# Get only last part of the path
if output_mode == "csv":
filename = f"{output_filename}{csv_file_suffix}"
elif output_mode == "json":
filename = f"{output_filename}{json_file_suffix}"
elif output_mode == "json-asff":
filename = f"{output_filename}{json_asff_file_suffix}"
elif output_mode == "json-ocsf":
filename = f"{output_filename}{json_ocsf_file_suffix}"
elif output_mode == "html":
filename = f"{output_filename}{html_file_suffix}"
else: # Compliance output mode
filename = f"{output_filename}_{output_mode}{csv_file_suffix}"
logger.info(f"Sending outputs to S3 bucket {output_bucket_name}")
# File location
file_name = output_directory + "/" + filename
# S3 Object name
bucket_directory = get_s3_object_path(output_directory)
object_name = bucket_directory + "/" + output_mode + "/" + filename
s3_client = audit_session.client("s3")
s3_client.upload_file(file_name, output_bucket_name, object_name)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_s3_object_path(output_directory: str) -> str:
bucket_remote_dir = output_directory
if "prowler/" in bucket_remote_dir: # Check if it is not a custom directory
bucket_remote_dir = bucket_remote_dir.partition("prowler/")[-1]
return bucket_remote_dir
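
For reference, `get_s3_object_path` is what keeps uploads under a stable prefix: when the output directory is Prowler's default (its path contains `prowler/`), only the part after `prowler/` becomes the key prefix, while a custom directory is used verbatim; the object key is then `<prefix>/<output_mode>/<filename>`. A minimal standalone sketch of that behavior (the directory paths and account ID below are made up for illustration):

```python
# Re-statement of get_s3_object_path from the diff above, runnable standalone.
def get_s3_object_path(output_directory: str) -> str:
    # Keep only what follows the first "prowler/" segment; a custom
    # directory without "prowler/" in its path is returned unchanged.
    if "prowler/" in output_directory:
        return output_directory.partition("prowler/")[-1]
    return output_directory


# Default output directory: everything after "prowler/" is kept.
assert get_s3_object_path("/home/user/prowler/output") == "output"
# Custom output directory: used as-is.
assert get_s3_object_path("/tmp/reports") == "/tmp/reports"

# The resulting S3 object key for a CSV report:
filename = "prowler-output-123456789012.csv"
print(f"{get_s3_object_path('/home/user/prowler/output')}/csv/{filename}")
# -> output/csv/prowler-output-123456789012.csv
```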

View File

@@ -1,5 +1,5 @@
import os
from os import getcwd, path, remove
from os import path, remove
from unittest import mock
import boto3
@@ -7,7 +7,6 @@ import botocore
import pytest
from colorama import Fore
from mock import patch
from moto import mock_s3
from prowler.config.config import (
    csv_file_suffix,
@@ -62,11 +61,7 @@ from prowler.lib.outputs.models import (
    unroll_list,
    unroll_tags,
)
from prowler.lib.outputs.outputs import (
    extract_findings_statistics,
    send_to_s3_bucket,
    set_report_color,
)
from prowler.lib.outputs.outputs import extract_findings_statistics, set_report_color
from prowler.lib.utils.utils import hash_sha512, open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import send_to_security_hub
@@ -1105,189 +1100,6 @@ class Test_Outputs:
        output_options = mock.MagicMock()
        assert fill_json_ocsf(input_audit_info, finding, output_options) == expected

    @mock_s3
    def test_send_to_s3_bucket(self):
        # Create mock session
        session = boto3.session.Session(
            region_name="us-east-1",
        )
        # Create mock audit_info
        input_audit_info = AWS_Audit_Info(
            session_config=None,
            original_session=None,
            audit_session=session,
            audited_account=AWS_ACCOUNT_ID,
            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
            audited_identity_arn="test-arn",
            audited_user_id="test",
            audited_partition="aws",
            profile="default",
            profile_region="eu-west-1",
            credentials=None,
            assumed_role_info=None,
            audited_regions=["eu-west-2", "eu-west-1"],
            organizations_metadata=None,
            audit_resources=None,
            mfa_enabled=False,
            audit_metadata=Audit_Metadata(
                services_scanned=0,
                expected_checks=[],
                completed_checks=0,
                audit_progress=0,
            ),
        )
        # Create mock bucket
        bucket_name = "test_bucket"
        client = boto3.client("s3")
        client.create_bucket(Bucket=bucket_name)
        # Create mock csv output file
        fixtures_dir = "tests/lib/outputs/fixtures"
        output_directory = getcwd() + "/" + fixtures_dir
        output_mode = "csv"
        filename = f"prowler-output-{input_audit_info.audited_account}"
        # Send mock csv file to mock S3 Bucket
        send_to_s3_bucket(
            filename,
            output_directory,
            output_mode,
            bucket_name,
            input_audit_info.audit_session,
        )
        # Check if the file has been sent by checking its content type
        assert (
            client.get_object(
                Bucket=bucket_name,
                Key=fixtures_dir + "/" + output_mode + "/" + filename + csv_file_suffix,
            )["ContentType"]
            == "binary/octet-stream"
        )

    @mock_s3
    def test_send_to_s3_bucket_compliance(self):
        # Create mock session
        session = boto3.session.Session(
            region_name="us-east-1",
        )
        # Create mock audit_info
        input_audit_info = AWS_Audit_Info(
            session_config=None,
            original_session=None,
            audit_session=session,
            audited_account=AWS_ACCOUNT_ID,
            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
            audited_identity_arn="test-arn",
            audited_user_id="test",
            audited_partition="aws",
            profile="default",
            profile_region="eu-west-1",
            credentials=None,
            assumed_role_info=None,
            audited_regions=["eu-west-2", "eu-west-1"],
            organizations_metadata=None,
            audit_resources=None,
            mfa_enabled=False,
            audit_metadata=Audit_Metadata(
                services_scanned=0,
                expected_checks=[],
                completed_checks=0,
                audit_progress=0,
            ),
        )
        # Create mock bucket
        bucket_name = "test_bucket"
        client = boto3.client("s3")
        client.create_bucket(Bucket=bucket_name)
        # Create mock csv output file
        fixtures_dir = "tests/lib/outputs/fixtures"
        output_directory = getcwd() + "/" + fixtures_dir
        output_mode = "cis_1.4_aws"
        filename = f"prowler-output-{input_audit_info.audited_account}"
        # Send mock csv file to mock S3 Bucket
        send_to_s3_bucket(
            filename,
            output_directory,
            output_mode,
            bucket_name,
            input_audit_info.audit_session,
        )
        # Check if the file has been sent by checking its content type
        assert (
            client.get_object(
                Bucket=bucket_name,
                Key=fixtures_dir
                + "/"
                + output_mode
                + "/"
                + filename
                + "_"
                + output_mode
                + csv_file_suffix,
            )["ContentType"]
            == "binary/octet-stream"
        )

    @mock_s3
    def test_send_to_s3_bucket_custom_directory(self):
        # Create mock session
        session = boto3.session.Session(
            region_name="us-east-1",
        )
        # Create mock audit_info
        input_audit_info = AWS_Audit_Info(
            session_config=None,
            original_session=None,
            audit_session=session,
            audited_account=AWS_ACCOUNT_ID,
            audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
            audited_identity_arn="test-arn",
            audited_user_id="test",
            audited_partition="aws",
            profile="default",
            profile_region="eu-west-1",
            credentials=None,
            assumed_role_info=None,
            audited_regions=["eu-west-2", "eu-west-1"],
            organizations_metadata=None,
            audit_resources=None,
            mfa_enabled=False,
            audit_metadata=Audit_Metadata(
                services_scanned=0,
                expected_checks=[],
                completed_checks=0,
                audit_progress=0,
            ),
        )
        # Create mock bucket
        bucket_name = "test_bucket"
        client = boto3.client("s3")
        client.create_bucket(Bucket=bucket_name)
        # Create mock csv output file
        fixtures_dir = "fixtures"
        output_directory = f"tests/lib/outputs/{fixtures_dir}"
        output_mode = "csv"
        filename = f"prowler-output-{input_audit_info.audited_account}"
        # Send mock csv file to mock S3 Bucket
        send_to_s3_bucket(
            filename,
            output_directory,
            output_mode,
            bucket_name,
            input_audit_info.audit_session,
        )
        # Check if the file has been sent by checking its content type
        assert (
            client.get_object(
                Bucket=bucket_name,
                Key=output_directory
                + "/"
                + output_mode
                + "/"
                + filename
                + csv_file_suffix,
            )["ContentType"]
            == "binary/octet-stream"
        )

    def test_extract_findings_statistics_different_resources(self):
        finding_1 = mock.MagicMock()
        finding_1.status = "PASS"

View File

@@ -0,0 +1,109 @@
from os import path
from pathlib import Path
import boto3
from mock import MagicMock
from moto import mock_s3
from prowler.config.config import csv_file_suffix
from prowler.providers.aws.lib.s3.s3 import get_s3_object_path, send_to_s3_bucket
AWS_ACCOUNT_ID = "123456789012"
AWS_REGION = "us-east-1"
ACTUAL_DIRECTORY = Path(path.dirname(path.realpath(__file__)))
FIXTURES_DIR_NAME = "fixtures"
S3_BUCKET_NAME = "test_bucket"
OUTPUT_MODE_CSV = "csv"
OUTPUT_MODE_CIS_1_4_AWS = "cis_1.4_aws"
class TestS3:
    @mock_s3
    def test_send_to_s3_bucket(self):
        # Mock Audit Info
        audit_info = MagicMock()
        # Create mock session
        audit_info.audit_session = boto3.session.Session(region_name=AWS_REGION)
        audit_info.audited_account = AWS_ACCOUNT_ID
        # Create mock bucket
        client = audit_info.audit_session.client("s3")
        client.create_bucket(Bucket=S3_BUCKET_NAME)
        # Mocked CSV output file
        output_directory = f"{ACTUAL_DIRECTORY}/{FIXTURES_DIR_NAME}"
        filename = f"prowler-output-{audit_info.audited_account}"
        # Send mock CSV file to mock S3 Bucket
        send_to_s3_bucket(
            filename,
            output_directory,
            OUTPUT_MODE_CSV,
            S3_BUCKET_NAME,
            audit_info.audit_session,
        )
        bucket_directory = get_s3_object_path(output_directory)
        object_name = (
            f"{bucket_directory}/{OUTPUT_MODE_CSV}/{filename}{csv_file_suffix}"
        )
        assert (
            client.get_object(
                Bucket=S3_BUCKET_NAME,
                Key=object_name,
            )["ContentType"]
            == "binary/octet-stream"
        )

    @mock_s3
    def test_send_to_s3_bucket_compliance(self):
        # Mock Audit Info
        audit_info = MagicMock()
        # Create mock session
        audit_info.audit_session = boto3.session.Session(region_name=AWS_REGION)
        audit_info.audited_account = AWS_ACCOUNT_ID
        # Create mock bucket
        client = audit_info.audit_session.client("s3")
        client.create_bucket(Bucket=S3_BUCKET_NAME)
        # Mocked CSV output file
        output_directory = f"{ACTUAL_DIRECTORY}/{FIXTURES_DIR_NAME}"
        filename = f"prowler-output-{audit_info.audited_account}"
        # Send mock CSV file to mock S3 Bucket
        send_to_s3_bucket(
            filename,
            output_directory,
            OUTPUT_MODE_CIS_1_4_AWS,
            S3_BUCKET_NAME,
            audit_info.audit_session,
        )
        bucket_directory = get_s3_object_path(output_directory)
        object_name = f"{bucket_directory}/{OUTPUT_MODE_CIS_1_4_AWS}/{filename}_{OUTPUT_MODE_CIS_1_4_AWS}{csv_file_suffix}"
        assert (
            client.get_object(
                Bucket=S3_BUCKET_NAME,
                Key=object_name,
            )["ContentType"]
            == "binary/octet-stream"
        )

    def test_get_s3_object_path_with_prowler(self):
        output_directory = "/Users/admin/prowler/"
        assert (
            get_s3_object_path(output_directory)
            == output_directory.partition("prowler/")[-1]
        )

    def test_get_s3_object_path_without_prowler(self):
        output_directory = "/Users/admin/"
        assert get_s3_object_path(output_directory) == output_directory