feat(s3_output): send outputs to S3 bucket (#1343)

Author: Sergio Garcia
Date: 2022-08-29 07:43:34 +01:00
Committed by: GitHub
Parent: a63c42f59c
Commit: 44f514f02c

4 changed files with 69 additions and 10 deletions

View File

@@ -5,7 +5,7 @@
   ],
   "CheckID": "iam_disable_30_days_credentials",
   "CheckTitle": "Ensure credentials unused for 30 days or greater are disabled",
-  "CheckType": "Software and Configuration Checks",
+  "CheckType": ["Software and Configuration Checks"],
   "Compliance": [
     {
       "Control": [

View File

@@ -27,7 +27,7 @@ class Check_Output_JSON(BaseModel):
     CheckID: str
     # CheckName: str
     CheckTitle: str
-    CheckType: str
+    CheckType: List[str]
     ServiceName: str
     SubServiceName: str
     Status: str = ""
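
With the new annotation, pydantic validates CheckType as a list of strings and rejects a bare string. A hedged, self-contained sketch of a cut-down model (assuming pydantic, which the BaseModel base indicates; `List` comes from `typing`, an import this hunk does not show):

```python
# Hedged sketch, not the project file: only the fields shown in the hunk above.
from typing import List

from pydantic import BaseModel


class Check_Output_JSON(BaseModel):
    CheckID: str
    CheckTitle: str
    CheckType: List[str]


check = Check_Output_JSON(
    CheckID="iam_disable_30_days_credentials",
    CheckTitle="Ensure credentials unused for 30 days or greater are disabled",
    CheckType=["Software and Configuration Checks"],
)
assert check.CheckType == ["Software and Configuration Checks"]
```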

View File

@@ -271,13 +271,11 @@ def send_to_s3_bucket(
         elif output_mode == "json-asff":
             filename = f"{output_filename}{json_asff_file_suffix}"
         logger.info(f"Sending outputs to S3 bucket {output_bucket}")
-        # Check if security hub is enabled in current region
+        file_name = output_directory + "/" + filename
+        bucket_name = output_bucket
+        object_name = output_directory + "/" + output_mode + "/" + filename
         s3_client = audit_session.client("s3")
-        s3_client.upload_file(
-            output_directory + "/" + filename,
-            output_bucket,
-            output_directory + "/" + output_mode + "/" + filename,
-        )
+        s3_client.upload_file(file_name, bucket_name, object_name)
     except Exception as error:
         logger.critical(f"{error.__class__.__name__} -- {error}")
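
The refactor is behavior-preserving: boto3's S3.Client.upload_file takes (Filename, Bucket, Key) positionally, and the commit only binds those three arguments to descriptive names. A hedged usage sketch (bucket name and paths are hypothetical; running it needs AWS credentials and an existing bucket):

```python
# Hedged sketch of the call's semantics, not project code. Note the object key
# inserts output_mode as a path segment, which is why the test below looks for
# "./csv/<filename>" in the bucket.
import boto3

s3_client = boto3.session.Session().client("s3")
file_name = "./prowler-output-123456789012.csv"        # local file to upload
bucket_name = "my-output-bucket"                       # hypothetical bucket
object_name = "./csv/prowler-output-123456789012.csv"  # key inside the bucket
s3_client.upload_file(file_name, bucket_name, object_name)
```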

View File

@@ -1,7 +1,9 @@
 import os
 from os import path, remove
+import boto3
 from colorama import Fore
+from moto import mock_s3
 from config.config import (
     csv_file_suffix,
@@ -26,6 +28,7 @@ from lib.outputs.outputs import (
     fill_json,
     fill_json_asff,
     generate_csv_fields,
+    send_to_s3_bucket,
     set_report_color,
 )
 from lib.utils.utils import hash_sha512, open_file
@@ -236,7 +239,7 @@ class Test_Outputs:
         )
         expected.GeneratorId = "prowler-" + finding.check_metadata.CheckID
         expected.AwsAccountId = "123456789012"
-        expected.Types = [finding.check_metadata.CheckType]
+        expected.Types = finding.check_metadata.CheckType
         expected.FirstObservedAt = (
             expected.UpdatedAt
         ) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
@@ -254,10 +257,68 @@ class Test_Outputs:
         expected.Compliance = Compliance(
             Status="PASS" + "ED",
-            RelatedRequirements=[finding.check_metadata.CheckType],
+            RelatedRequirements=finding.check_metadata.CheckType,
         )
         expected.Remediation = {
             "Recommendation": finding.check_metadata.Remediation.Recommendation
         }
         assert fill_json_asff(input, input_audit_info, finding) == expected
+
+    @mock_s3
+    def test_send_to_s3_bucket(self):
+        # Create mock session
+        session = boto3.session.Session(
+            region_name="us-east-1",
+        )
+        # Create mock audit_info
+        input_audit_info = AWS_Audit_Info(
+            original_session=None,
+            audit_session=session,
+            audited_account="123456789012",
+            audited_identity_arn="test-arn",
+            audited_user_id="test",
+            audited_partition="aws",
+            profile="default",
+            profile_region="eu-west-1",
+            credentials=None,
+            assumed_role_info=None,
+            audited_regions=["eu-west-2", "eu-west-1"],
+            organizations_metadata=None,
+        )
+        # Create mock bucket
+        bucket_name = "test_bucket"
+        client = boto3.client("s3")
+        client.create_bucket(Bucket=bucket_name)
+        # Create mock csv output file
+        output_directory = "."
+        output_mode = "csv"
+        filename = (
+            f"prowler-output-{input_audit_info.audited_account}-{output_file_timestamp}"
+        )
+        file_descriptor = open_file(
+            f"{output_directory}/{filename}{csv_file_suffix}",
+            "a",
+        )
+        # Send mock csv file to mock S3 bucket
+        send_to_s3_bucket(
+            filename,
+            output_directory,
+            output_mode,
+            bucket_name,
+            input_audit_info.audit_session,
+        )
+        # Check that the file was uploaded by reading back its content type
+        assert (
+            client.get_object(
+                Bucket=bucket_name,
+                Key=output_directory
+                + "/"
+                + output_mode
+                + "/"
+                + filename
+                + csv_file_suffix,
+            )["ContentType"]
+            == "binary/octet-stream"
+        )
+        remove(f"{output_directory}/{filename}{csv_file_suffix}")
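
For reference, the test's pattern can be reproduced in isolation: moto's mock_s3 decorator (moto < 5) patches boto3 in-process, and upload_file stores objects with moto's default ContentType of binary/octet-stream, which is what the assertion reads back. A self-contained sketch with hypothetical file and bucket names:

```python
# Self-contained sketch of the moto pattern used by test_send_to_s3_bucket;
# names are hypothetical. No real AWS calls are made under @mock_s3.
import os

import boto3
from moto import mock_s3


@mock_s3
def upload_and_check() -> str:
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="test_bucket")
    with open("example.csv", "w") as fd:
        fd.write("header\n")
    client.upload_file("example.csv", "test_bucket", "./csv/example.csv")
    os.remove("example.csv")
    # moto reports upload_file objects with a default binary/octet-stream type
    obj = client.get_object(Bucket="test_bucket", Key="./csv/example.csv")
    return obj["ContentType"]


assert upload_and_check() == "binary/octet-stream"
```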