Mirror of https://github.com/ghndrx/prowler.git (synced 2026-02-10 06:45:08 +00:00)
feat(gcp): add --project-ids flag and scan all projects by default (#2393)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
@@ -184,7 +184,7 @@ Those credentials must be associated to a user or service account with proper pe
 - Security Reviewer
 - Stackdriver Account Viewer
 
-> `prowler` will scan the project associated with the credentials.
+> By default, `prowler` will scan all accessible GCP Projects; use the `--project-ids` flag to specify the projects to be scanned.
 
 # 💻 Basic Usage
 

@@ -267,7 +267,7 @@ Optionally, you can provide the location of an application credential JSON file
 ```console
 prowler gcp --credentials-file path
 ```
+> By default, `prowler` will scan all accessible GCP Projects; use the `--project-ids` flag to specify the projects to be scanned.
 
 # 📃 License
 

@@ -96,4 +96,4 @@ Those credentials must be associated to a user or service account with proper pe
 - Security Reviewer
 - Stackdriver Account Viewer
 
-> `prowler` will scan the project associated with the credentials.
+> By default, `prowler` will scan all accessible GCP Projects; use the `--project-ids` flag to specify the projects to be scanned.
@@ -276,7 +276,7 @@ prowler azure --managed-identity-auth
 
 See more details about Azure Authentication in [Requirements](getting-started/requirements.md)
 
-Prowler by default scans all the subscriptions that is allowed to scan, if you want to scan a single subscription or various concrete subscriptions you can use the following flag (using az cli auth as example):
+Prowler by default scans all the subscriptions that it is allowed to scan; if you want to scan a single subscription or several specific subscriptions, you can use the following flag (using az cli auth as an example):
 ```console
 prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription ID 2> ... <subscription ID N>
 ```

@@ -296,6 +296,9 @@ Otherwise, you can generate and download Service Account keys in JSON format (re
 prowler gcp --credentials-file path
 ```
 
-> `prowler` will scan the GCP project associated with the credentials.
+Prowler by default scans all the GCP Projects that it is allowed to scan; if you want to scan a single project or several specific projects, you can use the following flag:
+```console
+prowler gcp --project-ids <Project ID 1> <Project ID 2> ... <Project ID N>
+```
 
 See more details about GCP Authentication in [Requirements](getting-started/requirements.md)
@@ -443,7 +443,7 @@ Detailed documentation at https://docs.prowler.cloud
 "--subscription-ids",
 nargs="+",
 default=[],
-help="Azure subscription ids to be scanned by prowler",
+help="Azure Subscription IDs to be scanned by Prowler",
 )
 azure_parser.add_argument(
 "--tenant-id",

@@ -466,3 +466,11 @@ Detailed documentation at https://docs.prowler.cloud
 metavar="FILE_PATH",
 help="Authenticate using a Google Service Account Application Credentials JSON file",
 )
+# Subscriptions
+gcp_subscriptions_subparser = gcp_parser.add_argument_group("Projects")
+gcp_subscriptions_subparser.add_argument(
+"--project-ids",
+nargs="+",
+default=[],
+help="GCP Project IDs to be scanned by Prowler",
+)
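The new `--project-ids` option mirrors the existing Azure `--subscription-ids` flag: `nargs="+"` collects one or more space-separated project IDs into a list, and `default=[]` means that omitting the flag leaves the list empty so every accessible project is scanned. A minimal, self-contained sketch of that parsing behaviour (the standalone parser and the example project IDs are illustrative, not part of the Prowler CLI):

```python
import argparse

# Illustrative stand-in for the "Projects" argument group added above.
parser = argparse.ArgumentParser(prog="prowler-gcp-sketch")
projects_group = parser.add_argument_group("Projects")
projects_group.add_argument(
    "--project-ids",
    nargs="+",   # one or more space-separated project IDs
    default=[],  # empty list -> scan every accessible project
    help="GCP Project IDs to be scanned by Prowler",
)

args = parser.parse_args(["--project-ids", "project-a", "project-b"])
print(args.project_ids)  # ['project-a', 'project-b']
```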
@@ -491,8 +491,8 @@ def get_gcp_html_assessment_summary(audit_info):
 </div>
 <ul class="list-group list-group-flush">
 <li class="list-group-item">
-<b>GCP Project ID:</b> """
+<b>GCP Project IDs:</b> """
-+ audit_info.project_id
++ ", ".join(audit_info.project_ids)
 + """
 </li>
 </ul>

@@ -363,7 +363,7 @@ def generate_provider_output_json(
 )
 
 if provider == "gcp":
-finding_output.ProjectId = audit_info.project_id
+finding_output.ProjectId = finding.project_id
 finding_output.Location = finding.location
 finding_output.ResourceId = finding.resource_id
 finding_output.ResourceName = finding.resource_name
@@ -30,7 +30,7 @@ def create_message_identity(provider, audit_info):
 if provider == "aws":
 identity = f"AWS Account *{audit_info.audited_account}*"
 elif provider == "gcp":
-identity = f"GCP Project *{audit_info.project_id}*"
+identity = f"GCP Projects *{', '.join(audit_info.project_ids)}*"
 logo = gcp_logo
 elif provider == "azure":
 printed_subscriptions = []

@@ -30,8 +30,8 @@ def display_summary_table(
 entity_type = "Tenant ID/s"
 audited_entities = " ".join(audit_info.identity.tenant_ids)
 elif provider == "gcp":
-entity_type = "Project ID"
+entity_type = "Project ID/s"
-audited_entities = audit_info.project_id
+audited_entities = ", ".join(audit_info.project_ids)
 
 if findings:
 current = {
@@ -50,7 +50,7 @@ class Audit_Info:
 report = f"""
 This report is being generated using credentials below:
 
-GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project ID: {Fore.YELLOW}[{audit_info.project_id}]{Style.RESET_ALL}
+GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.YELLOW}[{", ".join(audit_info.project_ids)}]{Style.RESET_ALL}
 """
 print(report)
 

@@ -301,17 +301,20 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
 set_gcp_audit_info returns the GCP_Audit_Info
 """
 logger.info("Setting GCP session ...")
+project_ids = arguments.get("project_ids")
 
 logger.info("Checking if any credentials mode is set ...")
 credentials_file = arguments.get("credentials_file")
 
 gcp_provider = GCP_Provider(
 credentials_file,
+project_ids,
 )
 
 (
 gcp_audit_info.credentials,
-gcp_audit_info.project_id,
+gcp_audit_info.default_project_id,
+gcp_audit_info.project_ids,
 ) = gcp_provider.get_credentials()
 
 if not arguments.get("only_logs"):
@@ -86,9 +86,7 @@ class Gcp_Output_Options(Provider_Output_Options):
 not hasattr(arguments, "output_filename")
 or arguments.output_filename is None
 ):
-self.output_filename = (
-f"prowler-output-{audit_info.project_id}-{output_file_timestamp}"
-)
+self.output_filename = f"prowler-output-{audit_info.default_project_id}-{output_file_timestamp}"
 else:
 self.output_filename = arguments.output_filename
 
@@ -13,13 +13,35 @@ class GCP_Provider:
 def __init__(
 self,
 credentials_file: str,
+input_project_ids: list,
 ):
 logger.info("Instantiating GCP Provider ...")
-self.credentials, self.project_id = self.__set_credentials__(credentials_file)
-if not self.project_id:
+self.credentials, self.default_project_id = self.__set_credentials__(
+credentials_file
+)
+if not self.default_project_id:
 logger.critical("No Project ID associated to Google Credentials.")
 sys.exit(1)
+
+self.project_ids = []
+accessible_projects = self.get_project_ids()
+if not accessible_projects:
+logger.critical("No Project IDs can be accessed via Google Credentials.")
+sys.exit(1)
+
+if input_project_ids:
+for input_project in input_project_ids:
+if input_project in accessible_projects:
+self.project_ids.append(input_project)
+else:
+logger.critical(
+f"Project {input_project} cannot be accessed via Google Credentials."
+)
+sys.exit(1)
+else:
+# If no projects were input, all accessible projects are scanned by default
+self.project_ids = accessible_projects
 
 def __set_credentials__(self, credentials_file):
 try:
 if credentials_file:
@@ -27,7 +49,9 @@ class GCP_Provider:
 
 return auth.default()
 except Exception as error:
-logger.critical(f"{error.__class__.__name__} -- {error}")
+logger.critical(
+f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+)
 sys.exit(1)
 
 def __set_gcp_creds_env_var__(self, credentials_file):
@@ -38,7 +62,34 @@ class GCP_Provider:
 os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = client_secrets_path
 
 def get_credentials(self):
-return self.credentials, self.project_id
+return self.credentials, self.default_project_id, self.project_ids
+
+def get_project_ids(self):
+try:
+project_ids = []
+
+service = discovery.build(
+"cloudresourcemanager", "v1", credentials=self.credentials
+)
+
+request = service.projects().list()
+
+while request is not None:
+response = request.execute()
+
+for project in response.get("projects", []):
+project_ids.append(project["projectId"])
+
+request = service.projects().list_next(
+previous_request=request, previous_response=response
+)
+
+return project_ids
+except Exception as error:
+logger.error(
+f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+)
+return []
 
 
 def generate_client(
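For reference, the new `get_project_ids` method pages through the Cloud Resource Manager v1 `projects().list()` API and collects every `projectId` the credentials can see. A stripped-down sketch of that pagination pattern, assuming Application Default Credentials are already configured (the standalone function name is illustrative):

```python
from google import auth
from googleapiclient import discovery


def list_accessible_projects() -> list:
    """Sketch of the pagination loop used by GCP_Provider.get_project_ids."""
    credentials, _ = auth.default()
    service = discovery.build("cloudresourcemanager", "v1", credentials=credentials)

    project_ids = []
    request = service.projects().list()
    while request is not None:
        response = request.execute()
        for project in response.get("projects", []):
            project_ids.append(project["projectId"])
        # list_next returns None once the last page has been consumed
        request = service.projects().list_next(
            previous_request=request, previous_response=response
        )
    return project_ids
```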
@@ -2,7 +2,8 @@ from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
 
 gcp_audit_info = GCP_Audit_Info(
 credentials=None,
-project_id=None,
+default_project_id=None,
+project_ids=[],
 audit_resources=None,
 audit_metadata=None,
 )
@@ -7,12 +7,21 @@ from google.oauth2.credentials import Credentials
 @dataclass
 class GCP_Audit_Info:
 credentials: Credentials
-project_id: str
+default_project_id: str
+project_ids: list
 audit_resources: Optional[Any]
 audit_metadata: Optional[Any]
 
-def __init__(self, credentials, project_id, audit_metadata, audit_resources):
+def __init__(
+self,
+credentials,
+default_project_id,
+project_ids,
+audit_metadata,
+audit_resources,
+):
 self.credentials = credentials
-self.project_id = project_id
+self.default_project_id = default_project_id
+self.project_ids = project_ids
 self.audit_metadata = audit_metadata
 self.audit_resources = audit_resources
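The split between the two new fields matters downstream: `default_project_id` keeps the single project bound to the credentials (used, for example, in the default output filename), while `project_ids` carries every project selected for the scan and is joined with commas in the report headers. A hedged construction example using the import path shown in the hunk above (the project names are placeholders):

```python
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info

# Illustrative values only; the provider wiring above fills these in at runtime.
audit_info = GCP_Audit_Info(
    credentials=None,                        # a google-auth credentials object in practice
    default_project_id="project-a",          # project tied to the credentials
    project_ids=["project-a", "project-b"],  # projects selected for the scan
    audit_metadata=None,
    audit_resources=None,
)
print(", ".join(audit_info.project_ids))  # how multi-project output is rendered
```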
@@ -7,7 +7,7 @@ class bigquery_dataset_cmk_encryption(Check):
 findings = []
 for dataset in bigquery_client.datasets:
 report = Check_Report_GCP(self.metadata())
-report.project_id = bigquery_client.project_id
+report.project_id = dataset.project_id
 report.resource_id = dataset.id
 report.resource_name = dataset.name
 report.location = dataset.region

@@ -7,7 +7,7 @@ class bigquery_dataset_public_access(Check):
 findings = []
 for dataset in bigquery_client.datasets:
 report = Check_Report_GCP(self.metadata())
-report.project_id = bigquery_client.project_id
+report.project_id = dataset.project_id
 report.resource_id = dataset.id
 report.resource_name = dataset.name
 report.location = dataset.region

@@ -9,7 +9,7 @@ class BigQuery:
 def __init__(self, audit_info):
 self.service = "bigquery"
 self.api_version = "v2"
-self.project_id = audit_info.project_id
+self.project_ids = audit_info.project_ids
 self.client = generate_client(self.service, self.api_version, audit_info)
 self.datasets = []
 self.tables = []
@@ -17,52 +17,54 @@ class BigQuery:
 self.__get_tables__()
 
 def __get_datasets__(self):
-try:
-request = self.client.datasets().list(projectId=self.project_id)
+for project_id in self.project_ids:
+try:
+request = self.client.datasets().list(projectId=project_id)
 while request is not None:
 response = request.execute()
 
 for dataset in response.get("datasets", []):
 dataset_info = (
 self.client.datasets()
 .get(
-projectId=self.project_id,
+projectId=project_id,
 datasetId=dataset["datasetReference"]["datasetId"],
 )
 .execute()
 )
 cmk_encryption = False
 public = False
 roles = dataset_info.get("access", "")
 if "allAuthenticatedUsers" in str(roles) or "allUsers" in str(
 roles
 ):
 public = True
 if dataset_info.get("defaultEncryptionConfiguration"):
 cmk_encryption = True
 self.datasets.append(
 Dataset(
 name=dataset["datasetReference"]["datasetId"],
 id=dataset["id"],
 region=dataset["location"],
 cmk_encryption=cmk_encryption,
 public=public,
+project_id=project_id,
 )
 )
 
 request = self.client.datasets().list_next(
 previous_request=request, previous_response=response
 )
 except Exception as error:
 logger.error(
 f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
 )
 
 def __get_tables__(self):
-try:
-for dataset in self.datasets:
+for dataset in self.datasets:
+try:
 request = self.client.tables().list(
-projectId=self.project_id, datasetId=dataset.name
+projectId=dataset.project_id, datasetId=dataset.name
 )
 while request is not None:
 response = request.execute()
@@ -72,7 +74,7 @@ class BigQuery:
 if (
 self.client.tables()
 .get(
-projectId=self.project_id,
+projectId=dataset.project_id,
 datasetId=dataset.name,
 tableId=table["tableReference"]["tableId"],
 )
@@ -86,16 +88,17 @@ class BigQuery:
 id=table["id"],
 region=dataset.region,
 cmk_encryption=cmk_encryption,
+project_id=dataset.project_id,
 )
 )
 
 request = self.client.tables().list_next(
 previous_request=request, previous_response=response
 )
 except Exception as error:
 logger.error(
 f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
 )
 
 
 class Dataset(BaseModel):

@@ -104,6 +107,7 @@ class Dataset(BaseModel):
 region: str
 cmk_encryption: bool
 public: bool
+project_id: str
 
 
 class Table(BaseModel):

@@ -111,3 +115,4 @@ class Table(BaseModel):
 id: str
 region: str
 cmk_encryption: bool
+project_id: str
@@ -7,7 +7,7 @@ class bigquery_table_cmk_encryption(Check):
 findings = []
 for table in bigquery_client.tables:
 report = Check_Report_GCP(self.metadata())
-report.project_id = bigquery_client.project_id
+report.project_id = table.project_id
 report.resource_id = table.id
 report.resource_name = table.name
 report.location = table.region
@@ -10,7 +10,7 @@ class CloudResourceManager:
 self.service = "cloudresourcemanager"
 self.api_version = "v1"
 self.region = "global"
-self.project_id = audit_info.project_id
+self.project_ids = audit_info.project_ids
 self.client = generate_client(self.service, self.api_version, audit_info)
 self.bindings = []
 self.__get_iam_policy__()

@@ -19,23 +19,26 @@ class CloudResourceManager:
 return self.client
 
 def __get_iam_policy__(self):
-try:
-policy = (
-self.client.projects().getIamPolicy(resource=self.project_id).execute()
-)
+for project_id in self.project_ids:
+try:
+policy = (
+self.client.projects().getIamPolicy(resource=project_id).execute()
+)
 for binding in policy["bindings"]:
 self.bindings.append(
 Binding(
 role=binding["role"],
 members=binding["members"],
+project_id=project_id,
 )
 )
 except Exception as error:
 logger.error(
 f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
 )
 
 
 class Binding(BaseModel):
 role: str
 members: list
+project_id: str
@@ -7,7 +7,7 @@ class cloudsql_instance_automated_backups(Check):
 findings = []
 for instance in cloudsql_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_mysql_local_infile_flag(Check):
 for instance in cloudsql_client.instances:
 if "MYSQL" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_mysql_skip_show_database_flag(Check):
 for instance in cloudsql_client.instances:
 if "MYSQL" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_postgres_enable_pgaudit_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_postgres_log_connections_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_postgres_log_disconnections_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_postgres_log_error_verbosity_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_postgres_log_min_duration_statement_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -9,7 +9,7 @@ class cloudsql_instance_postgres_log_min_error_statement_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -9,7 +9,7 @@ class cloudsql_instance_postgres_log_min_messages_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -9,7 +9,7 @@ class cloudsql_instance_postgres_log_statement_flag(Check):
 for instance in cloudsql_client.instances:
 if "POSTGRES" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -7,7 +7,7 @@ class cloudsql_instance_private_ip_assignment(Check):
 findings = []
 for instance in cloudsql_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -7,7 +7,7 @@ class cloudsql_instance_public_access(Check):
 findings = []
 for instance in cloudsql_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -7,7 +7,7 @@ class cloudsql_instance_public_ip(Check):
 findings = []
 for instance in cloudsql_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_contained_database_authentication_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_cross_db_ownership_chaining_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_external_scripts_enabled_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_remote_access_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_trace_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_user_connections_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -8,7 +8,7 @@ class cloudsql_instance_sqlserver_user_options_flag(Check):
 for instance in cloudsql_client.instances:
 if "SQLSERVER" in instance.version:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region

@@ -7,7 +7,7 @@ class cloudsql_instance_ssl_connections(Check):
 findings = []
 for instance in cloudsql_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudsql_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.name
 report.resource_name = instance.name
 report.location = instance.region
@@ -9,49 +9,51 @@ class CloudSQL:
 def __init__(self, audit_info):
 self.service = "sqladmin"
 self.api_version = "v1"
-self.project_id = audit_info.project_id
+self.project_ids = audit_info.project_ids
 self.client = generate_client(self.service, self.api_version, audit_info)
 self.instances = []
 self.__get_instances__()
 
 def __get_instances__(self):
-try:
-request = self.client.instances().list(project=self.project_id)
+for project_id in self.project_ids:
+try:
+request = self.client.instances().list(project=project_id)
 while request is not None:
 response = request.execute()
 
 for instance in response.get("items", []):
 public_ip = False
 for address in instance.get("ipAddresses", []):
 if address["type"] == "PRIMARY":
 public_ip = True
 self.instances.append(
 Instance(
 name=instance["name"],
 version=instance["databaseVersion"],
 region=instance["region"],
 ip_addresses=instance.get("ipAddresses", []),
 public_ip=public_ip,
 ssl=instance["settings"]["ipConfiguration"].get(
 "requireSsl", False
 ),
 automated_backups=instance["settings"][
 "backupConfiguration"
 ]["enabled"],
-authorized_networks=instance["settings"]["ipConfiguration"][
-"authorizedNetworks"
-],
+authorized_networks=instance["settings"][
+"ipConfiguration"
+]["authorizedNetworks"],
 flags=instance["settings"].get("databaseFlags", []),
+project_id=project_id,
 )
 )
 
 request = self.client.instances().list_next(
 previous_request=request, previous_response=response
 )
 except Exception as error:
 logger.error(
 f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
 )
 
 
 class Instance(BaseModel):
@@ -64,3 +66,4 @@ class Instance(BaseModel):
 ssl: bool
 automated_backups: bool
 flags: list
+project_id: str
@@ -9,7 +9,7 @@ class cloudstorage_bucket_public_access(Check):
 findings = []
 for bucket in cloudstorage_client.buckets:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudstorage_client.project_id
+report.project_id = bucket.project_id
 report.resource_id = bucket.id
 report.resource_name = bucket.name
 report.location = bucket.region

@@ -9,7 +9,7 @@ class cloudstorage_bucket_uniform_bucket_level_access(Check):
 findings = []
 for bucket in cloudstorage_client.buckets:
 report = Check_Report_GCP(self.metadata())
-report.project_id = cloudstorage_client.project_id
+report.project_id = bucket.project_id
 report.resource_id = bucket.id
 report.resource_name = bucket.name
 report.location = bucket.region
@@ -9,46 +9,48 @@ class CloudStorage:
 def __init__(self, audit_info):
 self.service = "storage"
 self.api_version = "v1"
-self.project_id = audit_info.project_id
+self.project_ids = audit_info.project_ids
 self.client = generate_client(self.service, self.api_version, audit_info)
 self.buckets = []
 self.__get_buckets__()
 
 def __get_buckets__(self):
-try:
-request = self.client.buckets().list(project=self.project_id)
+for project_id in self.project_ids:
+try:
+request = self.client.buckets().list(project=project_id)
 while request is not None:
 response = request.execute()
 for bucket in response.get("items", []):
 bucket_iam = (
 self.client.buckets()
 .getIamPolicy(bucket=bucket["id"])
 .execute()["bindings"]
 )
 public = False
-if "allAuthenticatedUsers" in str(bucket_iam) or "allUsers" in str(
+if "allAuthenticatedUsers" in str(
 bucket_iam
-):
+) or "allUsers" in str(bucket_iam):
 public = True
 self.buckets.append(
 Bucket(
 name=bucket["name"],
 id=bucket["id"],
 region=bucket["location"],
 uniform_bucket_level_access=bucket["iamConfiguration"][
 "uniformBucketLevelAccess"
 ]["enabled"],
 public=public,
+project_id=project_id,
 )
 )
 
 request = self.client.buckets().list_next(
 previous_request=request, previous_response=response
 )
 except Exception as error:
 logger.error(
 f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
 )
 
 
 class Bucket(BaseModel):
@@ -57,3 +59,4 @@ class Bucket(BaseModel):
 region: str
 uniform_bucket_level_access: bool
 public: bool
+project_id: str
@@ -7,7 +7,7 @@ class compute_default_service_account_in_use(Check):
 findings = []
 for instance in compute_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = compute_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.id
 report.resource_name = instance.name
 report.location = instance.zone

@@ -16,10 +16,7 @@ class compute_default_service_account_in_use(Check):
 if (
 any(
 [
-(
-sa["email"]
-== f"{compute_client.project_id}-compute@developer.gserviceaccount.com"
-)
+("-compute@developer.gserviceaccount.com" in sa["email"])
 for sa in instance.service_accounts
 ]
 )
@@ -7,7 +7,7 @@ class compute_default_service_account_in_use_with_full_api_access(Check):
 findings = []
 for instance in compute_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = compute_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.id
 report.resource_name = instance.name
 report.location = instance.zone

@@ -15,8 +15,7 @@ class compute_default_service_account_in_use_with_full_api_access(Check):
 report.status_extended = f"The VM Instance {instance.name} is not configured to use the default service account with full access to all cloud APIs "
 for service_account in instance.service_accounts:
 if (
-service_account["email"]
-== f"{compute_client.project_id}-compute@developer.gserviceaccount.com"
+"-compute@developer.gserviceaccount.com" in service_account["email"]
 and "https://www.googleapis.com/auth/cloud-platform"
 in service_account["scopes"]
 and instance.name[:4] != "gke-"
@@ -7,7 +7,7 @@ class compute_instance_public_ip(Check):
 findings = []
 for instance in compute_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = compute_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.id
 report.resource_name = instance.name
 report.location = instance.zone
@@ -5,18 +5,32 @@ from prowler.providers.gcp.services.compute.compute_client import compute_client
 class compute_network_default_in_use(Check):
 def execute(self) -> Check_Report_GCP:
 findings = []
-report = Check_Report_GCP(self.metadata())
-report.project_id = compute_client.project_id
-report.resource_id = "default"
-report.resource_name = "default"
-report.location = "global"
-report.status = "PASS"
-report.status_extended = "Default network does not exist"
+# Check if default network is in use for each project
+projects_with_default_network = set()
 for network in compute_client.networks:
 if network.name == "default":
+projects_with_default_network.add(network.project_id)
+report = Check_Report_GCP(self.metadata())
+report.project_id = network.project_id
+report.resource_id = "default"
+report.resource_name = "default"
+report.location = "global"
 report.status = "FAIL"
-report.status_extended = "Default network is in use"
+report.status_extended = (
+f"Default network is in use in project {network.project_id}"
+)
+findings.append(report)
 
-findings.append(report)
+for project in compute_client.project_ids:
+if project not in projects_with_default_network:
+report = Check_Report_GCP(self.metadata())
+report.project_id = project
+report.resource_id = "default"
+report.resource_name = "default"
+report.location = "global"
+report.status = "PASS"
+report.status_extended = (
+f"Default network does not exist in project {project}"
+)
 
 return findings
@@ -7,7 +7,7 @@ class compute_serial_ports_in_use(Check):
 findings = []
 for instance in compute_client.instances:
 report = Check_Report_GCP(self.metadata())
-report.project_id = compute_client.project_id
+report.project_id = instance.project_id
 report.resource_id = instance.id
 report.resource_name = instance.name
 report.location = instance.zone
@@ -9,9 +9,10 @@ class Compute:
 def __init__(self, audit_info):
 self.service = "compute"
 self.api_version = "v1"
-self.project_id = audit_info.project_id
+self.project_ids = audit_info.project_ids
+self.default_project_id = audit_info.default_project_id
 self.client = generate_client(self.service, self.api_version, audit_info)
-self.zones = []
+self.zones = set()
 self.instances = []
 self.networks = []
 self.__get_zones__()
@@ -19,83 +20,88 @@ class Compute:
 self.__get_networks__()
 
 def __get_zones__(self):
-try:
-request = self.client.zones().list(project=self.project_id)
-while request is not None:
-response = request.execute()
-
-for zone in response.get("items", []):
-self.zones.append(zone["name"])
-
-request = self.client.zones().list_next(
-previous_request=request, previous_response=response
-)
-except Exception as error:
-logger.error(
-f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-)
+for project_id in self.project_ids:
+try:
+request = self.client.zones().list(project=project_id)
+while request is not None:
+response = request.execute()
+
+for zone in response.get("items", []):
+self.zones.add(zone["name"])
+
+request = self.client.zones().list_next(
+previous_request=request, previous_response=response
+)
+except Exception as error:
+logger.error(
+f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+)
 
 def __get_instances__(self):
-try:
-for zone in self.zones:
-request = self.client.instances().list(
-project=self.project_id, zone=zone
-)
-while request is not None:
-response = request.execute()
-
-for instance in response.get("items", []):
-public_ip = False
-for interface in instance["networkInterfaces"]:
-for config in interface.get("accessConfigs", []):
-if "natIP" in config:
-public_ip = True
-self.instances.append(
-Instance(
-name=instance["name"],
-id=instance["id"],
-zone=zone,
-public_ip=public_ip,
-metadata=instance["metadata"],
-shielded_enabled_vtpm=instance[
-"shieldedInstanceConfig"
-]["enableVtpm"],
-shielded_enabled_integrity_monitoring=instance[
-"shieldedInstanceConfig"
-]["enableIntegrityMonitoring"],
-service_accounts=instance["serviceAccounts"],
-)
-)
-
-request = self.client.instances().list_next(
-previous_request=request, previous_response=response
-)
-except Exception as error:
-logger.error(
-f"{zone} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-)
+for project_id in self.project_ids:
+try:
+for zone in self.zones:
+request = self.client.instances().list(
+project=project_id, zone=zone
+)
+while request is not None:
+response = request.execute()
+
+for instance in response.get("items", []):
+public_ip = False
+for interface in instance["networkInterfaces"]:
+for config in interface.get("accessConfigs", []):
+if "natIP" in config:
+public_ip = True
+self.instances.append(
+Instance(
+name=instance["name"],
+id=instance["id"],
+zone=zone,
+public_ip=public_ip,
+metadata=instance["metadata"],
+shielded_enabled_vtpm=instance[
+"shieldedInstanceConfig"
+]["enableVtpm"],
+shielded_enabled_integrity_monitoring=instance[
+"shieldedInstanceConfig"
+]["enableIntegrityMonitoring"],
+service_accounts=instance["serviceAccounts"],
+project_id=project_id,
+)
+)
+
+request = self.client.instances().list_next(
+previous_request=request, previous_response=response
+)
+except Exception as error:
+logger.error(
+f"{zone} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+)
 
 def __get_networks__(self):
-try:
-request = self.client.networks().list(project=self.project_id)
-while request is not None:
-response = request.execute()
-
-for network in response.get("items", []):
-self.networks.append(
-Network(
-name=network["name"],
-id=network["id"],
-)
-)
-
-request = self.client.networks().list_next(
-previous_request=request, previous_response=response
-)
-except Exception as error:
-logger.error(
-f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-)
+for project_id in self.project_ids:
+try:
+request = self.client.networks().list(project=project_id)
+while request is not None:
+response = request.execute()
+
+for network in response.get("items", []):
+self.networks.append(
+Network(
+name=network["name"],
+id=network["id"],
+project_id=project_id,
+)
+)
+
+request = self.client.networks().list_next(
+previous_request=request, previous_response=response
+)
+except Exception as error:
+logger.error(
+f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+)
 
 
 class Instance(BaseModel):
@@ -103,6 +109,7 @@ class Instance(BaseModel):
|
|||||||
id: str
|
id: str
|
||||||
zone: str
|
zone: str
|
||||||
public_ip: bool
|
public_ip: bool
|
||||||
|
project_id: str
|
||||||
metadata: dict
|
metadata: dict
|
||||||
shielded_enabled_vtpm: bool
|
shielded_enabled_vtpm: bool
|
||||||
shielded_enabled_integrity_monitoring: bool
|
shielded_enabled_integrity_monitoring: bool
|
||||||
@@ -112,3 +119,4 @@ class Instance(BaseModel):
|
|||||||
class Network(BaseModel):
|
class Network(BaseModel):
|
||||||
name: str
|
name: str
|
||||||
id: str
|
id: str
|
||||||
|
project_id: str
|
||||||
|
|||||||
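The hunks above all apply the same mechanics: each service collector loops over `audit_info.project_ids` instead of reading a single `project_id`, and every collected resource is tagged with the project it came from. A minimal standalone sketch of that per-project pagination pattern (illustrative function and variable names, not code taken from this commit):

```python
# Sketch of the per-project pagination pattern used by the service classes above.
# `client` is assumed to be a googleapiclient discovery resource for Compute;
# `project_ids` is a list of project ID strings. Names here are illustrative.
def collect_zone_names(client, project_ids):
    zones = set()  # a set, so the same zone name from two projects is stored once
    for project_id in project_ids:
        try:
            request = client.zones().list(project=project_id)
            while request is not None:
                response = request.execute()
                for zone in response.get("items", []):
                    zones.add(zone["name"])
                # list_next() returns None once the last page has been fetched
                request = client.zones().list_next(
                    previous_request=request, previous_response=response
                )
        except Exception as error:
            # a project that cannot be listed should not abort the other projects
            print(f"{project_id}: {error}")
    return zones
```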
@@ -7,7 +7,7 @@ class compute_shielded_vm_enabled(Check):
         findings = []
         for instance in compute_client.instances:
             report = Check_Report_GCP(self.metadata())
-            report.project_id = compute_client.project_id
+            report.project_id = instance.project_id
             report.resource_id = instance.id
             report.resource_name = instance.name
             report.location = instance.zone
@@ -10,7 +10,7 @@ class iam_sa_no_administrative_privileges(Check):
         findings = []
         for account in iam_client.service_accounts:
             report = Check_Report_GCP(self.metadata())
-            report.project_id = iam_client.project_id
+            report.project_id = account.project_id
             report.resource_id = account.email
             report.resource_name = account.name
             report.location = iam_client.region
@@ -7,7 +7,7 @@ class iam_sa_no_user_managed_keys(Check):
         findings = []
         for account in iam_client.service_accounts:
             report = Check_Report_GCP(self.metadata())
-            report.project_id = iam_client.project_id
+            report.project_id = account.project_id
             report.resource_id = account.email
             report.resource_name = account.name
             report.location = iam_client.region
@@ -12,7 +12,7 @@ class iam_sa_user_managed_key_rotate_90_days(Check):
             if key.type == "USER_MANAGED":
                 last_rotated = (datetime.now() - key.valid_after).days
                 report = Check_Report_GCP(self.metadata())
-                report.project_id = iam_client.project_id
+                report.project_id = account.project_id
                 report.resource_id = key.name
                 report.resource_name = account.email
                 report.location = iam_client.region
@@ -11,7 +11,7 @@ class IAM:
     def __init__(self, audit_info):
         self.service = "iam"
         self.api_version = "v1"
-        self.project_id = audit_info.project_id
+        self.project_ids = audit_info.project_ids
         self.region = "global"
         self.client = generate_client(self.service, self.api_version, audit_info)
         self.service_accounts = []
@@ -22,33 +22,35 @@ class IAM:
         return self.client

     def __get_service_accounts__(self):
-        try:
-            request = (
-                self.client.projects()
-                .serviceAccounts()
-                .list(name="projects/" + self.project_id)
-            )
-            while request is not None:
-                response = request.execute()
-
-                for account in response["accounts"]:
-                    self.service_accounts.append(
-                        ServiceAccount(
-                            name=account["name"],
-                            email=account["email"],
-                            display_name=account.get("displayName", ""),
-                        )
-                    )
-
-                request = (
-                    self.client.projects()
-                    .serviceAccounts()
-                    .list_next(previous_request=request, previous_response=response)
-                )
-        except Exception as error:
-            logger.error(
-                f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+        for project_id in self.project_ids:
+            try:
+                request = (
+                    self.client.projects()
+                    .serviceAccounts()
+                    .list(name="projects/" + project_id)
+                )
+                while request is not None:
+                    response = request.execute()
+
+                    for account in response["accounts"]:
+                        self.service_accounts.append(
+                            ServiceAccount(
+                                name=account["name"],
+                                email=account["email"],
+                                display_name=account.get("displayName", ""),
+                                project_id=project_id,
+                            )
+                        )
+
+                    request = (
+                        self.client.projects()
+                        .serviceAccounts()
+                        .list_next(previous_request=request, previous_response=response)
+                    )
+            except Exception as error:
+                logger.error(
+                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )

     def __get_service_accounts_keys__(self):
         try:
@@ -59,7 +61,7 @@ class IAM:
                     .keys()
                     .list(
                         name="projects/"
-                        + self.project_id
+                        + sa.project_id
                         + "/serviceAccounts/"
                         + sa.email
                     )
@@ -100,3 +102,4 @@ class ServiceAccount(BaseModel):
     email: str
     display_name: str
     keys: list[Key] = []
+    project_id: str
@@ -7,7 +7,7 @@ class kms_key_not_publicly_accessible(Check):
         findings = []
         for key in kms_client.crypto_keys:
             report = Check_Report_GCP(self.metadata())
-            report.project_id = kms_client.project_id
+            report.project_id = key.project_id
             report.resource_id = key.name
             report.resource_name = key.name
             report.location = key.location
@@ -7,7 +7,7 @@ class kms_key_rotation_enabled(Check):
         findings = []
         for key in kms_client.crypto_keys:
             report = Check_Report_GCP(self.metadata())
-            report.project_id = kms_client.project_id
+            report.project_id = key.project_id
             report.resource_id = key.name
             report.resource_name = key.name
             report.location = key.location
@@ -11,7 +11,7 @@ class KMS:
     def __init__(self, audit_info):
         self.service = "cloudkms"
         self.api_version = "v1"
-        self.project_id = audit_info.project_id
+        self.project_ids = audit_info.project_ids
         self.region = "global"
         self.client = generate_client(self.service, self.api_version, audit_info)
         self.locations = []
@@ -26,33 +26,39 @@ class KMS:
         return self.client

     def __get_locations__(self):
-        try:
-            request = (
-                self.client.projects()
-                .locations()
-                .list(name="projects/" + self.project_id)
-            )
-            while request is not None:
-                response = request.execute()
-
-                for location in response["locations"]:
-                    self.locations.append(location["name"])
-
-                request = (
-                    self.client.projects()
-                    .locations()
-                    .list_next(previous_request=request, previous_response=response)
-                )
-        except Exception as error:
-            logger.error(
-                f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+        for project_id in self.project_ids:
+            try:
+                request = (
+                    self.client.projects()
+                    .locations()
+                    .list(name="projects/" + project_id)
+                )
+                while request is not None:
+                    response = request.execute()
+
+                    for location in response["locations"]:
+                        self.locations.append(
+                            KeyLocation(name=location["name"], project_id=project_id)
+                        )
+
+                    request = (
+                        self.client.projects()
+                        .locations()
+                        .list_next(previous_request=request, previous_response=response)
+                    )
+            except Exception as error:
+                logger.error(
+                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )

     def __get_key_rings__(self):
-        try:
-            for location in self.locations:
-                request = (
-                    self.client.projects().locations().keyRings().list(parent=location)
-                )
+        for location in self.locations:
+            try:
+                request = (
+                    self.client.projects()
+                    .locations()
+                    .keyRings()
+                    .list(parent=location.name)
+                )
                 while request is not None:
                     response = request.execute()
@@ -61,6 +67,7 @@ class KMS:
                         self.key_rings.append(
                             KeyRing(
                                 name=ring["name"],
+                                project_id=location.project_id,
                             )
                         )

@@ -70,14 +77,14 @@ class KMS:
                        .keyRings()
                        .list_next(previous_request=request, previous_response=response)
                    )
-        except Exception as error:
-            logger.error(
-                f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+            except Exception as error:
+                logger.error(
+                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )

     def __get_crypto_keys__(self):
-        try:
-            for ring in self.key_rings:
+        for ring in self.key_rings:
+            try:
                 request = (
                     self.client.projects()
                     .locations()
@@ -95,6 +102,7 @@ class KMS:
                                location=key["name"].split("/")[3],
                                rotation_period=key.get("rotationPeriod"),
                                key_ring=ring.name,
+                                project_id=ring.project_id,
                            )
                        )

@@ -105,14 +113,14 @@ class KMS:
                        .cryptoKeys()
                        .list_next(previous_request=request, previous_response=response)
                    )
-        except Exception as error:
-            logger.error(
-                f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+            except Exception as error:
+                logger.error(
+                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )

     def __get_crypto_keys_iam_policy__(self):
-        try:
-            for key in self.crypto_keys:
+        for key in self.crypto_keys:
+            try:
                 request = (
                     self.client.projects()
                     .locations()
@@ -124,14 +132,20 @@ class KMS:

                for binding in response.get("bindings", []):
                    key.members.extend(binding.get("members", []))
-        except Exception as error:
-            logger.error(
-                f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+            except Exception as error:
+                logger.error(
+                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )


+class KeyLocation(BaseModel):
+    name: str
+    project_id: str
+
+
 class KeyRing(BaseModel):
     name: str
+    project_id: str


 class CriptoKey(BaseModel):
@@ -140,3 +154,4 @@ class CriptoKey(BaseModel):
     rotation_period: Optional[str]
     key_ring: str
     members: list = []
+    project_id: str
@@ -10,32 +10,37 @@ class logging_log_metric_filter_and_alert_for_audit_configuration_changes_enable
 ):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'protoPayload.methodName="SetIamPolicy" AND protoPayload.serviceData.policyDelta.auditConfigDeltas:*'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                'protoPayload.methodName="SetIamPolicy" AND protoPayload.serviceData.policyDelta.auditConfigDeltas:*'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
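Each of the logging/monitoring checks in this commit is rewritten around the same two-pass idea: first emit one finding per matching metric filter (PASS only when an alert policy references it), then emit a FAIL for every scanned project that has no matching filter at all. Stripped of the Prowler plumbing, the control flow is roughly the following (the dataclasses are stand-ins for the service models, not the project's classes):

```python
from dataclasses import dataclass, field

@dataclass
class Metric:           # stand-in for the Logging service model
    name: str
    filter: str
    project_id: str

@dataclass
class AlertPolicy:      # stand-in for the Monitoring service model
    display_name: str
    filters: list = field(default_factory=list)

@dataclass
class Finding:          # stand-in for Check_Report_GCP
    project_id: str
    resource_id: str
    status: str
    status_extended: str

def evaluate(metrics, alert_policies, project_ids, expected_filter):
    findings = []
    projects_with_metric = set()
    for metric in metrics:
        if expected_filter in metric.filter:
            projects_with_metric.add(metric.project_id)
            status = "FAIL"
            detail = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
            for policy in alert_policies:
                if any(metric.name in f for f in policy.filters):
                    status = "PASS"
                    detail = f"Log metric filter {metric.name} found with alert policy {policy.display_name} associated in project {metric.project_id}."
                    break
            findings.append(Finding(metric.project_id, metric.name, status, detail))
    # projects with no matching metric at all still get an explicit FAIL
    for project in project_ids:
        if project not in projects_with_metric:
            findings.append(
                Finding(
                    project,
                    project,
                    "FAIL",
                    f"There are no log metric filters or alerts associated in project {project}.",
                )
            )
    return findings
```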
@@ -8,32 +8,37 @@ from prowler.providers.gcp.services.monitoring.monitoring_client import (
 class logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'resource.type="gcs_bucket" AND protoPayload.methodName="storage.setIamPermissions"'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                'resource.type="gcs_bucket" AND protoPayload.methodName="storage.setIamPermissions"'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -8,32 +8,37 @@ from prowler.providers.gcp.services.monitoring.monitoring_client import (
 class logging_log_metric_filter_and_alert_for_custom_role_changes_enabled(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'resource.type="iam_role" AND (protoPayload.methodName="google.iam.admin.v1.CreateRole" OR protoPayload.methodName="google.iam.admin.v1.DeleteRole" OR protoPayload.methodName="google.iam.admin.v1.UpdateRole")'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                'resource.type="iam_role" AND (protoPayload.methodName="google.iam.admin.v1.CreateRole" OR protoPayload.methodName="google.iam.admin.v1.DeleteRole" OR protoPayload.methodName="google.iam.admin.v1.UpdateRole")'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -8,32 +8,37 @@ from prowler.providers.gcp.services.monitoring.monitoring_client import (
 class logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    '(protoPayload.serviceName="cloudresourcemanager.googleapis.com") AND (ProjectOwnership OR projectOwnerInvitee) OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="REMOVE" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner") OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="ADD" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner")'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                '(protoPayload.serviceName="cloudresourcemanager.googleapis.com") AND (ProjectOwnership OR projectOwnerInvitee) OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="REMOVE" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner") OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="ADD" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner")'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -10,32 +10,34 @@ class logging_log_metric_filter_and_alert_for_sql_instance_configuration_changes
 ):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'protoPayload.methodName="cloudsql.instances.update"'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if 'protoPayload.methodName="cloudsql.instances.update"' in metric.filter:
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -8,32 +8,37 @@ from prowler.providers.gcp.services.monitoring.monitoring_client import (
 class logging_log_metric_filter_and_alert_for_vpc_firewall_rule_changes_enabled(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'resource.type="gce_firewall_rule" AND (protoPayload.methodName:"compute.firewalls.patch" OR protoPayload.methodName:"compute.firewalls.insert" OR protoPayload.methodName:"compute.firewalls.delete")'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                'resource.type="gce_firewall_rule" AND (protoPayload.methodName:"compute.firewalls.patch" OR protoPayload.methodName:"compute.firewalls.insert" OR protoPayload.methodName:"compute.firewalls.delete")'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -8,32 +8,37 @@ from prowler.providers.gcp.services.monitoring.monitoring_client import (
 class logging_log_metric_filter_and_alert_for_vpc_network_changes_enabled(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'resource.type="gce_network" AND (protoPayload.methodName:"compute.networks.insert" OR protoPayload.methodName:"compute.networks.patch" OR protoPayload.methodName:"compute.networks.delete" OR protoPayload.methodName:"compute.networks.removePeering" OR protoPayload.methodName:"compute.networks.addPeering")'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                'resource.type="gce_network" AND (protoPayload.methodName:"compute.networks.insert" OR protoPayload.methodName:"compute.networks.patch" OR protoPayload.methodName:"compute.networks.delete" OR protoPayload.methodName:"compute.networks.removePeering" OR protoPayload.methodName:"compute.networks.addPeering")'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -8,32 +8,37 @@ from prowler.providers.gcp.services.monitoring.monitoring_client import (
 class logging_log_metric_filter_and_alert_for_vpc_network_route_changes_enabled(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        report = Check_Report_GCP(self.metadata())
-        report.project_id = logging_client.project_id
-        report.resource_id = logging_client.project_id
-        report.resource_name = ""
-        report.location = logging_client.region
-        report.status = "FAIL"
-        report.status_extended = "There are no log metric filters or alerts associated."
-        if logging_client.metrics:
-            for metric in logging_client.metrics:
-                if (
-                    'resource.type="gce_route" AND (protoPayload.methodName:"compute.routes.delete" OR protoPayload.methodName:"compute.routes.insert")'
-                    in metric.filter
-                ):
-                    report = Check_Report_GCP(self.metadata())
-                    report.project_id = logging_client.project_id
-                    report.resource_id = metric.name
-                    report.resource_name = metric.name
-                    report.location = logging_client.region
-                    report.status = "FAIL"
-                    report.status_extended = f"Log metric filter {metric.name} found but no alerts associated."
-                    for alert_policy in monitoring_client.alert_policies:
-                        for filter in alert_policy.filters:
-                            if metric.name in filter:
-                                report.status = "PASS"
-                                report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated."
-                                break
-        findings.append(report)
+        projects_with_metric = set()
+        for metric in logging_client.metrics:
+            if (
+                'resource.type="gce_route" AND (protoPayload.methodName:"compute.routes.delete" OR protoPayload.methodName:"compute.routes.insert")'
+                in metric.filter
+            ):
+                report = Check_Report_GCP(self.metadata())
+                projects_with_metric.add(metric.project_id)
+                report.project_id = metric.project_id
+                report.resource_id = metric.name
+                report.resource_name = metric.name
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
+                for alert_policy in monitoring_client.alert_policies:
+                    for filter in alert_policy.filters:
+                        if metric.name in filter:
+                            report.status = "PASS"
+                            report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
+                            break
+                findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_metric:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
+                findings.append(report)

         return findings
@@ -10,7 +10,8 @@ class Logging:
         self.service = "logging"
         self.api_version = "v2"
         self.region = "global"
-        self.project_id = audit_info.project_id
+        self.project_ids = audit_info.project_ids
+        self.default_project_id = audit_info.default_project_id
         self.client = generate_client(self.service, self.api_version, audit_info)
         self.sinks = []
         self.metrics = []
@@ -18,65 +19,71 @@ class Logging:
         self.__get_metrics__()

     def __get_sinks__(self):
-        try:
-            request = self.client.sinks().list(parent=f"projects/{self.project_id}")
-            while request is not None:
-                response = request.execute()
-
-                for sink in response.get("sinks", []):
-                    self.sinks.append(
-                        Sink(
-                            name=sink["name"],
-                            destination=sink["destination"],
-                            filter=sink.get("filter", "all"),
-                        )
-                    )
-
-                request = self.client.sinks().list_next(
-                    previous_request=request, previous_response=response
-                )
-        except Exception as error:
-            logger.error(
-                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+        for project_id in self.project_ids:
+            try:
+                request = self.client.sinks().list(parent=f"projects/{project_id}")
+                while request is not None:
+                    response = request.execute()
+
+                    for sink in response.get("sinks", []):
+                        self.sinks.append(
+                            Sink(
+                                name=sink["name"],
+                                destination=sink["destination"],
+                                filter=sink.get("filter", "all"),
+                                project_id=project_id,
+                            )
+                        )
+
+                    request = self.client.sinks().list_next(
+                        previous_request=request, previous_response=response
+                    )
+            except Exception as error:
+                logger.error(
+                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )

     def __get_metrics__(self):
-        try:
-            request = (
-                self.client.projects()
-                .metrics()
-                .list(parent=f"projects/{self.project_id}")
-            )
-            while request is not None:
-                response = request.execute()
-
-                for metric in response.get("metrics", []):
-                    self.metrics.append(
-                        Metric(
-                            name=metric["name"],
-                            type=metric["metricDescriptor"]["type"],
-                            filter=metric["filter"],
-                        )
-                    )
-
-                request = (
-                    self.client.projects()
-                    .metrics()
-                    .list_next(previous_request=request, previous_response=response)
-                )
-        except Exception as error:
-            logger.error(
-                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+        for project_id in self.project_ids:
+            try:
+                request = (
+                    self.client.projects()
+                    .metrics()
+                    .list(parent=f"projects/{project_id}")
+                )
+                while request is not None:
+                    response = request.execute()
+
+                    for metric in response.get("metrics", []):
+                        self.metrics.append(
+                            Metric(
+                                name=metric["name"],
+                                type=metric["metricDescriptor"]["type"],
+                                filter=metric["filter"],
+                                project_id=project_id,
+                            )
+                        )
+
+                    request = (
+                        self.client.projects()
+                        .metrics()
+                        .list_next(previous_request=request, previous_response=response)
+                    )
+            except Exception as error:
+                logger.error(
+                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )


 class Sink(BaseModel):
     name: str
     destination: str
     filter: str
+    project_id: str


 class Metric(BaseModel):
     name: str
     type: str
     filter: str
+    project_id: str
@@ -5,28 +5,30 @@ from prowler.providers.gcp.services.logging.logging_client import logging_client
 class logging_sink_created(Check):
     def execute(self) -> Check_Report_GCP:
         findings = []
-        if not logging_client.sinks:
-            report = Check_Report_GCP(self.metadata())
-            report.project_id = logging_client.project_id
-            report.resource_id = logging_client.project_id
-            report.resource_name = ""
-            report.location = logging_client.region
-            report.status = "FAIL"
-            report.status_extended = (
-                "There are no logging sinks to export copies of all the log entries"
-            )
-        else:
-            for sink in logging_client.sinks:
-                report = Check_Report_GCP(self.metadata())
-                report.project_id = logging_client.project_id
-                report.resource_id = sink.name
-                report.resource_name = sink.name
-                report.location = logging_client.region
-                report.status = "FAIL"
-                report.status_extended = f"Sink {sink.name} is enabled but not exporting copies of all the log entries"
-                if sink.filter == "all":
-                    report.status = "PASS"
-                    report.status_extended = f"Sink {sink.name} is enabled exporting copies of all the log entries"
-        findings.append(report)
+        projects_with_sink = set()
+        for sink in logging_client.sinks:
+            report = Check_Report_GCP(self.metadata())
+            projects_with_sink.add(sink.project_id)
+            report.project_id = sink.project_id
+            report.resource_id = sink.name
+            report.resource_name = sink.name
+            report.location = logging_client.region
+            report.status = "FAIL"
+            report.status_extended = f"Sink {sink.name} is enabled but not exporting copies of all the log entries in project {sink.project_id}"
+            if sink.filter == "all":
+                report.status = "PASS"
+                report.status_extended = f"Sink {sink.name} is enabled exporting copies of all the log entries in project {sink.project_id}"
+            findings.append(report)
+
+        for project in logging_client.project_ids:
+            if project not in projects_with_sink:
+                report = Check_Report_GCP(self.metadata())
+                report.project_id = project
+                report.resource_id = project
+                report.resource_name = ""
+                report.location = logging_client.region
+                report.status = "FAIL"
+                report.status_extended = f"There are no logging sinks to export copies of all the log entries in project {project}"
+                findings.append(report)

         return findings
@@ -10,43 +10,45 @@ class Monitoring:
         self.service = "monitoring"
         self.api_version = "v3"
         self.region = "global"
-        self.project_id = audit_info.project_id
+        self.project_ids = audit_info.project_ids
         self.client = generate_client(self.service, self.api_version, audit_info)
         self.alert_policies = []
         self.__get_alert_policies__()

     def __get_alert_policies__(self):
-        try:
-            request = (
-                self.client.projects()
-                .alertPolicies()
-                .list(name=f"projects/{self.project_id}")
-            )
-            while request is not None:
-                response = request.execute()
-
-                for policy in response.get("alertPolicies", []):
-                    filters = []
-                    for condition in policy["conditions"]:
-                        filters.append(condition["conditionThreshold"]["filter"])
-                    self.alert_policies.append(
-                        AlertPolicy(
-                            name=policy["name"],
-                            display_name=policy["displayName"],
-                            enabled=policy["enabled"],
-                            filters=filters,
-                        )
-                    )
-
-                request = (
-                    self.client.projects()
-                    .alertPolicies()
-                    .list_next(previous_request=request, previous_response=response)
-                )
-        except Exception as error:
-            logger.error(
-                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
+        for project_id in self.project_ids:
+            try:
+                request = (
+                    self.client.projects()
+                    .alertPolicies()
+                    .list(name=f"projects/{project_id}")
+                )
+                while request is not None:
+                    response = request.execute()
+
+                    for policy in response.get("alertPolicies", []):
+                        filters = []
+                        for condition in policy["conditions"]:
+                            filters.append(condition["conditionThreshold"]["filter"])
+                        self.alert_policies.append(
+                            AlertPolicy(
+                                name=policy["name"],
+                                display_name=policy["displayName"],
+                                enabled=policy["enabled"],
+                                filters=filters,
+                                project_id=project_id,
+                            )
+                        )
+
+                    request = (
+                        self.client.projects()
+                        .alertPolicies()
+                        .list_next(previous_request=request, previous_response=response)
+                    )
+            except Exception as error:
+                logger.error(
+                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+                )


 class AlertPolicy(BaseModel):
@@ -54,3 +56,4 @@ class AlertPolicy(BaseModel):
     display_name: str
     filters: list[str]
     enabled: bool
+    project_id: str
@@ -980,3 +980,14 @@ class Test_Parser:
         parsed = self.parser.parse(command)
         assert parsed.provider == "gcp"
         assert parsed.credentials_file == file
+
+    def test_parser_gcp_project_ids(self):
+        argument = "--project-ids"
+        project_1 = "test_project_1"
+        project_2 = "test_project_2"
+        command = [prowler_command, "gcp", argument, project_1, project_2]
+        parsed = self.parser.parse(command)
+        assert parsed.provider == "gcp"
+        assert len(parsed.project_ids) == 2
+        assert parsed.project_ids[0] == project_1
+        assert parsed.project_ids[1] == project_2
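The new test only exercises the CLI surface; the parser definition itself is outside this excerpt. A flag that accepts several values the way the test expects would typically be declared with argparse along these lines (a hedged illustration, not the actual Prowler parser code):

```python
import argparse

# Hypothetical stand-in for the GCP subcommand parser used in the test above.
parser = argparse.ArgumentParser(prog="prowler gcp")
parser.add_argument(
    "--project-ids",
    nargs="+",      # one or more project IDs may follow the flag
    default=None,   # absence of the flag is handled elsewhere
    help="GCP project IDs to audit",
)

args = parser.parse_args(["--project-ids", "test_project_1", "test_project_2"])
assert args.project_ids == ["test_project_1", "test_project_2"]
```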
@@ -45,7 +45,8 @@ class Test_Slack_Integration:
         )
         gcp_audit_info = GCP_Audit_Info(
             credentials=None,
-            project_id="test-project",
+            default_project_id="test-project1",
+            project_ids=["test-project1", "test-project2"],
             audit_resources=None,
             audit_metadata=None,
         )
@@ -69,7 +70,7 @@ class Test_Slack_Integration:
             aws_logo,
         )
         assert create_message_identity("gcp", gcp_audit_info) == (
-            f"GCP Project *{gcp_audit_info.project_id}*",
+            f"GCP Projects *{', '.join(gcp_audit_info.project_ids)}*",
             gcp_logo,
         )
         assert create_message_identity("azure", azure_audit_info) == (
@@ -83,6 +83,10 @@ def mock_set_gcp_credentials(*_):
     return (None, "project")


+def mock_get_project_ids(*_):
+    return ["project"]
+
+
 class Test_Set_Audit_Info:
     # Mocked Audit Info
     def set_mocked_audit_info(self):
@@ -166,6 +170,7 @@ class Test_Set_Audit_Info:
         assert isinstance(audit_info, Azure_Audit_Info)

     @patch.object(GCP_Provider, "__set_credentials__", new=mock_set_gcp_credentials)
+    @patch.object(GCP_Provider, "get_project_ids", new=mock_get_project_ids)
     @patch.object(Audit_Info, "print_gcp_credentials", new=mock_print_audit_credentials)
     def test_set_audit_info_gcp(self):
         provider = "gcp"
@@ -179,6 +184,7 @@ class Test_Set_Audit_Info:
             "subscriptions": None,
             # We need to set exactly one auth method
             "credentials_file": None,
+            "project_ids": ["project"],
         }

         audit_info = set_provider_audit_info(provider, arguments)
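These tests stub out project discovery so that building the audit info never touches the GCP APIs. A minimal sketch of the same `patch.object` pattern on a toy provider class (the real `GCP_Provider` internals are not reproduced here; the toy class is an assumption for illustration):

```python
from unittest.mock import patch


class ToyGCPProvider:
    """Stand-in for a provider whose get_project_ids would normally call GCP APIs."""

    def get_project_ids(self):
        raise RuntimeError("would call Cloud APIs in real life")


def mock_get_project_ids(*_):
    # Test stub: pretend exactly one project is accessible.
    return ["project"]


with patch.object(ToyGCPProvider, "get_project_ids", new=mock_get_project_ids):
    assert ToyGCPProvider().get_project_ids() == ["project"]
```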
@@ -45,7 +45,8 @@ class Test_Common_Output_Options:
     def set_mocked_gcp_audit_info(self):
         audit_info = GCP_Audit_Info(
             credentials=None,
-            project_id="test-project",
+            default_project_id="test-project1",
+            project_ids=["test-project1", "test-project2"],
             audit_resources=None,
             audit_metadata=None,
         )
@@ -347,7 +348,7 @@ class Test_Common_Output_Options:
                 </div>
                 <ul class="list-group list-group-flush">
                     <li class="list-group-item">
-                        <b>GCP Project ID:</b> {audit_info.project_id}
+                        <b>GCP Project IDs:</b> {', '.join(audit_info.project_ids)}
                     </li>
                 </ul>
             </div>
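The HTML report header now lists every scanned project on one line. A quick sketch of how that list item could be produced from the audit info (the CSS class and label come from the test assertion above; everything else is illustrative):

```python
# Illustrative only: build the "GCP Project IDs" list item the HTML assertion expects.
project_ids = ["test-project1", "test-project2"]

list_item = (
    '<li class="list-group-item">\n'
    f"    <b>GCP Project IDs:</b> {', '.join(project_ids)}\n"
    "</li>"
)

print(list_item)
# <li class="list-group-item">
#     <b>GCP Project IDs:</b> test-project1, test-project2
# </li>
```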
@@ -32,11 +32,12 @@ class Test_compute_default_service_account_in_use:
             metadata={},
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
-            service_accounts=[{"email": "123-compute@developer.gserviceaccount.com"}],
+            service_accounts=[{"email": "custom@developer.gserviceaccount.com"}],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -72,10 +73,11 @@ class Test_compute_default_service_account_in_use:
             service_accounts=[
                 {"email": f"{GCP_PROJECT_ID}-compute@developer.gserviceaccount.com"}
             ],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -111,10 +113,11 @@ class Test_compute_default_service_account_in_use:
             service_accounts=[
                 {"email": f"{GCP_PROJECT_ID}-compute@developer.gserviceaccount.com"}
             ],
+            project_id=GCP_PROJECT_ID,
        )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
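With all projects scanned in bulk, each mocked `Instance` now carries its own `project_id` and the client exposes a `project_ids` list instead of a single `project_id`. A toy sketch of the per-instance pattern these tests set up (the `Instance` model and check logic below are simplified stand-ins, not Prowler's actual implementation):

```python
from dataclasses import dataclass


@dataclass
class Instance:
    # Simplified stand-in for the compute Instance model; only the fields the sketch needs.
    name: str
    project_id: str
    service_accounts: list


@dataclass
class ComputeClient:
    project_ids: list   # every project in scope, not a single project_id
    instances: list     # instances gathered across all of those projects


def default_service_account_in_use(client: ComputeClient) -> list[dict]:
    findings = []
    for instance in client.instances:
        emails = [sa["email"] for sa in instance.service_accounts]
        default_sa = f"{instance.project_id}-compute@developer.gserviceaccount.com"
        findings.append(
            {
                "project_id": instance.project_id,  # finding is attributed per instance
                "resource": instance.name,
                "status": "FAIL" if default_sa in emails else "PASS",
            }
        )
    return findings


client = ComputeClient(
    project_ids=["123456789012"],
    instances=[
        Instance(
            name="vm-1",
            project_id="123456789012",
            service_accounts=[{"email": "custom@developer.gserviceaccount.com"}],
        )
    ],
)
assert default_service_account_in_use(client)[0]["status"] == "PASS"
```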
@@ -35,10 +35,11 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
             service_accounts=[
                 {"email": "123-compute@developer.gserviceaccount.com", "scopes": []}
             ],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -77,10 +78,11 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
                     "scopes": ["https://www.googleapis.com/auth/cloud-platform"],
                 }
             ],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -119,10 +121,11 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
                     "scopes": ["https://www.googleapis.com/auth/cloud-platform"],
                 }
             ],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -34,6 +34,7 @@ class Test_compute_serial_ports_in_use:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
@@ -71,6 +72,7 @@ class Test_compute_serial_ports_in_use:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
@@ -108,6 +110,7 @@ class Test_compute_serial_ports_in_use:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
@@ -145,6 +148,7 @@ class Test_compute_serial_ports_in_use:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
@@ -182,6 +186,7 @@ class Test_compute_serial_ports_in_use:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
@@ -7,7 +7,7 @@ GCP_PROJECT_ID = "123456789012"
 class Test_compute_shielded_vm_enabled:
     def test_compute_no_instances(self):
         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = []

         with mock.patch(
@@ -34,10 +34,11 @@ class Test_compute_shielded_vm_enabled:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -71,10 +72,11 @@ class Test_compute_shielded_vm_enabled:
             shielded_enabled_vtpm=False,
             shielded_enabled_integrity_monitoring=True,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(
@@ -108,10 +110,11 @@ class Test_compute_shielded_vm_enabled:
             shielded_enabled_vtpm=True,
             shielded_enabled_integrity_monitoring=False,
             service_accounts=[],
+            project_id=GCP_PROJECT_ID,
         )

         compute_client = mock.MagicMock
-        compute_client.project_id = GCP_PROJECT_ID
+        compute_client.project_ids = [GCP_PROJECT_ID]
         compute_client.instances = [instance]

         with mock.patch(