feat(pip): Prepare for PyPI (#1531)

This commit is contained in:
Sergio Garcia
2022-12-13 09:07:55 +01:00
committed by GitHub
parent 0cd13b90f4
commit bb09267f2a
1461 changed files with 6625 additions and 6904 deletions


@@ -2,11 +2,11 @@
### Use Case:
Customers look to use multiple auditing tools in order to provide quick assessments about their AWS environments. These tools allow reports to be generated for review by the customer and the appropriate teams, which in turn helps them begin security remediation efforts.
Prowler and ScoutSuite are two publicly available security auditing tools that provide comprehensive reports for customers using AWS.
ShortCut is a mechanism that lets customers run both Prowler and ScoutSuite within an AWS account using AWS CloudShell. With ShortCut, customers can quickly perform an audit of their environment without having to provision IAM Access Keys or EC2 instances.
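For example, a minimal CloudShell session looks like the sketch below (the ShortCut scripts in this repository automate these steps; the flags shown are only an example):

```bash
# Inside AWS CloudShell -- credentials come from the signed-in console session,
# so no IAM access keys or EC2 instances are needed.
git clone https://github.com/prowler-cloud/prowler
cd prowler
./prowler -M csv,html   # reports are written under ./output/
```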
### Prerequisites:
@@ -54,4 +54,4 @@ screen -r scoutsuite
8. In the Download File prompt, use the file path and file name to download the results.
![Alt text](screenshots/download_prompt.png)


@@ -17,7 +17,7 @@ mkdir ${account}-results
cd ~
git clone https://github.com/prowler-cloud/prowler
pip3 install detect-secrets --user
cd prowler
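# run Prowler detached in a screen session, then zip its output into the results folder for download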
screen -dmS prowler sh -c "./prowler -M csv,html;cd ~;zip -r ${account}-results/prowler-${account}.zip /home/cloudshell-user/prowler/output"
# ScoutSuite


@@ -12,14 +12,14 @@ Parameters:
Type: Number
Default: 3
AllowedValues: [1, 3, 5, 7, 14, 30, 60, 90, 180, 365]
ProwlerOptions:
Description: 'Options to pass to the Prowler command; make sure at least -M junit-xml is used for CodeBuild reports. Use -r for the region to send API queries to, -f to filter a single region, -M for output formats, and -c for comma-separated checks; to run all checks, omit -c and -g; for more options see -h. For a complete assessment use "-M text,junit-xml,html,csv,json"; for SecurityHub integration use "-r region -f region -M text,junit-xml,html,csv,json,json-asff -S -q"'
Type: String
# The Prowler command below runs a set of checks; configure it based on your needs. With no options it runs all checks in all regions.
# The -M junit-xml option is required in order to get the report in CodeBuild.
Default: -r eu-west-1 -f eu-west-1 -M text,junit-xml,html,csv,json -c check11,check12,check13,check14
ProwlerScheduler:
Description: The time when Prowler will run, in cron format. Default is daily at 22:00 (10 PM), 'cron(0 22 * * ? *)'; 'rate(5 hours)' also works for a run every 5 hours. More info here: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html.
Type: String
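# The two schedule formats above can be tried out directly with the EventBridge CLI, e.g.
# (rule names below are illustrative only, not part of this template):
#   aws events put-rule --name prowler-daily   --schedule-expression 'cron(0 22 * * ? *)'
#   aws events put-rule --name prowler-5hourly --schedule-expression 'rate(5 hours)'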
@@ -297,14 +297,14 @@ Resources:
python: 3.9
commands:
- echo "Installing Prowler and dependencies..."
- pip3 install detect-secrets
build:
commands:
- echo "Running Prowler as ./prowler $PROWLER_OPTIONS"
- ./prowler $PROWLER_OPTIONS
post_build:
commands:
- echo "Uploading reports to S3..."
- echo "Uploading reports to S3..."
- aws s3 cp --sse AES256 output/ s3://$BUCKET_REPORT/ --recursive
- echo "Done!"
reports:


@@ -12,15 +12,15 @@
# specific language governing permissions and limitations under the License.
## This script helps to generate a single html report from a single or multiple csv
# output reports.
# I use it when I want to visualize multiple accounts' reports in a single view.
# Report information and Assessment Summary will be empty because the variables
# they rely on are not set here.
## First: Remove the CSV header from each output report.
## Second: If you want to aggregate all csv files into one, you can do it like this:
# find . -type f -name '*.csv' -exec cat {} + > prowler-output-unified-csv.file
# use .file instead of .csv unless you want to get into an infinite loop ;)
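# A minimal sketch for the "First" step above, assuming each report has a one-line header:
#   for f in ./*.csv; do tail -n +2 "$f" > "$f.tmp" && mv "$f.tmp" "$f"; done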
@@ -30,7 +30,7 @@
OUTPUT_FILE_NAME="report-unified-csv"
EXTENSION_HTML="html"
INPUT=$1
IFS=',' # used inside the while loop for csv delimiter
HTML_LOGO_URL="https://github.com/prowler-cloud/prowler/"
HTML_LOGO_IMG="https://raw.githubusercontent.com/prowler-cloud/prowler/master/util/html/prowler-logo-new.png"
@@ -41,7 +41,7 @@ addHtmlHeader() {
if [[ $PROFILE == "" ]];then
PROFILE="ENV"
fi
if [[ -z $HTML_REPORT_INIT ]]; then
cat <<EOF
<!DOCTYPE html>
<html lang="en">
@@ -112,7 +112,7 @@ addHtmlHeader() {
</li>
</ul>
</div>
* Sortable columns are CheckID (default) and Result
</div>
</div>
<div class="row mt-3">
@@ -135,7 +135,7 @@ addHtmlHeader() {
<tbody>
EOF
fi
}
addHtmlFooter() {
@@ -170,7 +170,7 @@ unset HTML_REPORT_INIT
addHtmlHeader > ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
while IFS=, read -r PROFILE ACCOUNT_NUM REPREGION TITLE_ID RESULT SCORED LEVEL TITLE_TEXT NOTES ASFF_COMPLIANCE_TYPE CHECK_SEVERITY CHECK_SERVICENAME;do
if [[ $RESULT == "INFO" ]]; then
echo '<tr class="table-info">' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td><i class="fas fa-info-circle"></i></td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td>INFO</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
@@ -183,8 +183,8 @@ while IFS=, read -r PROFILE ACCOUNT_NUM REPREGION TITLE_ID RESULT SCORED LEVEL T
echo '<td>'$TITLE_TEXT'</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td>'$NOTES'</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '</tr>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
fi
if [[ $RESULT == "PASS" ]]; then
echo '<tr class="p-3 mb-2 bg-success">' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td><i class="fas fa-thumbs-up"></i></td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td>PASS</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
@@ -197,8 +197,8 @@ while IFS=, read -r PROFILE ACCOUNT_NUM REPREGION TITLE_ID RESULT SCORED LEVEL T
echo '<td>'$TITLE_TEXT'</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td>'$NOTES'</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '</tr>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
fi
if [[ $RESULT == "FAIL" ]]; then
echo '<tr class="table-danger" >' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td> <i class="fas fa-thumbs-down"></i></td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td>FAIL</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
@@ -212,7 +212,7 @@ while IFS=, read -r PROFILE ACCOUNT_NUM REPREGION TITLE_ID RESULT SCORED LEVEL T
echo '<td>'$NOTES'</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '</tr>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
fi
if [[ $RESULT == "WARNING" ]]; then
echo '<tr class="table-warning">' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td><i class="fas fa-exclamation-triangle"></i></td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
echo '<td>WARN</td>' >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML
@@ -228,6 +228,3 @@ while IFS=, read -r PROFILE ACCOUNT_NUM REPREGION TITLE_ID RESULT SCORED LEVEL T
fi
done < $INPUT
addHtmlFooter >> ${OUTPUT_FILE_NAME}.$EXTENSION_HTML


@@ -1,9 +1,9 @@
## K8S - Cronjob
Simple instructions to add a cronjob on K8S that executes Prowler and saves the results to AWS S3.
### Files:
cronjob.yml ---> is a **cronjob** for K8S; you must set the frequency and probes for your scans \
secret.yml -----> is a **secret** file with the AWS key ID/secret and the name of the bucket
### To apply:
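A minimal sketch, assuming the two files above and the default namespace:

```bash
kubectl apply -f secret.yml    # create the AWS credentials/bucket secret first
kubectl apply -f cronjob.yml   # then register the scheduled Prowler job
kubectl get cronjobs           # confirm the schedule was registered
```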


@@ -16,7 +16,7 @@ spec:
- name: prowler
image: toniblyx/prowler:latest
imagePullPolicy: Always
command:
- "./prowler"
args: [ "-g", "hipaa", "-M", "csv,json,html", "-B", "$(awsS3Bucket)" ]
env:
@@ -30,11 +30,11 @@ spec:
secretKeyRef:
name: devsecops-prowler-cronjob-secret
key: awsSecretKey
- name: awsS3Bucket
valueFrom:
secretKeyRef:
name: devsecops-prowler-cronjob-secret
key: awsS3Bucket
imagePullPolicy: IfNotPresent
restartPolicy: OnFailure
backoffLimit: 3


@@ -1,3 +1,3 @@
export ROLE=ProwlerXA-Role
export PARALLEL_ACCOUNTS=1
export REGION=us-east-1


@@ -42,4 +42,4 @@ RUN \
USER ${USERNAME}
# Run script
ENTRYPOINT ["/run-prowler-securityhub.sh"]


@@ -15,7 +15,7 @@ Originally based on [org-multi-account](https://github.com/prowler-cloud/prowler
The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
## CloudFormation Templates
### CF-Prowler-IAM.yml
Creates the following IAM Roles:
@@ -33,7 +33,7 @@ Creates the following resources:
5. **ProwlerTaskScheduler**: EventBridge Rule that schedules the execution of the Task Definition. The cron expression is specified as a CloudFormation template parameter.
### CF-Prowler-CrossAccountRole.yml
Creates the cross-account IAM Role required for Prowler to run. Deploy it as a StackSet in every account in the AWS Organization.
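A hedged CLI sketch of that StackSet deployment (stack-set name, permission model, OU ID, and region are placeholders/assumptions, not values from the templates):

```bash
aws cloudformation create-stack-set \
  --stack-set-name ProwlerXA-Role \
  --template-body file://CF-Prowler-CrossAccountRole.yml \
  --capabilities CAPABILITY_NAMED_IAM \
  --permission-model SERVICE_MANAGED \
  --auto-deployment Enabled=true,RetainStacksOnAccountRemoval=false
aws cloudformation create-stack-instances \
  --stack-set-name ProwlerXA-Role \
  --deployment-targets OrganizationalUnitIds=ou-examplerootid \
  --regions eu-west-1
```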
## Docker Container
@@ -42,7 +42,7 @@ The Dockerfile does the following:
1. Uses amazonlinux:2022 as a base.
2. Downloads required dependencies.
3. Copies the .awsvariables and run-prowler-securityhub.sh files into the root.
4. Downloads the specified version of Prowler as recommended in the release notes.
5. Assigns permissions to a lower privileged user and then drops to it.
6. Runs the script.
@@ -58,10 +58,10 @@ The script gets the list of accounts in AWS Organizations, and then executes Pro
The logs generated and sent to CloudWatch are error logs, together with assessment start and finish logs.
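A rough sketch of the per-account loop the script is described as performing (the role name comes from .awsvariables; the exact Prowler flags used by run-prowler-securityhub.sh may differ):

```bash
for ACCOUNT_ID in $(aws organizations list-accounts --query 'Accounts[].Id' --output text); do
  ./prowler -A "$ACCOUNT_ID" -R "$ROLE" -M json-asff -S -q \
    || echo "assessment failed for $ACCOUNT_ID" >&2   # failures end up in the CloudWatch error logs
done
```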
## Instructions
1. Create a Private ECR Repository in the account that will host the Prowler container. The Audit account is recommended, but any account can be used.
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. You need to have Docker and the AWS CLI installed, and use the CLI to log in to the account first. After uploading, note the Image URI, as it is required for the CF-Prowler-ECS template.
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
5. Deploy **CF-Prowler-CrossAccountRole.yml** in the Master Account as a single stack. You will have to choose the CrossAccountRole name (ProwlerXA-Role by default) and the ProwlerTaskRoleName (ProwlerECSTask-Role by default)
6. Deploy **CF-Prowler-CrossAccountRole.yml** in every Member Account as a StackSet. Choose the same CrossAccountName and ProwlerTaskRoleName as the previous step.
7. Deploy **CF-Prowler-IAM.yml** in the account that will host the Prowler container (the same from step 1). The following template parameters must be provided:
@@ -72,7 +72,7 @@ The logs that are generated and sent to Cloudwatch are error logs, and assessmen
8. Deploy **CF-Prowler-ECS.yml** in the account that will host the Prowler container (the same from step 1). The following template parameters must be provided:
- **ProwlerClusterName**: Name for the ECS Cluster (default ProwlerCluster)
- **ProwlerContainerName**: Name for the Prowler container (default prowler)
- **ProwlerContainerInfo**: ECR URI from step 1.
- **ProwlerECSLogGroupName**: CloudWatch Log Group name (default /aws/ecs/SecurityHub-Prowler)
- **SecurityGroupVPCId**: VPC ID for the VPC where the container will run.
- **ProwlerScheduledSubnet1 and 2**: Subnets IDs from the VPC specified. Choose private subnets if possible.
@@ -90,5 +90,5 @@ If you find permission errors in the CloudWatch logs, the culprit might be a [Se
---
## Upgrading Prowler
Prowler's version is controlled by the PROWLERVER argument in the Dockerfile; change it to the desired version and follow the ECR push commands to update the container image.
Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged", since only one image can hold the "latest" tag.
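A minimal sketch of the standard ECR push-command flow referenced above (account ID, region, and repository name are placeholders):

```bash
ACCOUNT_ID=111111111111
REGION=eu-west-1
REPO=prowler
aws ecr get-login-password --region "$REGION" \
  | docker login --username AWS --password-stdin "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com"
docker build -t "$REPO" .
docker tag "$REPO:latest" "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$REPO:latest"
docker push "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$REPO:latest"
```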


@@ -94,4 +94,4 @@ Resources:
Outputs:
ProwlerCrossAccountRole:
Description: CrossAccount Role to be used by Prowler to assess AWS Accounts in the AWS Organization.
Value: !Ref ProwlerCrossAccountRole


@@ -5,14 +5,14 @@ Parameters:
Type: String
Description: Name of the ECS Cluster that the Prowler Fargate Task will run in
Default: ProwlerCluster
ProwlerContainerName:
Type: String
Description: Name of the Prowler Container Definition within the ECS Task
Default: prowler
ProwlerContainerInfo:
Type: String
Description: ECR URI of the Prowler container
ProwlerECSLogGroupName:
Type: String
Description: Name for the log group to be created
Default: /aws/ecs/SecurityHub-Prowler
@@ -41,17 +41,17 @@ Parameters:
Resources:
ProwlerECSCloudWatchLogsGroup:
Type: AWS::Logs::LogGroup
Properties:
LogGroupName: !Ref ProwlerECSLogGroupName
RetentionInDays: 90
ProwlerECSCluster:
Type: AWS::ECS::Cluster
Properties:
ClusterName: !Ref ProwlerClusterName
ProwlerECSTaskDefinition:
Type: AWS::ECS::TaskDefinition
Properties:
ContainerDefinitions:
- Image: !Ref ProwlerContainerInfo
Name: !Ref ProwlerContainerName
LogConfiguration:
@@ -99,4 +99,4 @@ Resources:
- !Ref ProwlerSecurityGroup
Subnets:
- !Ref ProwlerScheduledSubnet1
- !Ref ProwlerScheduledSubnet2


@@ -102,4 +102,4 @@ Outputs:
Value: !GetAtt ProwlerTaskRole.Arn
ECSEventRoleARN:
Description: ARN of the Eventbridge Task Role
Value: !GetAtt ECSEventRole.Arn


@@ -32,7 +32,7 @@ For more information on how to use prowler, see [here](https://github.com/prowle
- Specify regions : Region to deploy
3. **Audit Account**
1. Go to S3 console, create a bucket, upload [run-prowler-reports.sh.zip](src/run-prowler-reports.sh.zip)
- bucket name : prowler-util-*[Account ID]*-*[region]* (a CLI sketch of this step appears after this list)
![Untitled](docs/images/s3_screenshot.png)
1. Deploy [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml), which creates a CloudWatch Rule to trigger CodeBuild at a fixed interval, allowing Prowler to audit multiple accounts.
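A CLI sketch of the bucket creation and upload step above (account ID and region are placeholders):

```bash
aws s3 mb s3://prowler-util-111111111111-us-east-1
aws s3 cp src/run-prowler-reports.sh.zip s3://prowler-util-111111111111-us-east-1/
```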
@@ -45,4 +45,4 @@ For more information on how to use prowler, see [here](https://github.com/prowle
- ProwlerReportS3Account : The account where the report S3 bucket resides.
1. If you'd like to change the scheduled time,
1. You can change the cron expression of ScheduleExpression within [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml).
2. Alternatively, you can make changes directly from Events > Rules > ProwlerExecuteRule > Actions > Edit in the CloudWatch console.
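If you prefer the CLI over the console edit, a sketch of the same change (rule name as above; note this drifts from the deployed CloudFormation template):

```bash
aws events put-rule --name ProwlerExecuteRule --schedule-expression 'cron(0 22 * * ? *)'
```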


@@ -4,7 +4,7 @@ Language: [English](README.md)
This document describes how to apply prowler in a multi-account environment within an AWS Organization.
At a scheduled time, CloudWatch triggers CodeBuild.
CodeBuild clones the latest [prowler](https://github.com/prowler-cloud/prowler) source
and performs a security assessment on every Account in the Organization.
For detailed usage of prowler, see [here](https://github.com/prowler-cloud/prowler#usagee).
@@ -59,4 +59,4 @@ For detailed usage of prowler, see [here](https://github.com/prowler-cloud
- ProwlerReportS3Account : The account where the S3 bucket that stores the report resides
3. If you'd like to change the scheduled time,
1. You can change the cron expression of ScheduleExpression within [ProwlerCodeBuildStack.yaml](templates/ProwlerCodeBuildStack.yaml).
2. Alternatively, you can make changes directly from Events > Rules > ProwlerExecuteRule > Actions > Edit in the CloudWatch console.


@@ -209,4 +209,4 @@ data "aws_iam_policy" "SecurityAudit" {
data "aws_caller_identity" "current" {
}
data "aws_region" "current" {
}


@@ -256,7 +256,7 @@ resource "aws_iam_role" "prowler_event_trigger_role" {
}
]
})
}
resource "aws_iam_policy" "prowler_event_trigger_policy" {
depends_on = [aws_codebuild_project.prowler_codebuild]
@@ -328,7 +328,7 @@ resource "aws_iam_policy" "prowler_kickstarter_iam_policy" {
]
Effect = "Allow"
Resource = "arn:aws:glue:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:catalog"
},
{
Action = [
@@ -340,19 +340,19 @@ resource "aws_iam_policy" "prowler_kickstarter_iam_policy" {
]
Effect = "Allow"
Resource = "arn:aws:codebuild:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:report-group/*"
},
{
Action = [ "securityhub:BatchImportFindings"]
Effect = "Allow"
Resource = "*"
Resource = "*"
},
{
Action = [ "securityhub:GetFindings"]
Effect = "Allow"
Resource = "*"
Resource = "*"
},
{
"Action": "codebuild:StartBuild",
"Resource": "arn:aws:codebuild:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:project/*",
"Effect": "Allow"
@@ -416,7 +416,7 @@ resource "aws_s3_bucket_policy" "prowler_report_storage_bucket_policy" {
}
}
}
]
})
}
@@ -474,7 +474,7 @@ resource "aws_codebuild_project" "prowler_codebuild" {
resource "aws_securityhub_account" "securityhub_resource" {
resource "aws_securityhub_account" "securityhub_resource" {
}
resource "aws_securityhub_product_subscription" "security_hub_enable_prowler_findings" {
@@ -496,4 +496,4 @@ resource "aws_cloudwatch_event_target" "run_prowler_scan" {
rule = aws_cloudwatch_event_rule.prowler_check_scheduler_event.name
role_arn = aws_iam_role.prowler_event_trigger_role.arn
}


@@ -206,4 +206,4 @@
output "account_id" {
value = data.aws_caller_identity.current.account_id
}


@@ -5,20 +5,20 @@ phases:
python: 3.8
commands:
- echo "Installing Prowler and dependencies..."
- pip3 install detect-secrets
- yum -y install jq
- curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
- unzip awscliv2.zip
- ./aws/install
- git clone https://github.com/prowler-cloud/prowler
- cd prowler
build:
commands:
- echo "Running Prowler as ./prowler $PROWLER_OPTIONS"
- ./prowler $PROWLER_OPTIONS || true
post_build:
commands:
- echo "Scan Complete"
- echo "Scan Complete"
- aws s3 cp --sse AES256 output/ s3://$BUCKET_REPORT/ --recursive
- echo "Done!"


@@ -6,4 +6,4 @@ cd prowler
git checkout -t origin/terraform-kickstart
sudo yum install -y yum-utils
sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo
sudo yum -y install terraform
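# Possible next steps once Terraform is installed (directory is illustrative; run from
# wherever the kickstart .tf files live):
#   terraform init && terraform plan -out=prowler.tfplan && terraform apply prowler.tfplan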


@@ -55,7 +55,7 @@ cp /var/ossec/integrations/prowler/integrations/prowler_rules.xml /var/ossec/etc
```
Edit `/var/ossec/etc/ossec.conf` and add the following wodle configuration. Remember that here `timeout 21600 seconds` is 6 hours, just to allow Prowler to run to completion in the case of a large account. The recommended interval is 1d:
```xml
<wodle name="command">
<disabled>no</disabled>
<tag>aws-prowler: account1</tag>


@@ -19,15 +19,15 @@
# 6 - Error sending to socket
import signal
import socket
import subprocess
import sys
from datetime import datetime
################################################################################
# Constants