def create_data_lake_zone_bucket()

in lib/s3_bucket_zones_stack.py


    def create_data_lake_zone_bucket(self, logical_id, bucket_name, access_logs_bucket, s3_kms_key) -> s3.Bucket:
        """
        Creates an Amazon S3 bucket and attaches a bucket policy with the necessary guardrails.
        It enables server-side encryption with the provided KMS key and leverages the S3 Bucket Key feature.

        @param logical_id str: The logical id to apply to the bucket
        @param bucket_name str: The name for the bucket resource
        @param access_logs_bucket s3.Bucket: The bucket to target for Access Logging
        @param s3_kms_key kms.Key: The KMS Key to use for encryption of data at rest

        @return s3.Bucket: The bucket that was created
        """
        lifecycle_rules = [
            s3.LifecycleRule(
                enabled=True,
                expiration=cdk.Duration.days(60),
                noncurrent_version_expiration=cdk.Duration.days(30),
            )
        ]
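        # Production override: retain current objects for roughly seven years (2,555 days),
        # expire noncurrent versions after 90 days, and transition objects to Glacier after one year.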
        if self.target_environment == PROD:
            lifecycle_rules = [
                s3.LifecycleRule(
                    enabled=True,
                    expiration=cdk.Duration.days(2555),
                    noncurrent_version_expiration=cdk.Duration.days(90),
                    transitions=[
                        s3.Transition(
                            storage_class=s3.StorageClass.GLACIER,
                            transition_after=cdk.Duration.days(365),
                        )
                    ]
                )
            ]
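        # Create the bucket with secure defaults: private ACL, all public access blocked,
        # KMS encryption with S3 Bucket Keys enabled, versioning, and server access logs
        # delivered to the shared access-logs bucket under a prefix matching the bucket name.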
        bucket = s3.Bucket(
            self,
            id=logical_id,
            access_control=s3.BucketAccessControl.PRIVATE,
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            bucket_key_enabled=True,
            bucket_name=bucket_name,
            encryption=s3.BucketEncryption.KMS,
            encryption_key=s3_kms_key,
            lifecycle_rules=lifecycle_rules,
            public_read_access=False,
            removal_policy=self.removal_policy,
            versioned=True,
            object_ownership=s3.ObjectOwnership.OBJECT_WRITER,
            server_access_logs_bucket=access_logs_bucket,
            server_access_logs_prefix=bucket_name,
        )
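        # Deny object reads and writes that are not made over TLS (aws:SecureTransport is false).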
        policy_document_statements = [
            iam.PolicyStatement(
                sid='OnlyAllowSecureTransport',
                effect=iam.Effect.DENY,
                principals=[iam.AnyPrincipal()],
                actions=[
                    's3:GetObject',
                    's3:PutObject',
                ],
                resources=[f'{bucket.bucket_arn}/*'],
                conditions={'Bool': {'aws:SecureTransport': 'false'}}
            )
        ]
        # Prevent IAM users from deleting the bucket in the TEST and PROD environments
        if self.target_environment in (PROD, TEST):
            policy_document_statements.append(
                iam.PolicyStatement(
                    sid='BlockUserDeletionOfBucket',
                    effect=iam.Effect.DENY,
                    principals=[iam.AnyPrincipal()],
                    actions=[
                        's3:DeleteBucket',
                    ],
                    resources=[bucket.bucket_arn],
                    # aws:PrincipalArn holds the caller's ARN, so this denial applies only to IAM user principals
                    conditions={'StringLike': {'aws:PrincipalArn': f'arn:aws:iam::{self.account}:user/*'}}
                )
            )
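        # Attach each guardrail statement to the bucket's resource policy.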
        for statement in policy_document_statements:
            bucket.add_to_resource_policy(statement)

        return bucket
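
Usage (a minimal sketch, not part of the stack source): the stack's constructor would typically call this method once per data lake zone, after the access-logs bucket and the KMS key have been created. The logical ID and bucket name below are hypothetical placeholders.

    # Hypothetical call from the stack's __init__; names are illustrative only.
    raw_bucket = self.create_data_lake_zone_bucket(
        logical_id='RawDataLakeZoneBucket',          # hypothetical logical ID
        bucket_name='example-raw-data-lake-bucket',  # hypothetical, globally unique bucket name
        access_logs_bucket=access_logs_bucket,       # s3.Bucket created earlier in the stack
        s3_kms_key=s3_kms_key,                       # kms.Key created earlier in the stack
    )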