Gems/AWSMetrics/cdk/aws_metrics/data_lake_integration.py [28:71]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                 server_access_logs_bucket: str = None) -> None:
        self._stack = stack
        self._application_name = application_name
        self._server_access_logs_bucket = server_access_logs_bucket

        self._create_analytics_bucket()
        self._create_events_database()
        self._create_events_table()
        self._create_events_crawler()

    def _create_analytics_bucket(self) -> None:
        """
        Create a private bucket that should only be accessed by the resources defined in the CDK application.
        The bucket uses server-side encryption with Amazon S3 managed keys (SSE-S3):
        https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingServerSideEncryption.html
        """
        # Enable server access logging if the server access logs bucket is provided following S3 best practices.
        # See https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html
        server_access_logs_bucket = s3.Bucket.from_bucket_name(
            self._stack,
            f'{self._stack.stack_name}-ImportedAccessLogsBucket',
            self._server_access_logs_bucket,
        ) if self._server_access_logs_bucket else None

        # Bucket name cannot contain uppercase characters
        # Do not specify the bucket name here since bucket name is required to be unique globally. If we set
        # a specific name here, only one customer can deploy the bucket successfully.
        self._analytics_bucket = s3.Bucket(
            self._stack,
            id=resource_name_sanitizer.sanitize_resource_name(
                f'{self._stack.stack_name}-AnalyticsBucket'.lower(), 's3_bucket'),
            encryption=s3.BucketEncryption.S3_MANAGED,
            block_public_access=s3.BlockPublicAccess(
                block_public_acls=True,
                block_public_policy=True,
                ignore_public_acls=True,
                restrict_public_buckets=True
            ),
            server_access_logs_bucket=server_access_logs_bucket,
            server_access_logs_prefix=f'{self._stack.stack_name}-AccessLogs' if server_access_logs_bucket else None
        )

        # For Amazon S3 buckets, you must delete all objects in the bucket for deletion to succeed.
        cfn_bucket = self._analytics_bucket.node.find_child('Resource')
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



Gems/AWSMetrics/cdv1/aws_metrics/data_lake_integration.py [24:67]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                 server_access_logs_bucket: str = None) -> None:
        self._stack = stack
        self._application_name = application_name
        self._server_access_logs_bucket = server_access_logs_bucket

        self._create_analytics_bucket()
        self._create_events_database()
        self._create_events_table()
        self._create_events_crawler()

    def _create_analytics_bucket(self) -> None:
        """
        Create a private bucket that should only be accessed by the resources defined in the CDK application.
        The bucket uses server-side encryption with Amazon S3 managed keys (SSE-S3):
        https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingServerSideEncryption.html
        """
        # Enable server access logging if the server access logs bucket is provided following S3 best practices.
        # See https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html
        server_access_logs_bucket = s3.Bucket.from_bucket_name(
            self._stack,
            f'{self._stack.stack_name}-ImportedAccessLogsBucket',
            self._server_access_logs_bucket,
        ) if self._server_access_logs_bucket else None

        # Bucket name cannot contain uppercase characters
        # Do not specify the bucket name here since bucket name is required to be unique globally. If we set
        # a specific name here, only one customer can deploy the bucket successfully.
        self._analytics_bucket = s3.Bucket(
            self._stack,
            id=resource_name_sanitizer.sanitize_resource_name(
                f'{self._stack.stack_name}-AnalyticsBucket'.lower(), 's3_bucket'),
            encryption=s3.BucketEncryption.S3_MANAGED,
            block_public_access=s3.BlockPublicAccess(
                block_public_acls=True,
                block_public_policy=True,
                ignore_public_acls=True,
                restrict_public_buckets=True
            ),
            server_access_logs_bucket=server_access_logs_bucket,
            server_access_logs_prefix=f'{self._stack.stack_name}-AccessLogs' if server_access_logs_bucket else None
        )

        # For Amazon S3 buckets, you must delete all objects in the bucket for deletion to succeed.
        cfn_bucket = self._analytics_bucket.node.find_child('Resource')
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



