def get_regional_configs()

in src/smspark/bootstrapper.py


    def get_regional_configs(self) -> List[Configuration]:
        aws_region = os.getenv("AWS_REGION")
        logging.info("default/current AWS_REGION is {}".format(aws_region))

        if aws_region is None:
            logging.warning("Unable to detect AWS region from environment variable AWS_REGION")
            return []
        elif aws_region in ["cn-northwest-1", "cn-north-1"]:
            aws_domain = "amazonaws.com.cn"
            s3_endpoint = f"s3.{aws_region}.{aws_domain}"
        elif aws_region in ["us-gov-west-1", "us-gov-east-1"]:
            aws_domain = "amazonaws.com"
            s3_endpoint = f"s3.{aws_region}.{aws_domain}"
        else:
            # to make Hadoop 3.3.6 work with aws-java-sdk-v2
            aws_domain = "amazonaws.com"
            s3_endpoint = f"s3.{aws_region}.{aws_domain}"

        logging.info("fs.s3a.endpoint config is {}".format(s3_endpoint))

        return [
            Configuration(
                Classification="core-site",
                Properties={"fs.s3a.endpoint": s3_endpoint, "fs.s3a.endpoint.region": aws_region},
            )
        ]
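
The properties returned here are core-site settings (fs.s3a.endpoint and fs.s3a.endpoint.region) that point the S3A filesystem at the region-local S3 endpoint. The snippet below is a minimal, standalone sketch of the same endpoint resolution for illustration only; the helper name resolve_s3_endpoint is hypothetical and not part of smspark.

    import os
    from typing import Optional, Tuple


    def resolve_s3_endpoint(aws_region: Optional[str] = None) -> Optional[Tuple[str, str]]:
        """Mirror the endpoint selection above; returns (endpoint, region) or None."""
        aws_region = aws_region or os.getenv("AWS_REGION")
        if aws_region is None:
            return None
        # Only the China partition uses a different domain; GovCloud and
        # commercial regions both resolve to "amazonaws.com".
        aws_domain = "amazonaws.com.cn" if aws_region in ("cn-northwest-1", "cn-north-1") else "amazonaws.com"
        return f"s3.{aws_region}.{aws_domain}", aws_region


    print(resolve_s3_endpoint("eu-west-1"))   # ('s3.eu-west-1.amazonaws.com', 'eu-west-1')
    print(resolve_s3_endpoint("cn-north-1"))  # ('s3.cn-north-1.amazonaws.com.cn', 'cn-north-1')

In the real method, these values are returned as a core-site Configuration object, presumably consumed by the rest of the bootstrapper when it writes out the Hadoop/Spark configuration files.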