src/graph_notebook/notebooks/03-Neptune-ML/02-SPARQL/neptune_ml_sparql_utils.py [518:608]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            config = json.load(f)
            region_name = boto3.session.Session().region_name
            if region_name in ['cn-north-1', 'cn-northwest-1']:
                self.PRETRAINED_MODEL = config['models_cn']
            else:
                self.PRETRAINED_MODEL = config['models']
            self.PYTORCH_CPU_CONTAINER_IMAGE = config['container_images'][region_name]

    def __run_create_model(self, sm_client,
                           name,
                           role,
                           image_uri,
                           model_s3_location,
                           container_mode='SingleModel',
                           script_name='infer_entry_point.py',
                           ):
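        # Environment variables read by the SageMaker serving container:
        # inference entry point, model directory, log level, server timeout,
        # worker count, region, and the CloudWatch metrics toggle.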
        model_environment_vars = {self.SCRIPT_PARAM_NAME.upper(): script_name,
                                  self.DIR_PARAM_NAME.upper(): model_s3_location,
                                  self.CONTAINER_LOG_LEVEL_PARAM_NAME.upper(): str(20),
                                  self.MODEL_SERVER_TIMEOUT_PARAM_NAME.upper(): str(1200),
                                  self.MODEL_SERVER_WORKERS_PARAM_NAME.upper(): str(1),
                                  self.SAGEMAKER_REGION_PARAM_NAME.upper(): boto3.session.Session().region_name,
                                  self.ENABLE_CLOUDWATCH_METRICS_PARAM.upper(): "false"
                                  }

        # Single-container model definition; use the image URI passed in by
        # the caller rather than silently ignoring it.
        container_def = [{"Image": image_uri,
                          "Environment": model_environment_vars,
                          "ModelDataUrl": model_s3_location,
                          "Mode": container_mode
                          }]
        request = {"ModelName": name,
                   "ExecutionRoleArn": role,
                   "Containers": container_def
                   }
        return sm_client.create_model(**request)

    def __run_create_endpoint_config(self, sm_client,
                                     model_name,
                                     instance_type='ml.m5.2xlarge',
                                     initial_instance_count=1,
                                     initial_weight=1,
                                     variant_name='AllTraffic'
                                     ):
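        # A single production variant backs the endpoint; the endpoint
        # configuration reuses the model name.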
        production_variant_configuration = [{
            "ModelName": model_name,
            "InstanceType": instance_type,
            "InitialInstanceCount": initial_instance_count,
            "VariantName": variant_name,
            "InitialVariantWeight": initial_weight,
        }]
        request = {"EndpointConfigName": model_name,
                   "ProductionVariants": production_variant_configuration
                   }

        return sm_client.create_endpoint_config(**request)

    def __create_model(self, name: str, model_s3_location: str):
        image_uri = self.PYTORCH_CPU_CONTAINER_IMAGE
        instance_type = self.INSTANCE_TYPE
        role = self.__get_neptune_ml_role()
        sm = boto3.client("sagemaker")
        # Suffix the name with a timestamp so repeated runs do not collide;
        # the same name is reused for the model, endpoint config, and endpoint.
        name = "{}-{}".format(name, strftime("%Y-%m-%d-%H-%M-%S", gmtime()))
        self.__run_create_model(
            sm, name, role, image_uri, model_s3_location)
        self.__run_create_endpoint_config(
            sm, name, instance_type=instance_type)
        # Endpoint creation is asynchronous; callers must wait for InService.
        sm.create_endpoint(EndpointName=name, EndpointConfigName=name)
        return name

    def __get_neptune_ml_role(self):
        # The Neptune ML IAM role ARN is exported as NEPTUNE_ML_ROLE_ARN in
        # the notebook instance's ~/.bashrc; parse it from there.
        with open(f'{HOME_DIRECTORY}/.bashrc') as f:
            data = f.readlines()
        for d in data:
            if d.startswith('export NEPTUNE_ML_ROLE_ARN'):
                parts = d.split('=', 1)
                if len(parts) == 2:
                    return parts[1].rstrip()
        logging.error("Unable to determine the Neptune ML IAM Role.")
        return None

    def __copy_s3(self, s3_bucket_uri: str, source_s3_uri: str):
        # Destination bucket and key
        path = urlparse(s3_bucket_uri, allow_fragments=False)
        bucket = path.netloc
        file_path = path.path.lstrip('/').rstrip('/')
        # Source bucket and key
        source_path = urlparse(source_s3_uri, allow_fragments=False)
        source_bucket = source_path.netloc
        source_file_path = source_path.path.lstrip('/').rstrip('/')
        # Managed copy from the source object to the destination location
        s3 = boto3.resource('s3')
        s3.meta.client.copy(
            {"Bucket": source_bucket, "Key": source_file_path}, bucket, file_path)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
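
A minimal usage sketch, separate from the utility above (function name is illustrative): __create_model returns as soon as create_endpoint is accepted, so a caller would typically block on boto3's standard endpoint_in_service waiter before sending inference requests.

import boto3

def wait_for_endpoint(endpoint_name: str) -> str:
    """Block until the SageMaker endpoint created by __create_model is usable."""
    sm = boto3.client("sagemaker")
    # Standard boto3 waiter; polls DescribeEndpoint until the status is InService.
    waiter = sm.get_waiter("endpoint_in_service")
    waiter.wait(EndpointName=endpoint_name)
    return sm.describe_endpoint(EndpointName=endpoint_name)["EndpointStatus"]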



src/graph_notebook/notebooks/03-Neptune-ML/03-Sample-Applications/04-Telco-Networks/neptune_ml_utils.py [490:580]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            config = json.load(f)
            region_name = boto3.session.Session().region_name
            if region_name in ['cn-north-1', 'cn-northwest-1']:
                self.PRETRAINED_MODEL = config['models_cn']
            else:
                self.PRETRAINED_MODEL = config['models']
            self.PYTORCH_CPU_CONTAINER_IMAGE = config['container_images'][region_name]

    def __run_create_model(self, sm_client,
                           name,
                           role,
                           image_uri,
                           model_s3_location,
                           container_mode='SingleModel',
                           script_name='infer_entry_point.py',
                           ):
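        # Environment variables read by the SageMaker serving container:
        # inference entry point, model directory, log level, server timeout,
        # worker count, region, and the CloudWatch metrics toggle.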
        model_environment_vars = {self.SCRIPT_PARAM_NAME.upper(): script_name,
                                  self.DIR_PARAM_NAME.upper(): model_s3_location,
                                  self.CONTAINER_LOG_LEVEL_PARAM_NAME.upper(): str(20),
                                  self.MODEL_SERVER_TIMEOUT_PARAM_NAME.upper(): str(1200),
                                  self.MODEL_SERVER_WORKERS_PARAM_NAME.upper(): str(1),
                                  self.SAGEMAKER_REGION_PARAM_NAME.upper(): boto3.session.Session().region_name,
                                  self.ENABLE_CLOUDWATCH_METRICS_PARAM.upper(): "false"
                                  }

        # Single-container model definition; use the image URI passed in by
        # the caller rather than silently ignoring it.
        container_def = [{"Image": image_uri,
                          "Environment": model_environment_vars,
                          "ModelDataUrl": model_s3_location,
                          "Mode": container_mode
                          }]
        request = {"ModelName": name,
                   "ExecutionRoleArn": role,
                   "Containers": container_def
                   }
        return sm_client.create_model(**request)

    def __run_create_endpoint_config(self, sm_client,
                                     model_name,
                                     instance_type='ml.m5.2xlarge',
                                     initial_instance_count=1,
                                     initial_weight=1,
                                     variant_name='AllTraffic'
                                     ):
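        # A single production variant backs the endpoint; the endpoint
        # configuration reuses the model name.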
        production_variant_configuration = [{
            "ModelName": model_name,
            "InstanceType": instance_type,
            "InitialInstanceCount": initial_instance_count,
            "VariantName": variant_name,
            "InitialVariantWeight": initial_weight,
        }]
        request = {"EndpointConfigName": model_name,
                   "ProductionVariants": production_variant_configuration
                   }

        return sm_client.create_endpoint_config(**request)

    def __create_model(self, name: str, model_s3_location: str):
        image_uri = self.PYTORCH_CPU_CONTAINER_IMAGE
        instance_type = self.INSTANCE_TYPE
        role = self.__get_neptune_ml_role()
        sm = boto3.client("sagemaker")
        # Suffix the name with a timestamp so repeated runs do not collide;
        # the same name is reused for the model, endpoint config, and endpoint.
        name = "{}-{}".format(name, strftime("%Y-%m-%d-%H-%M-%S", gmtime()))
        self.__run_create_model(
            sm, name, role, image_uri, model_s3_location)
        self.__run_create_endpoint_config(
            sm, name, instance_type=instance_type)
        # Endpoint creation is asynchronous; callers must wait for InService.
        sm.create_endpoint(EndpointName=name, EndpointConfigName=name)
        return name

    def __get_neptune_ml_role(self):
        # The Neptune ML IAM role ARN is exported as NEPTUNE_ML_ROLE_ARN in
        # the notebook instance's ~/.bashrc; parse it from there.
        with open(f'{HOME_DIRECTORY}/.bashrc') as f:
            data = f.readlines()
        for d in data:
            if d.startswith('export NEPTUNE_ML_ROLE_ARN'):
                parts = d.split('=', 1)
                if len(parts) == 2:
                    return parts[1].rstrip()
        logging.error("Unable to determine the Neptune ML IAM Role.")
        return None

    def __copy_s3(self, s3_bucket_uri: str, source_s3_uri: str):
        # Destination bucket and key
        path = urlparse(s3_bucket_uri, allow_fragments=False)
        bucket = path.netloc
        file_path = path.path.lstrip('/').rstrip('/')
        # Source bucket and key
        source_path = urlparse(source_s3_uri, allow_fragments=False)
        source_bucket = source_path.netloc
        source_file_path = source_path.path.lstrip('/').rstrip('/')
        # Managed copy from the source object to the destination location
        s3 = boto3.resource('s3')
        s3.meta.client.copy(
            {"Bucket": source_bucket, "Key": source_file_path}, bucket, file_path)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
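
Because the model, endpoint configuration, and endpoint created above all share one timestamped name, teardown is symmetric. A hedged cleanup sketch, not part of the utilities themselves (endpoint_name is whatever __create_model returned):

import boto3

def delete_endpoint_resources(endpoint_name: str) -> None:
    """Remove the endpoint, endpoint configuration, and model that share a name."""
    sm = boto3.client("sagemaker")
    # Delete in reverse order of creation: endpoint, then config, then model.
    sm.delete_endpoint(EndpointName=endpoint_name)
    sm.delete_endpoint_config(EndpointConfigName=endpoint_name)
    sm.delete_model(ModelName=endpoint_name)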



