in sagemaker-spark-sdk/src/main/scala/com/amazonaws/services/sagemaker/sparksdk/SageMakerModel.scala [155:186]
/**
  * Creates a [[SageMakerModel]] backed by model artifacts stored in S3, hosting
  * them on a new SageMaker Endpoint.
  *
  * @param modelPath S3 URI (s3://bucket/key) of the model artifacts.
  * @param modelImage URI of the Docker image that serves the model.
  * @param modelExecutionRoleARN IAM role ARN SageMaker assumes to run the model.
  * @param endpointInstanceType EC2 instance type for the endpoint.
  * @param endpointInitialInstanceCount initial number of endpoint instances.
  * @param requestRowSerializer serializes Rows into endpoint request payloads.
  * @param responseRowDeserializer deserializes endpoint responses into Rows.
  * @param modelEnvironmentVariables environment variables passed to the model container.
  * @param endpointCreationPolicy when the endpoint is created; must not be DO_NOT_CREATE.
  * @param sagemakerClient client used for SageMaker API calls.
  * @param prependResultRows whether input Rows are prepended to transformation results.
  * @param namePolicy policy used to name the created SageMaker resources.
  * @param uid unique identifier for this model instance.
  * @return a [[SageMakerModel]] configured to host the given artifacts.
  * @throws IllegalArgumentException if endpointCreationPolicy is DO_NOT_CREATE.
  */
def fromModelS3Path(modelPath: String,
                    modelImage: String,
                    modelExecutionRoleARN: String,
                    endpointInstanceType: String,
                    endpointInitialInstanceCount : Int,
                    requestRowSerializer: RequestRowSerializer,
                    responseRowDeserializer: ResponseRowDeserializer,
                    modelEnvironmentVariables: Map[String, String] = Map[String, String](),
                    endpointCreationPolicy: EndpointCreationPolicy =
                      EndpointCreationPolicy.CREATE_ON_CONSTRUCT,
                    sagemakerClient : AmazonSageMaker
                      = AmazonSageMakerClientBuilder.defaultClient,
                    prependResultRows : Boolean = true,
                    namePolicy : NamePolicy = new RandomNamePolicy(),
                    uid: String = Identifiable.randomUID("sagemaker")) : SageMakerModel = {
  // A model built from an S3 path exists solely to be hosted, so an endpoint
  // must eventually be created.
  require(endpointCreationPolicy != EndpointCreationPolicy.DO_NOT_CREATE,
    "Endpoint creation policy must not be DO_NOT_CREATE to create an endpoint from a model path.")
  // Parse the S3 URI up front so construction fails fast on a malformed path.
  val parsedModelPath = S3DataPath.fromS3URI(modelPath)
  new SageMakerModel(
    uid = uid,
    modelImage = Some(modelImage),
    modelPath = Some(parsedModelPath),
    modelEnvironmentVariables = modelEnvironmentVariables,
    modelExecutionRoleARN = Some(modelExecutionRoleARN),
    endpointInstanceType = Some(endpointInstanceType),
    endpointInitialInstanceCount = Some(endpointInitialInstanceCount),
    endpointCreationPolicy = endpointCreationPolicy,
    requestRowSerializer = requestRowSerializer,
    responseRowDeserializer = responseRowDeserializer,
    sagemakerClient = sagemakerClient,
    prependResultRows = prependResultRows,
    namePolicy = namePolicy)
}