# sagemaker-pyspark-sdk/src/sagemaker_pyspark/SageMakerEstimator.py
# SageMakerEstimator.__init__. The helper names used below
# (S3AutoCreatePath, EndpointCreationPolicy, SageMakerClients,
# IAMRoleFromConfig, RandomNamePolicyFactory, Identifiable) come from the
# module's top-level imports.
def __init__(self,
             trainingImage,
             modelImage,
             trainingInstanceType,
             trainingInstanceCount,
             endpointInstanceType,
             endpointInitialInstanceCount,
             requestRowSerializer,
             responseRowDeserializer,
             hyperParameters=None,
             # Note: object defaults such as S3AutoCreatePath() below are
             # evaluated once, at class-definition time, and shared by all
             # instances that rely on them.
             trainingInputS3DataPath=S3AutoCreatePath(),
             trainingOutputS3DataPath=S3AutoCreatePath(),
             trainingInstanceVolumeSizeInGB=1024,
             trainingProjectedColumns=None,
             trainingChannelName="train",
             trainingContentType=None,
             trainingS3DataDistribution="ShardedByS3Key",
             trainingSparkDataFormat="sagemaker",
             trainingSparkDataFormatOptions=None,
             trainingInputMode="File",
             trainingCompressionCodec=None,
             trainingMaxRuntimeInSeconds=24 * 60 * 60,
             trainingKmsKeyId=None,
             modelEnvironmentVariables=None,
             endpointCreationPolicy=EndpointCreationPolicy.CREATE_ON_CONSTRUCT,
             sagemakerClient=SageMakerClients.create_sagemaker_client(),
             sagemakerRole=IAMRoleFromConfig(),
             s3Client=SageMakerClients.create_s3_default_client(),
             stsClient=SageMakerClients.create_sts_default_client(),
             modelPrependInputRowsToTransformationRows=True,
             deleteStagingDataAfterTraining=True,
             namePolicyFactory=RandomNamePolicyFactory(),
             uid=None):
    # Dict-valued parameters default to None so that each instance gets its
    # own fresh dict instead of sharing one mutable default.
    if trainingSparkDataFormatOptions is None:
        trainingSparkDataFormatOptions = {}
    if modelEnvironmentVariables is None:
        modelEnvironmentVariables = {}
    if hyperParameters is None:
        hyperParameters = {}
    if uid is None:
        uid = Identifiable._randomUID()
    # Capture every constructor argument except self and forward them all
    # to the parent class unchanged.
    kwargs = locals().copy()
    del kwargs['self']
    super(SageMakerEstimator, self).__init__(**kwargs)
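
For reference, a minimal usage sketch of this constructor, modeled on the pattern in the project's README. The image URI, the hyperparameter values, and the KMeans serializer/deserializer pairing are illustrative placeholders, not values taken from this file:

from sagemaker_pyspark import SageMakerEstimator
from sagemaker_pyspark.transformation.serializers import ProtobufRequestRowSerializer
from sagemaker_pyspark.transformation.deserializers import (
    KMeansProtobufResponseRowDeserializer)

# "<kmeans-ecr-image-uri>" is a placeholder; substitute the SageMaker
# KMeans image URI for your region.
estimator = SageMakerEstimator(
    trainingImage="<kmeans-ecr-image-uri>",
    modelImage="<kmeans-ecr-image-uri>",
    trainingInstanceType="ml.m4.xlarge",
    trainingInstanceCount=1,
    endpointInstanceType="ml.m4.xlarge",
    endpointInitialInstanceCount=1,
    requestRowSerializer=ProtobufRequestRowSerializer(),
    responseRowDeserializer=KMeansProtobufResponseRowDeserializer(),
    hyperParameters={"k": "10", "feature_dim": "784"})

# With the default CREATE_ON_CONSTRUCT policy, fit() trains the model,
# creates an endpoint, and returns a SageMakerModel:
# model = estimator.fit(trainingDataFrame)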