in sagemaker-pyspark-sdk/src/sagemaker_pyspark/algorithms/XGBoostSageMakerEstimator.py [0:0]
def __init__(self,
             trainingInstanceType,
             trainingInstanceCount,
             endpointInstanceType,
             endpointInitialInstanceCount,
             sagemakerRole=None,
             requestRowSerializer=None,
             responseRowDeserializer=None,
             trainingInputS3DataPath=None,
             trainingOutputS3DataPath=None,
             trainingInstanceVolumeSizeInGB=1024,
             trainingProjectedColumns=None,
             trainingChannelName="train",
             trainingContentType=None,
             trainingS3DataDistribution="ShardedByS3Key",
             trainingSparkDataFormat="libsvm",
             trainingSparkDataFormatOptions=None,
             trainingInputMode="File",
             trainingCompressionCodec=None,
             trainingMaxRuntimeInSeconds=24*60*60,
             trainingKmsKeyId=None,
             modelEnvironmentVariables=None,
             endpointCreationPolicy=EndpointCreationPolicy.CREATE_ON_CONSTRUCT,
             sagemakerClient=None,
             region=None,
             s3Client=None,
             stsClient=None,
             modelPrependInputRowsToTransformationRows=True,
             deleteStagingDataAfterTraining=True,
             namePolicyFactory=None,
             uid=None):
    """Initialize the XGBoost SageMaker estimator.

    All arguments are forwarded unchanged to the parent estimator's
    ``__init__`` via ``locals()``. Object-valued defaults are resolved
    lazily here (None sentinel) rather than in the signature: evaluating
    them as signature defaults would construct IAM roles, AWS clients,
    serializers, and S3 paths once at import time and share those single
    instances across every estimator created from this class.

    Args:
        trainingInstanceType: EC2 instance type used for training.
        trainingInstanceCount: number of training instances.
        endpointInstanceType: EC2 instance type backing the endpoint.
        endpointInitialInstanceCount: initial endpoint instance count.
        sagemakerRole: IAM role for SageMaker; defaults to
            ``IAMRoleFromConfig()`` resolved per instance.
        requestRowSerializer: serializer for inference requests; defaults
            to ``LibSVMRequestRowSerializer()``.
        responseRowDeserializer: deserializer for inference responses;
            defaults to ``XGBoostCSVRowDeserializer()``.
        trainingInputS3DataPath: S3 location for training input; defaults
            to ``S3AutoCreatePath()``.
        trainingOutputS3DataPath: S3 location for training output; defaults
            to ``S3AutoCreatePath()``.
        trainingInstanceVolumeSizeInGB: EBS volume size per training
            instance, in GB.
        trainingProjectedColumns: optional column projection for training.
        trainingChannelName: SageMaker training channel name.
        trainingContentType: optional content type of the training data.
        trainingS3DataDistribution: S3 data distribution strategy.
        trainingSparkDataFormat: Spark data source format for training data.
        trainingSparkDataFormatOptions: options dict for the Spark data
            source; defaults to an empty dict per instance.
        trainingInputMode: SageMaker training input mode.
        trainingCompressionCodec: optional compression codec.
        trainingMaxRuntimeInSeconds: training job timeout (default 24h).
        trainingKmsKeyId: optional KMS key for training volumes/output.
        modelEnvironmentVariables: environment variables dict for the
            model container; defaults to an empty dict per instance.
        endpointCreationPolicy: when to create the endpoint.
        sagemakerClient: SageMaker client; a fresh default client is
            created per instance when omitted.
        region: optional AWS region override.
        s3Client: S3 client; a fresh default client is created per
            instance when omitted.
        stsClient: STS client; a fresh default client is created per
            instance when omitted.
        modelPrependInputRowsToTransformationRows: whether transform output
            rows are appended to the input rows.
        deleteStagingDataAfterTraining: whether staged training data is
            deleted after the job completes.
        namePolicyFactory: factory for job/model/endpoint names; defaults
            to ``RandomNamePolicyFactory()``.
        uid: optional unique identifier; a random UID is generated when
            omitted.
    """
    # Resolve object-valued defaults per instance. NOTE: these rebind
    # parameters only, so locals() below still contains exactly the
    # parameter set expected by the parent __init__.
    if sagemakerRole is None:
        sagemakerRole = IAMRoleFromConfig()
    if requestRowSerializer is None:
        requestRowSerializer = LibSVMRequestRowSerializer()
    if responseRowDeserializer is None:
        responseRowDeserializer = XGBoostCSVRowDeserializer()
    if trainingInputS3DataPath is None:
        trainingInputS3DataPath = S3AutoCreatePath()
    if trainingOutputS3DataPath is None:
        trainingOutputS3DataPath = S3AutoCreatePath()
    if trainingSparkDataFormatOptions is None:
        trainingSparkDataFormatOptions = {}
    if modelEnvironmentVariables is None:
        modelEnvironmentVariables = {}
    if sagemakerClient is None:
        sagemakerClient = SageMakerClients.create_sagemaker_client()
    if s3Client is None:
        s3Client = SageMakerClients.create_s3_default_client()
    if stsClient is None:
        stsClient = SageMakerClients.create_sts_default_client()
    if namePolicyFactory is None:
        namePolicyFactory = RandomNamePolicyFactory()
    if uid is None:
        uid = Identifiable._randomUID()
    # Forward every parameter (minus self) to the parent estimator.
    kwargs = locals().copy()
    del kwargs['self']
    super(XGBoostSageMakerEstimator, self).__init__(**kwargs)