emr-serverless-spark-20230808/v2/main.tea

/** * */ import OpenApi; import OpenApi.OpenApiUtil; extends OpenApi; init(config: OpenApiUtil.Config){ super(config); @endpointRule = ''; checkConfig(config); @endpoint = getEndpoint('emr-serverless-spark', @regionId, @endpointRule, @network, @suffix, @endpointMap, @endpoint); } function getEndpoint(productId: string, regionId: string, endpointRule: string, network: string, suffix: string, endpointMap: map[string]string, endpoint: string) throws: string{ if (!$isNull(endpoint)) { return endpoint; } if (!$isNull(endpointMap) && !$isNull(endpointMap[regionId])) { return endpointMap[regionId]; } return OpenApiUtil.getEndpointRules(productId, regionId, endpointRule, network, suffix); } model Artifact { bizId?: string(name='bizId', description='This parameter is required.'), catagoryBizId?: string(name='catagoryBizId'), creator?: long(name='creator', description='This parameter is required.'), credential?: Credential(name='credential'), fullPath?: [ string ](name='fullPath'), gmtCreated?: string(name='gmtCreated', description='This parameter is required.'), gmtModified?: string(name='gmtModified', description='This parameter is required.'), location?: string(name='location', description='This parameter is required.'), modifier?: long(name='modifier', description='This parameter is required.'), modifierName?: string(name='modifierName'), name?: string(name='name', description='This parameter is required.'), } model Category { bizId?: string(name='bizId', description='This parameter is required.'), creator?: long(name='creator', description='This parameter is required.'), gmtCreated?: string(name='gmtCreated', description='This parameter is required.'), gmtModified?: string(name='gmtModified', description='This parameter is required.'), modifier?: long(name='modifier', description='This parameter is required.'), name?: string(name='name', description='This parameter is required.'), parentBizId?: string(name='parentBizId'), type?: string(name='type', description='This parameter is required.'), } model Configuration { configFileName?: string(name='configFileName'), configItemKey?: string(name='configItemKey'), configItemValue?: string(name='configItemValue'), } model ConfigurationOverrides { configurations?: [ { configFileName?: string(name='configFileName'), configItemKey?: string(name='configItemKey'), configItemValue?: string(name='configItemValue'), } ](name='configurations'), } model Credential { accessId?: string(name='accessId', description='This parameter is required.'), dir?: string(name='dir', description='This parameter is required.'), expire?: string(name='expire', description='This parameter is required.'), host?: string(name='host', description='This parameter is required.'), policy?: string(name='policy', description='This parameter is required.'), securityToken?: string(name='securityToken', description='This parameter is required.'), signature?: string(name='signature', description='This parameter is required.'), } model JobDriver { sparkSubmit?: { entryPoint?: string(name='entryPoint'), entryPointArguments?: [ string ](name='entryPointArguments'), sparkSubmitParameters?: string(name='sparkSubmitParameters'), }(name='sparkSubmit'), } model KerberosConf { creator?: string(name='creator'), enabled?: boolean(name='enabled'), gmtCreated?: string(name='gmtCreated'), gmtModified?: string(name='gmtModified'), kerberosConfId?: string(name='kerberosConfId'), keytabs?: [ string ](name='keytabs'), krb5Conf?: string(name='krb5Conf'), name?: string(name='name'), networkServiceId?: 
string(name='networkServiceId'), workspaceId?: string(name='workspaceId'), } model PrincipalAction { actionArn?: string(name='actionArn', example='acs:emr::workspaceId:action/create_queue'), principalArn?: string(name='principalArn', example='acs:emr::workspaceId:user/237593691541622267'), } model ReleaseVersionImage { cpuArchitecture?: string(name='cpuArchitecture'), imageId?: string(name='imageId'), runtimeEngineType?: string(name='runtimeEngineType'), } model RunLog { driverStartup?: string(name='driverStartup'), driverStdError?: string(name='driverStdError'), driverStdOut?: string(name='driverStdOut'), driverSyslog?: string(name='driverSyslog'), } model SparkConf { key?: string(name='key', description='This parameter is required.'), value?: string(name='value', description='This parameter is required.'), } model SqlOutput { rows?: [ { values?: [ string ](name='values', example='null'), } ](name='rows'), schema?: { fields?: [ { name?: string(name='name'), nullable?: boolean(name='nullable'), type?: string(name='type'), } ](name='fields'), }(name='schema'), } model Tag { key?: string(name='key', description='The tag key.', example='workflowId'), value?: string(name='value', description='The tag value.', example='wf-123test'), } model Task { archives?: [ string ](name='archives'), artifactUrl?: string(name='artifactUrl'), bizId?: string(name='bizId', description='This parameter is required.'), categoryBizId?: string(name='categoryBizId'), content?: string(name='content'), creator?: long(name='creator', description='This parameter is required.'), credential?: { accessId?: string(name='accessId'), accessUrl?: string(name='accessUrl'), expire?: long(name='expire'), host?: string(name='host'), path?: string(name='path'), policy?: string(name='policy'), securityToken?: string(name='securityToken'), signature?: string(name='signature'), }(name='credential'), defaultCatalogId?: string(name='defaultCatalogId'), defaultDatabase?: string(name='defaultDatabase'), defaultResourceQueueId?: string(name='defaultResourceQueueId'), defaultSqlComputeId?: string(name='defaultSqlComputeId'), deploymentId?: string(name='deploymentId'), environmentId?: string(name='environmentId'), extraArtifactIds?: [ string ](name='extraArtifactIds'), extraSparkSubmitParams?: string(name='extraSparkSubmitParams'), files?: [ string ](name='files'), fusion?: boolean(name='fusion'), gmtCreated?: string(name='gmtCreated', description='This parameter is required.'), gmtModified?: string(name='gmtModified', description='This parameter is required.'), hasChanged?: boolean(name='hasChanged'), hasCommited?: boolean(name='hasCommited', description='This parameter is required.'), isStreaming?: boolean(name='isStreaming'), jars?: [ string ](name='jars'), kernelId?: string(name='kernelId'), lastRunResourceQueueId?: string(name='lastRunResourceQueueId'), modifier?: long(name='modifier', description='This parameter is required.'), name?: string(name='name', description='This parameter is required.'), params?: map[string]string(name='params'), pyFiles?: [ string ](name='pyFiles'), sessionClusterId?: string(name='sessionClusterId'), sparkArgs?: string(name='sparkArgs', example='100'), sparkConf?: [ SparkConf ](name='sparkConf'), sparkDriverCores?: int32(name='sparkDriverCores', description='This parameter is required.'), sparkDriverMemory?: long(name='sparkDriverMemory', description='This parameter is required.'), sparkEntrypoint?: string(name='sparkEntrypoint'), sparkExecutorCores?: int32(name='sparkExecutorCores', description='This parameter is 
required.'), sparkExecutorMemory?: long(name='sparkExecutorMemory', description='This parameter is required.'), sparkLogLevel?: string(name='sparkLogLevel', description='This parameter is required.'), sparkLogPath?: string(name='sparkLogPath', description='This parameter is required.'), sparkSubmitClause?: string(name='sparkSubmitClause'), sparkVersion?: string(name='sparkVersion', description='This parameter is required.'), tags?: map[string]string(name='tags'), timeout?: int32(name='timeout'), type?: string(name='type', description='This parameter is required.'), } model TaskInstance { bizId?: string(name='bizId'), creator?: long(name='creator'), fenixRunId?: string(name='fenixRunId'), gmtCreated?: string(name='gmtCreated'), taskBizId?: string(name='taskBizId'), taskInfo?: Task(name='taskInfo'), taskStatus?: string(name='taskStatus'), workspaceBizId?: string(name='workspaceBizId'), } model TaskSnapshot { bizId?: string(name='bizId'), commiter?: long(name='commiter'), gmtCreated?: string(name='gmtCreated'), item?: Task(name='item'), message?: string(name='message'), taskBizId?: string(name='taskBizId'), version?: string(name='version'), } model Template { creator?: long(name='creator', description='This parameter is required.'), displaySparkVersion?: string(name='displaySparkVersion'), fusion?: boolean(name='fusion'), gmtCreated?: string(name='gmtCreated', description='This parameter is required.'), gmtModified?: string(name='gmtModified', description='This parameter is required.'), modifier?: long(name='modifier', description='This parameter is required.'), sparkConf?: [ SparkConf ](name='sparkConf'), sparkDriverCores?: int32(name='sparkDriverCores', description='This parameter is required.'), sparkDriverMemory?: long(name='sparkDriverMemory', description='This parameter is required.'), sparkExecutorCores?: int32(name='sparkExecutorCores', description='This parameter is required.'), sparkExecutorMemory?: long(name='sparkExecutorMemory', description='This parameter is required.'), sparkLogLevel?: string(name='sparkLogLevel', description='This parameter is required.'), sparkLogPath?: string(name='sparkLogPath', description='This parameter is required.'), sparkVersion?: string(name='sparkVersion', description='This parameter is required.'), templateType?: string(name='templateType'), } model TimeRange { endTime?: long(name='endTime', description='The end time of the time range.', example='1688370894339'), startTime?: long(name='startTime', description='The start time of the time range.', example='1688370894339'), } model AddMembersRequest { memberArns?: [ string ](name='memberArns', description='This parameter is required.'), workspaceId?: string(name='workspaceId', description='The workspace ID. This parameter is required.', example='w-975bcfda9625****'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model AddMembersResponseBody = { requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model AddMembersResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: AddMembersResponseBody(name='body'), } /** * @summary Adds a RAM user or RAM role to a workspace as a member. 
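*
* Example (illustrative sketch, not part of the generated definition; the workspace ID is the documentation example above and the member ARN is a placeholder):
*
*     var request = new AddMembersRequest{
*       workspaceId = 'w-975bcfda9625****',
*       memberArns = [ '<member-arn>' ]
*     };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = addMembersWithOptions(request, headers, runtime);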
* * @param request AddMembersRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return AddMembersResponse */ async function addMembersWithOptions(request: AddMembersRequest, headers: map[string]string, runtime: $RuntimeOptions): AddMembersResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.memberArns)) { body['memberArns'] = request.memberArns; } if (!$isNull(request.workspaceId)) { body['workspaceId'] = request.workspaceId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'AddMembers', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/auth/members`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Adds a RAM user or RAM role to a workspace as a member. * * @param request AddMembersRequest * @return AddMembersResponse */ async function addMembers(request: AddMembersRequest): AddMembersResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return addMembersWithOptions(request, headers, runtime); } model CancelJobRunRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model CancelJobRunResponseBody = { jobRunId?: string(name='jobRunId', description='The job ID.', example='jr-1a2bc3'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model CancelJobRunResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: CancelJobRunResponseBody(name='body'), } /** * @summary Terminates a Spark job. * * @param request CancelJobRunRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return CancelJobRunResponse */ async function cancelJobRunWithOptions(workspaceId: string, jobRunId: string, request: CancelJobRunRequest, headers: map[string]string, runtime: $RuntimeOptions): CancelJobRunResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'CancelJobRun', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/jobRuns/${$URL.percentEncode(jobRunId)}`, method = 'DELETE', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Terminates a Spark job. * * @param request CancelJobRunRequest * @return CancelJobRunResponse */ async function cancelJobRun(workspaceId: string, jobRunId: string, request: CancelJobRunRequest): CancelJobRunResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return cancelJobRunWithOptions(workspaceId, jobRunId, request, headers, runtime); } model CreateProcessDefinitionWithScheduleRequest { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), description?: string(name='description', description='The description of the workflow. 
This parameter is required.', example='ods batch workflow'), executionType?: string(name='executionType', description='The execution policy This parameter is required.', example='PARALLEL'), globalParams?: [ { direct?: string(name='direct'), prop?: string(name='prop'), type?: string(name='type'), value?: string(name='value'), } ](name='globalParams'), name?: string(name='name', description='The name of the workflow. This parameter is required.', example='ods_batch_workflow'), productNamespace?: string(name='productNamespace', description='The code of the service. This parameter is required.', example='SS'), publish?: boolean(name='publish', description='Specifies whether to publish the workflow.', example='true'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), resourceQueue?: string(name='resourceQueue', description='The resource queue.', example='root_queue'), retryTimes?: int32(name='retryTimes', description='The number of retries.', example='1'), runAs?: string(name='runAs', description='The ID of the Alibaba Cloud account used by the user who creates the workflow.', example='113***************'), schedule?: { crontab?: string(name='crontab', description='The CRON expression that is used for scheduling.', example='0 0 0 * * ?'), endTime?: string(name='endTime', description='The end time of the scheduling.', example='2025-12-23 16:13:27'), startTime?: string(name='startTime', description='The start time of the scheduling.', example='2024-12-23 16:13:27'), timezoneId?: string(name='timezoneId', description='The ID of the time zone.', example='Asia/Shanghai'), }(name='schedule', description='The scheduling settings.'), tags?: map[string]string(name='tags', description='The tags.'), taskDefinitionJson?: [ { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), code?: long(name='code', description='The node ID. This parameter is required.', example='36************'), description?: string(name='description', description='The node description.', example='ods transform task'), failAlertEnable?: boolean(name='failAlertEnable', description='Specifies whether to send alerts when the node fails.', example='false'), failRetryTimes?: int32(name='failRetryTimes', description='The number of retries when the node fails.', example='1'), name?: string(name='name', description='The name of the node. This parameter is required.', example='ods_transform_task'), startAlertEnable?: boolean(name='startAlertEnable', description='Specifies whether to send alerts when the node is started.', example='false'), tags?: map[string]string(name='tags', description='The tags.'), taskParams?: { displaySparkVersion?: string(name='displaySparkVersion', description='The displayed version of the Spark engine.', example='esr-4.0.0 (Spark 3.5.2, Scala 2.12)'), environmentId?: string(name='environmentId', description='The environment ID.', example='env-crhq2h5lhtgju93buhkg'), fusion?: boolean(name='fusion', description='Specifies whether to enable Fusion engine for acceleration.', example='false'), localParams?: [ { direct?: string(name='direct'), prop?: string(name='prop'), type?: string(name='type'), value?: string(name='value'), } ](name='localParams'), resourceQueueId?: string(name='resourceQueueId', description='The name of the resource queue on which the job runs. 
This parameter is required.', example='root_queue'), sparkConf?: [ { key?: string(name='key', description='The key of the SparkConf object.', example='spark.dynamicAllocation.enabled'), value?: string(name='value', description='The value of the SparkConf object.', example='true'), } ](name='sparkConf', description='The configurations of the Spark job.'), sparkDriverCores?: int32(name='sparkDriverCores', description='The number of driver cores of the Spark job.', example='1'), sparkDriverMemory?: long(name='sparkDriverMemory', description='The size of driver memory of the Spark job.', example='4g'), sparkExecutorCores?: int32(name='sparkExecutorCores', description='The number of executor cores of the Spark job.', example='1'), sparkExecutorMemory?: long(name='sparkExecutorMemory', description='The size of executor memory of the Spark job.', example='4g'), sparkLogLevel?: string(name='sparkLogLevel', description='The level of the Spark log.', example='INFO'), sparkLogPath?: string(name='sparkLogPath', description='The path where the operational logs of the Spark job are stored.'), sparkVersion?: string(name='sparkVersion', description='The version of the Spark engine.', example='esr-4.0.0 (Spark 3.5.2, Scala 2.12)'), taskBizId?: string(name='taskBizId', description='The ID of the data development job. This parameter is required.', example='TSK-d87******************'), type?: string(name='type', description='The type of the Spark job.', example='VPC'), workspaceBizId?: string(name='workspaceBizId', description='The workspace ID. This parameter is required.', example='w-d8********'), }(name='taskParams', description='The job parameters. This parameter is required.'), taskType?: string(name='taskType', description='The type of the node. This parameter is required.', example='MigrateData'), timeout?: int32(name='timeout', description='The timeout period of the callback. Unit: seconds.', example='1200'), } ](name='taskDefinitionJson', description='The descriptions of all nodes in the workflow. This parameter is required.'), taskParallelism?: int32(name='taskParallelism', description='The node parallelism.', example='1'), taskRelationJson?: [ { name?: string(name='name', description='The name of the node topology. You can enter a workflow name. This parameter is required.', example='ods batch workflow'), postTaskCode?: long(name='postTaskCode', description='The ID of the downstream node. This parameter is required.', example='28************'), postTaskVersion?: int32(name='postTaskVersion', description='The version of the downstream node. This parameter is required.', example='1'), preTaskCode?: long(name='preTaskCode', description='The ID of the upstream node. This parameter is required.', example='16************'), preTaskVersion?: int32(name='preTaskVersion', description='The version of the upstream node. This parameter is required.', example='1'), } ](name='taskRelationJson', description='The dependencies of all nodes in the workflow. preTaskCode specifies the ID of an upstream node, and postTaskCode specifies the ID of a downstream node. The ID of each node is unique. If a node does not have an upstream node, set preTaskCode to 0. 
This parameter is required.'), timeout?: int32(name='timeout', description='The default timeout period of the workflow.', example='60'), } model CreateProcessDefinitionWithScheduleShrinkRequest { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), description?: string(name='description', description='The description of the workflow. This parameter is required.', example='ods batch workflow'), executionType?: string(name='executionType', description='The execution policy This parameter is required.', example='PARALLEL'), globalParamsShrink?: string(name='globalParams'), name?: string(name='name', description='The name of the workflow. This parameter is required.', example='ods_batch_workflow'), productNamespace?: string(name='productNamespace', description='The code of the service. This parameter is required.', example='SS'), publish?: boolean(name='publish', description='Specifies whether to publish the workflow.', example='true'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), resourceQueue?: string(name='resourceQueue', description='The resource queue.', example='root_queue'), retryTimes?: int32(name='retryTimes', description='The number of retries.', example='1'), runAs?: string(name='runAs', description='The ID of the Alibaba Cloud account used by the user who creates the workflow.', example='113***************'), scheduleShrink?: string(name='schedule', description='The scheduling settings.'), tagsShrink?: string(name='tags', description='The tags.'), taskDefinitionJsonShrink?: string(name='taskDefinitionJson', description='The descriptions of all nodes in the workflow. This parameter is required.'), taskParallelism?: int32(name='taskParallelism', description='The node parallelism.', example='1'), taskRelationJsonShrink?: string(name='taskRelationJson', description='The dependencies of all nodes in the workflow. preTaskCode specifies the ID of an upstream node, and postTaskCode specifies the ID of a downstream node. The ID of each node is unique. If a node does not have an upstream node, set preTaskCode to 0. This parameter is required.'), timeout?: int32(name='timeout', description='The default timeout period of the workflow.', example='60'), } model CreateProcessDefinitionWithScheduleResponseBody = { code?: int32(name='code', description='The code that is returned by the backend server.', example='1400009'), data?: { code?: long(name='code', description='The workflow ID.', example='160************'), id?: int32(name='id', description='The serial number of the workflow.', example='12342'), }(name='data', description='The returned data.'), failed?: string(name='failed', description='Indicates whether the request failed.', example='false'), httpStatusCode?: int32(name='httpStatusCode', description='The HTTP status code.', example='200'), msg?: string(name='msg', description='The description of the returned code.', example='No permission for resource action'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), success?: string(name='success', description='Indicates whether the request was successful.', example='true'), } model CreateProcessDefinitionWithScheduleResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: CreateProcessDefinitionWithScheduleResponseBody(name='body'), } /** * @summary Creates a workflow. 
* * @param tmpReq CreateProcessDefinitionWithScheduleRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return CreateProcessDefinitionWithScheduleResponse */ async function createProcessDefinitionWithScheduleWithOptions(bizId: string, tmpReq: CreateProcessDefinitionWithScheduleRequest, headers: map[string]string, runtime: $RuntimeOptions): CreateProcessDefinitionWithScheduleResponse { tmpReq.validate(); var request = new CreateProcessDefinitionWithScheduleShrinkRequest{}; OpenApiUtil.convert(tmpReq, request); if (!$isNull(tmpReq.globalParams)) { request.globalParamsShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.globalParams, 'globalParams', 'json'); } if (!$isNull(tmpReq.schedule)) { request.scheduleShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.schedule, 'schedule', 'json'); } if (!$isNull(tmpReq.tags)) { request.tagsShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.tags, 'tags', 'json'); } if (!$isNull(tmpReq.taskDefinitionJson)) { request.taskDefinitionJsonShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.taskDefinitionJson, 'taskDefinitionJson', 'json'); } if (!$isNull(tmpReq.taskRelationJson)) { request.taskRelationJsonShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.taskRelationJson, 'taskRelationJson', 'json'); } var query : map[string]any = {}; if (!$isNull(request.alertEmailAddress)) { query['alertEmailAddress'] = request.alertEmailAddress; } if (!$isNull(request.description)) { query['description'] = request.description; } if (!$isNull(request.executionType)) { query['executionType'] = request.executionType; } if (!$isNull(request.globalParamsShrink)) { query['globalParams'] = request.globalParamsShrink; } if (!$isNull(request.name)) { query['name'] = request.name; } if (!$isNull(request.productNamespace)) { query['productNamespace'] = request.productNamespace; } if (!$isNull(request.publish)) { query['publish'] = request.publish; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.resourceQueue)) { query['resourceQueue'] = request.resourceQueue; } if (!$isNull(request.retryTimes)) { query['retryTimes'] = request.retryTimes; } if (!$isNull(request.runAs)) { query['runAs'] = request.runAs; } if (!$isNull(request.scheduleShrink)) { query['schedule'] = request.scheduleShrink; } if (!$isNull(request.tagsShrink)) { query['tags'] = request.tagsShrink; } if (!$isNull(request.taskDefinitionJsonShrink)) { query['taskDefinitionJson'] = request.taskDefinitionJsonShrink; } if (!$isNull(request.taskParallelism)) { query['taskParallelism'] = request.taskParallelism; } if (!$isNull(request.taskRelationJsonShrink)) { query['taskRelationJson'] = request.taskRelationJsonShrink; } if (!$isNull(request.timeout)) { query['timeout'] = request.timeout; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'CreateProcessDefinitionWithSchedule', version = '2023-08-08', protocol = 'HTTPS', pathname = `/dolphinscheduler/projects/${$URL.percentEncode(bizId)}/process-definition`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Creates a workflow. 
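*
* Example (illustrative sketch; only a few top-level fields are shown, and a real workflow also needs the required taskDefinitionJson and taskRelationJson described above; the project bizId is a placeholder):
*
*     var request = new CreateProcessDefinitionWithScheduleRequest{
*       name = 'ods_batch_workflow',
*       description = 'ods batch workflow',
*       executionType = 'PARALLEL',
*       productNamespace = 'SS',
*       regionId = 'cn-hangzhou'
*     };
*     var response = createProcessDefinitionWithSchedule('<project-biz-id>', request);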
* * @param request CreateProcessDefinitionWithScheduleRequest * @return CreateProcessDefinitionWithScheduleResponse */ async function createProcessDefinitionWithSchedule(bizId: string, request: CreateProcessDefinitionWithScheduleRequest): CreateProcessDefinitionWithScheduleResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return createProcessDefinitionWithScheduleWithOptions(bizId, request, headers, runtime); } model CreateSessionClusterRequest { applicationConfigs?: [ { configFileName?: string(name='configFileName', description='The name of the configuration file.', example='spark-defaults.conf'), configItemKey?: string(name='configItemKey', description='The key of SparkConf.', example='spark.app.name'), configItemValue?: string(name='configItemValue', description='The value of SparkConf.', example='test'), } ](name='applicationConfigs', description='The Spark configurations.'), autoStartConfiguration?: { enable?: boolean(name='enable', description='Specifies whether to enable automatic startup. * true * false', example='false'), }(name='autoStartConfiguration', description='The automatic startup configuration.'), autoStopConfiguration?: { enable?: boolean(name='enable', description='Specifies whether to enable automatic termination. * true * false', example='false'), idleTimeoutMinutes?: integer(name='idleTimeoutMinutes', description='The idle timeout period. The session is automatically terminated when the idle timeout period is exceeded.', example='60'), }(name='autoStopConfiguration', description='The automatic termination configuration.'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version of the Spark engine.', example='esr-3.3.1'), envId?: string(name='envId', description='The ID of the Python environment. This parameter takes effect only for notebook sessions.', example='env-cpv569tlhtgndjl86t40'), fusion?: boolean(name='fusion', description='Specifies whether to enable Fusion engine for acceleration.', example='false'), kind?: string(name='kind', description='The session type. * SQL * NOTEBOOK', example='SQL'), name?: string(name='name', description='The name of the job.', example='spark_job_name'), queueName?: string(name='queueName', description='The queue name.', example='root_queue'), releaseVersion?: string(name='releaseVersion', description='The version number of Spark.', example='esr-3.3.1'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model CreateSessionClusterResponseBody = { requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='w-******'), } model CreateSessionClusterResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: CreateSessionClusterResponseBody(name='body'), } /** * @summary Creates a session. 
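*
* Example (illustrative sketch; the field values are the examples documented on the request model and the workspace ID is a placeholder):
*
*     var request = new CreateSessionClusterRequest{
*       kind = 'SQL',
*       name = 'spark_job_name',
*       queueName = 'root_queue',
*       releaseVersion = 'esr-3.3.1',
*       regionId = 'cn-hangzhou'
*     };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = createSessionClusterWithOptions('w-975bcfda9625****', request, headers, runtime);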
* * @param request CreateSessionClusterRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return CreateSessionClusterResponse */ async function createSessionClusterWithOptions(workspaceId: string, request: CreateSessionClusterRequest, headers: map[string]string, runtime: $RuntimeOptions): CreateSessionClusterResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.applicationConfigs)) { body['applicationConfigs'] = request.applicationConfigs; } if (!$isNull(request.autoStartConfiguration)) { body['autoStartConfiguration'] = request.autoStartConfiguration; } if (!$isNull(request.autoStopConfiguration)) { body['autoStopConfiguration'] = request.autoStopConfiguration; } if (!$isNull(request.displayReleaseVersion)) { body['displayReleaseVersion'] = request.displayReleaseVersion; } if (!$isNull(request.envId)) { body['envId'] = request.envId; } if (!$isNull(request.fusion)) { body['fusion'] = request.fusion; } if (!$isNull(request.kind)) { body['kind'] = request.kind; } if (!$isNull(request.name)) { body['name'] = request.name; } if (!$isNull(request.queueName)) { body['queueName'] = request.queueName; } if (!$isNull(request.releaseVersion)) { body['releaseVersion'] = request.releaseVersion; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'CreateSessionCluster', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/sessionClusters`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Creates a session. * * @param request CreateSessionClusterRequest * @return CreateSessionClusterResponse */ async function createSessionCluster(workspaceId: string, request: CreateSessionClusterRequest): CreateSessionClusterResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return createSessionClusterWithOptions(workspaceId, request, headers, runtime); } model CreateSqlStatementRequest { codeContent?: string(name='codeContent', description='The SQL code. You can specify one or more SQL statements.', example='SHOW TABLES'), defaultCatalog?: string(name='defaultCatalog', description='The default Data Lake Formation (DLF) catalog ID.', example='default_catalog'), defaultDatabase?: string(name='defaultDatabase', description='The name of the default database.', example='default'), limit?: int32(name='limit', description='The maximum number of entries to return. Valid values: 1 to 10000.', example='1000'), sqlComputeId?: string(name='sqlComputeId', description='The SQL session ID. 
You can create an SQL session in the workspace created in EMR Serverless Spark.', example='sc-dfahdfjafhajd****'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model CreateSqlStatementResponseBody = { data?: { statementId?: string(name='statementId', description='The interactive query ID.', example='st-1231dfafadfa***'), }(name='data', description='The data returned.'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model CreateSqlStatementResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: CreateSqlStatementResponseBody(name='body'), } /** * @summary Creates an SQL query task. * * @param request CreateSqlStatementRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return CreateSqlStatementResponse */ async function createSqlStatementWithOptions(workspaceId: string, request: CreateSqlStatementRequest, headers: map[string]string, runtime: $RuntimeOptions): CreateSqlStatementResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.codeContent)) { body['codeContent'] = request.codeContent; } if (!$isNull(request.defaultCatalog)) { body['defaultCatalog'] = request.defaultCatalog; } if (!$isNull(request.defaultDatabase)) { body['defaultDatabase'] = request.defaultDatabase; } if (!$isNull(request.limit)) { body['limit'] = request.limit; } if (!$isNull(request.sqlComputeId)) { body['sqlComputeId'] = request.sqlComputeId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'CreateSqlStatement', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/interactive/v1/workspace/${$URL.percentEncode(workspaceId)}/statement`, method = 'PUT', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Creates an SQL query task. * * @param request CreateSqlStatementRequest * @return CreateSqlStatementResponse */ async function createSqlStatement(workspaceId: string, request: CreateSqlStatementRequest): CreateSqlStatementResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return createSqlStatementWithOptions(workspaceId, request, headers, runtime); } model CreateWorkspaceRequest { autoRenew?: string(name='autoRenew', description='Specifies whether to enable auto-renewal. This parameter is required only if the paymentType parameter is set to Pre.', example='false'), autoRenewPeriod?: string(name='autoRenewPeriod', description='The auto-renewal duration. This parameter is required only if the paymentType parameter is set to Pre.', example='100'), autoRenewPeriodUnit?: string(name='autoRenewPeriodUnit', description='The unit of the auto-renewal duration. 
This parameter is required only if the paymentType parameter is set to Pre.', example='month'), autoStartSessionCluster?: boolean(name='autoStartSessionCluster', description='Specifies whether to automatically start a session.', example='false'), clientToken?: string(name='clientToken', description='The client token that is used to ensure the idempotence of the request.', example='8e6aae2810c8f67229ca70bb31cd****'), dlfCatalogId?: string(name='dlfCatalogId', description='The information of the Data Lake Formation (DLF) catalog.', example='123xxxxx'), dlfType?: string(name='dlfType', description='The version of DLF.', example='dlf1.0'), duration?: string(name='duration', description='The subscription period. This parameter is required only if the paymentType parameter is set to Pre.', example='12452'), ossBucket?: string(name='ossBucket', description='The name of the Object Storage Service (OSS) bucket.', example='oss://test-bucket/'), paymentDurationUnit?: string(name='paymentDurationUnit', description='The unit of the subscription duration.', example='1000'), paymentType?: string(name='paymentType', description='The billing method. Valid values: * PayAsYouGo * Pre', example='PayAsYouGo'), ramRoleName?: string(name='ramRoleName', description='The name of the role used to run Spark jobs.', example='AliyunEMRSparkJobRunDefaultRole'), releaseType?: string(name='releaseType', description='The type of the version.', example='pro'), resourceSpec?: { cu?: string(name='cu', description='The maximum resource quota for a workspace.', example='1000'), }(name='resourceSpec', description='The resource specifications.'), tag?: [ { key?: string(name='key'), value?: string(name='value'), } ](name='tag', nullable=false), workspaceName?: string(name='workspaceName', description='The name of the workspace.', example='default'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model CreateWorkspaceResponseBody = { operationId?: string(name='operationId', description='The operation ID.', example='op-******'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-******'), } model CreateWorkspaceResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: CreateWorkspaceResponseBody(name='body'), } /** * @summary Creates a workspace. 
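*
* Example (illustrative sketch; the field values are the examples documented on the request model):
*
*     var request = new CreateWorkspaceRequest{
*       workspaceName = 'default',
*       paymentType = 'PayAsYouGo',
*       ossBucket = 'oss://test-bucket/',
*       ramRoleName = 'AliyunEMRSparkJobRunDefaultRole',
*       regionId = 'cn-hangzhou'
*     };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = createWorkspaceWithOptions(request, headers, runtime);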
* * @param request CreateWorkspaceRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return CreateWorkspaceResponse */ async function createWorkspaceWithOptions(request: CreateWorkspaceRequest, headers: map[string]string, runtime: $RuntimeOptions): CreateWorkspaceResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.autoRenew)) { body['autoRenew'] = request.autoRenew; } if (!$isNull(request.autoRenewPeriod)) { body['autoRenewPeriod'] = request.autoRenewPeriod; } if (!$isNull(request.autoRenewPeriodUnit)) { body['autoRenewPeriodUnit'] = request.autoRenewPeriodUnit; } if (!$isNull(request.autoStartSessionCluster)) { body['autoStartSessionCluster'] = request.autoStartSessionCluster; } if (!$isNull(request.clientToken)) { body['clientToken'] = request.clientToken; } if (!$isNull(request.dlfCatalogId)) { body['dlfCatalogId'] = request.dlfCatalogId; } if (!$isNull(request.dlfType)) { body['dlfType'] = request.dlfType; } if (!$isNull(request.duration)) { body['duration'] = request.duration; } if (!$isNull(request.ossBucket)) { body['ossBucket'] = request.ossBucket; } if (!$isNull(request.paymentDurationUnit)) { body['paymentDurationUnit'] = request.paymentDurationUnit; } if (!$isNull(request.paymentType)) { body['paymentType'] = request.paymentType; } if (!$isNull(request.ramRoleName)) { body['ramRoleName'] = request.ramRoleName; } if (!$isNull(request.releaseType)) { body['releaseType'] = request.releaseType; } if (!$isNull(request.resourceSpec)) { body['resourceSpec'] = request.resourceSpec; } if (!$isNull(request.tag)) { body['tag'] = request.tag; } if (!$isNull(request.workspaceName)) { body['workspaceName'] = request.workspaceName; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'CreateWorkspace', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Creates a workspace. * * @param request CreateWorkspaceRequest * @return CreateWorkspaceResponse */ async function createWorkspace(request: CreateWorkspaceRequest): CreateWorkspaceResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return createWorkspaceWithOptions(request, headers, runtime); } model EditWorkspaceQueueRequest { environments?: [ string ](name='environments'), resourceSpec?: { cu?: long(name='cu', example='1000'), }(name='resourceSpec'), workspaceId?: string(name='workspaceId', example='w-975bcfda9625****'), workspaceQueueName?: string(name='workspaceQueueName', example='dev_queue'), regionId?: string(name='regionId', example='cn-hangzhou'), } model EditWorkspaceQueueResponseBody = { requestId?: string(name='RequestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model EditWorkspaceQueueResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: EditWorkspaceQueueResponseBody(name='body'), } /** * @summary Modifies the queue of a workspace. 
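*
* Example (illustrative sketch; the field values are the examples documented on the request model):
*
*     var request = new EditWorkspaceQueueRequest{
*       workspaceId = 'w-975bcfda9625****',
*       workspaceQueueName = 'dev_queue',
*       regionId = 'cn-hangzhou'
*     };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = editWorkspaceQueueWithOptions(request, headers, runtime);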
* * @param request EditWorkspaceQueueRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return EditWorkspaceQueueResponse */ async function editWorkspaceQueueWithOptions(request: EditWorkspaceQueueRequest, headers: map[string]string, runtime: $RuntimeOptions): EditWorkspaceQueueResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.environments)) { body['environments'] = request.environments; } if (!$isNull(request.resourceSpec)) { body['resourceSpec'] = request.resourceSpec; } if (!$isNull(request.workspaceId)) { body['workspaceId'] = request.workspaceId; } if (!$isNull(request.workspaceQueueName)) { body['workspaceQueueName'] = request.workspaceQueueName; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'EditWorkspaceQueue', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/queues/action/edit`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Modifies the queue of a workspace. * * @param request EditWorkspaceQueueRequest * @return EditWorkspaceQueueResponse */ async function editWorkspaceQueue(request: EditWorkspaceQueueRequest): EditWorkspaceQueueResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return editWorkspaceQueueWithOptions(request, headers, runtime); } model GetCuHoursRequest { endTime?: string(name='endTime', description='The end time of the query time range. This parameter is required.', example='2024-01-08 00:00:00'), startTime?: string(name='startTime', description='The start time of the query time range. This parameter is required.', example='2024-01-01 00:00:00'), } model GetCuHoursResponseBody = { data?: { cuHours?: string(name='cuHours', description='The number of CU-hours consumed by a queue during a specified cycle. The value is an estimated value. Refer to your Alibaba Cloud bill for the actual number of consumed CU-hours.', example='{2025-01-09 00:00:00=2.033333, 2025-01-09 01:00:00=2.033333, 2025-01-09 02:00:00=2.033333, 2025-01-09 03:00:00=2.033333, 2025-01-09 04:00:00=2.033333, 2025-01-09 05:00:00=2.033333, 2025-01-09 06:00:00=2.033333, 2025-01-09 07:00:00=2.033333, 2025-01-09 08:00:00=2.033333, 2025-01-09 09:00:00=1.933333, 2025-01-09 10:00:00=2.133333, 2025-01-09 11:00:00=3.100000, 2025-01-09 12:00:00=2.900000}'), }(name='data', description='The returned data.', example='{ "cuHours": "{2025-01-09 00:00:00=2.033333, 2025-01-09 01:00:00=2.033333, 2025-01-09 02:00:00=2.033333, 2025-01-09 03:00:00=2.033333, 2025-01-09 04:00:00=2.033333, 2025-01-09 05:00:00=2.033333, 2025-01-09 06:00:00=2.033333, 2025-01-09 07:00:00=2.033333, 2025-01-09 08:00:00=2.033333, 2025-01-09 09:00:00=1.933333, 2025-01-09 10:00:00=2.133333, 2025-01-09 11:00:00=3.100000, 2025-01-09 12:00:00=2.900000}" }'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model GetCuHoursResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GetCuHoursResponseBody(name='body'), } /** * @summary Queries the number of CU-hours consumed by a queue during a specified cycle. 
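*
* Example (illustrative sketch; the time range is the documented example, and the workspace ID and queue name are placeholders):
*
*     var request = new GetCuHoursRequest{
*       startTime = '2024-01-01 00:00:00',
*       endTime = '2024-01-08 00:00:00'
*     };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = getCuHoursWithOptions('w-975bcfda9625****', 'root_queue', request, headers, runtime);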
* * @param request GetCuHoursRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GetCuHoursResponse */ async function getCuHoursWithOptions(workspaceId: string, queue: string, request: GetCuHoursRequest, headers: map[string]string, runtime: $RuntimeOptions): GetCuHoursResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.endTime)) { query['endTime'] = request.endTime; } if (!$isNull(request.startTime)) { query['startTime'] = request.startTime; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'GetCuHours', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/metric/cuHours/${$URL.percentEncode(queue)}`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the number of CU-hours consumed by a queue during a specified cycle. * * @param request GetCuHoursRequest * @return GetCuHoursResponse */ async function getCuHours(workspaceId: string, queue: string, request: GetCuHoursRequest): GetCuHoursResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return getCuHoursWithOptions(workspaceId, queue, request, headers, runtime); } model GetDoctorApplicationRequest { locale?: string(name='locale', description='The language of diagnostic information.', example='zh-CN'), queryTime?: string(name='queryTime', description='The query time.', example='2024-01-01'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model GetDoctorApplicationResponseBody = { data?: { suggestions?: [ string ](name='suggestions', description='The diagnostics list.'), }(name='data', description='The data returned.'), } model GetDoctorApplicationResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GetDoctorApplicationResponseBody(name='body'), } /** * @summary Obtains job analysis information on E-MapReduce (EMR) Doctor. * * @param request GetDoctorApplicationRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GetDoctorApplicationResponse */ async function getDoctorApplicationWithOptions(workspaceId: string, runId: string, request: GetDoctorApplicationRequest, headers: map[string]string, runtime: $RuntimeOptions): GetDoctorApplicationResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.locale)) { query['locale'] = request.locale; } if (!$isNull(request.queryTime)) { query['queryTime'] = request.queryTime; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'GetDoctorApplication', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/runs/${$URL.percentEncode(runId)}/action/getDoctorApplication`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Obtains job analysis information on E-MapReduce (EMR) Doctor. 
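*
* Example (illustrative sketch; the run ID is a placeholder and the other values are the documented examples):
*
*     var request = new GetDoctorApplicationRequest{
*       locale = 'zh-CN',
*       queryTime = '2024-01-01',
*       regionId = 'cn-hangzhou'
*     };
*     var response = getDoctorApplication('w-975bcfda9625****', '<run-id>', request);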
* * @param request GetDoctorApplicationRequest * @return GetDoctorApplicationResponse */ async function getDoctorApplication(workspaceId: string, runId: string, request: GetDoctorApplicationRequest): GetDoctorApplicationResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return getDoctorApplicationWithOptions(workspaceId, runId, request, headers, runtime); } model GetJobRunRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model GetJobRunResponseBody = { jobRun?: { codeType?: string(name='codeType', description='The code type of the job. Valid values: * SQL * JAR * PYTHON', example='SQL'), configurationOverrides?: { configurations?: [ Configuration ](name='configurations', description='The configurations.'), }(name='configurationOverrides', description='The configurations of the Spark jobs.'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version of the Spark engine.', example='esr-4.0.0 (Spark 3.5.2, Scala 2.12)'), endTime?: long(name='endTime', description='The end time of the job.', example='1684119314000'), environmentId?: string(name='environmentId', description='The environment ID.', example='env-cpv569tlhtgndjl8****'), executionTimeoutSeconds?: int32(name='executionTimeoutSeconds', description='The timeout period of the job.', example='3600'), fusion?: boolean(name='fusion', description='Indicates whether the Fusion engine is used for acceleration.', example='false'), jobDriver?: JobDriver(name='jobDriver', description='The information about Spark Driver.'), jobRunId?: string(name='jobRunId', description='The job ID.', example='jr-231231'), log?: RunLog(name='log', description='The path where the operational logs are stored.'), name?: string(name='name', description='The job name.', example='jobName'), releaseVersion?: string(name='releaseVersion', description='The version of the Spark engine on which the job runs.', example='esr-3.3.1'), resourceOwnerId?: string(name='resourceOwnerId', description='The ID of the user who created the job.', example='1509789347011222'), resourceQueueId?: string(name='resourceQueueId', description='The name of the queue on which the job runs.', example='root_queue'), state?: string(name='state', description='The job state.', example='Running'), stateChangeReason?: { code?: string(name='code', description='The error code.', example='ERR-100000'), message?: string(name='message', description='The error message.', example='connection refused'), }(name='stateChangeReason', description='The reason of the job status change.'), submitTime?: long(name='submitTime', description='The time when the job was submitted.', example='1684119314000'), tags?: [ Tag ](name='tags', description='The tags of the job.'), webUI?: string(name='webUI', description='The web UI of the job.', example='http://spark-ui'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-1234abcd'), }(name='jobRun', description='The details of the job.'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model GetJobRunResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GetJobRunResponseBody(name='body'), } /** * @summary Obtain the job details. 
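*
* Example (illustrative sketch; the IDs are the example values documented on the response model):
*
*     var request = new GetJobRunRequest{ regionId = 'cn-hangzhou' };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = getJobRunWithOptions('w-1234abcd', 'jr-231231', request, headers, runtime);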
* * @param request GetJobRunRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GetJobRunResponse */ async function getJobRunWithOptions(workspaceId: string, jobRunId: string, request: GetJobRunRequest, headers: map[string]string, runtime: $RuntimeOptions): GetJobRunResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'GetJobRun', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/jobRuns/${$URL.percentEncode(jobRunId)}`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Obtain the job details. * * @param request GetJobRunRequest * @return GetJobRunResponse */ async function getJobRun(workspaceId: string, jobRunId: string, request: GetJobRunRequest): GetJobRunResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return getJobRunWithOptions(workspaceId, jobRunId, request, headers, runtime); } model GetSessionClusterRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model GetSessionClusterResponseBody = { requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), sessionCluster?: { applicationConfigs?: [ { configFileName?: string(name='configFileName', description='The name of the configuration file.', example='spark-defaults.conf'), configItemKey?: string(name='configItemKey', description='The key of the configuration.', example='spark.app.name'), configItemValue?: string(name='configItemValue', description='The configuration value.', example='test'), } ](name='applicationConfigs', description='The Spark configurations.'), autoStartConfiguration?: { enable?: boolean(name='enable', description='Indicates whether automatic startup is enabled. * true * false', example='false'), }(name='autoStartConfiguration', description='Indicates whether automatic startup is enabled.'), autoStopConfiguration?: { enable?: boolean(name='enable', description='Indicates whether automatic termination is enabled. * true * false', example='false'), idleTimeoutMinutes?: int32(name='idleTimeoutMinutes', description='The idle timeout period. 
The session is automatically terminated when the idle timeout period is exceeded.', example='60'), }(name='autoStopConfiguration', description='Indicates whether automatic termination is enabled.'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version of the Spark engine.', example='esr-2.2(Java Runtime)'), domain?: string(name='domain', description='The domain name to which the Spark UI of the session belongs.', example='your.domain.com'), domainInner?: string(name='domainInner', description='The internal endpoint.', example='emr-spark-gateway-cn-hangzhou-internal.data.aliyuncs.com'), draftId?: string(name='draftId', description='The ID of the job that is associated with the session.', example='TSK-xxxxxxxx'), envId?: string(name='envId', description='The environment ID.', example='env-cpv569tlhtgndjl86t40'), extra?: string(name='extra', description='The additional metadata of the session.', example='{"extraInfoKey":"extraInfoValue"}'), fusion?: boolean(name='fusion', description='Indicates whether the Fusion engine is used for acceleration.', example='false'), gmtCreate?: long(name='gmtCreate', description='The creation time.', example='2024-09-01 06:23:01'), kind?: string(name='kind', description='The type of the job. This parameter is required and cannot be modified after the deployment is created. Valid values: * SQLSCRIPT * JAR * PYTHON', example='SQL'), name?: string(name='name', description='The name of the session.', example='test'), queueName?: string(name='queueName', description='The queue name.', example='jobName'), releaseVersion?: string(name='releaseVersion', description='The version of Serverless Spark.', example='esr-2.2(Java Runtime)'), sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='1234abcd-12ab-34cd-56ef-1234567890ab'), startTime?: long(name='startTime', description='The start time.', example='2024-09-01 06:23:01'), state?: string(name='state', description='The job status. * Starting * Running * Stopping * Stopped * Error', example='Running'), stateChangeReason?: { code?: string(name='code', description='The status change code.', example='1000000'), message?: string(name='message', description='The status change message.', example='ok'), }(name='stateChangeReason', description='The reason of the job status change.'), userId?: string(name='userId', description='The user ID.', example='jr-231231'), userName?: string(name='userName', description='The name of the account that is used to create the session.', example='user1'), webUI?: string(name='webUI', description='The Spark UI of the session.', example='https://spark-ui/link'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-1234abcd'), }(name='sessionCluster', description='The session.'), } model GetSessionClusterResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GetSessionClusterResponseBody(name='body'), } /** * @summary Queries the information about a session. 
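*
* Example (illustrative sketch; the IDs are the example values documented on the response model):
*
*     var request = new GetSessionClusterRequest{ regionId = 'cn-hangzhou' };
*     var runtime = new $RuntimeOptions{};
*     var headers : map[string]string = {};
*     var response = getSessionClusterWithOptions('w-1234abcd', '1234abcd-12ab-34cd-56ef-1234567890ab', request, headers, runtime);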
* * @param request GetSessionClusterRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GetSessionClusterResponse */ async function getSessionClusterWithOptions(workspaceId: string, sessionClusterId: string, request: GetSessionClusterRequest, headers: map[string]string, runtime: $RuntimeOptions): GetSessionClusterResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'GetSessionCluster', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/sessionClusters/${$URL.percentEncode(sessionClusterId)}`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the information about a session. * * @param request GetSessionClusterRequest * @return GetSessionClusterResponse */ async function getSessionCluster(workspaceId: string, sessionClusterId: string, request: GetSessionClusterRequest): GetSessionClusterResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return getSessionClusterWithOptions(workspaceId, sessionClusterId, request, headers, runtime); } model GetSqlStatementRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model GetSqlStatementResponseBody = { data?: { executionTime?: [ long ](name='executionTime', description='The list of time that is consumed by SQL queries.'), sqlErrorCode?: string(name='sqlErrorCode', description='The error code.', example='ERROR-102'), sqlErrorMessage?: string(name='sqlErrorMessage', description='The error message.', example='error message'), sqlOutputs?: [ { rows?: string(name='rows', description='The queried data, which is a string in the JSON format.', example='[{\\\\"values\\\\":[\\\\"test_db\\\\",\\\\"test_table\\\\",false]}'), rowsFilePath?: string(name='rowsFilePath'), schema?: string(name='schema', description='The information about the schema, which is a string in the JSON format.', example='{\\\\"type\\\\":\\\\"struct\\\\",\\\\"fields\\\\":[{\\\\"name\\\\":\\\\"namespace\\\\",\\\\"type\\\\":\\\\"string\\\\",\\\\"nullable\\\\":false,\\\\"metadata\\\\":{}},{\\\\"name\\\\":\\\\"tableName\\\\",\\\\"type\\\\":\\\\"string\\\\",\\\\"nullable\\\\":false,\\\\"metadata\\\\":{}},{\\\\"name\\\\":\\\\"isTemporary\\\\",\\\\"type\\\\":\\\\"boolean\\\\",\\\\"nullable\\\\":false,\\\\"metadata\\\\":{}}]}'), } ](name='sqlOutputs', description='The query results.'), state?: string(name='state', description='The query status. Valid values: * running * available * cancelled * error * cancelling', example='running'), statementId?: string(name='statementId', description='The query ID.', example='st-1231311abadfaa'), }(name='data', description='The response parameters.'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model GetSqlStatementResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GetSqlStatementResponseBody(name='body'), } /** * @summary Queries the status of an SQL query task. 
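*
* Illustrative call sketch (for reference only); the workspace and statement IDs are placeholders taken from the field examples above:
*
*     var request = new GetSqlStatementRequest{ regionId = 'cn-hangzhou' };
*     var headers : map[string]string = {};
*     var runtime = new $RuntimeOptions{};
*     var response = getSqlStatementWithOptions('w-1234abcd', 'st-1231311abadfaa', request, headers, runtime);
*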
* * @param request GetSqlStatementRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GetSqlStatementResponse */ async function getSqlStatementWithOptions(workspaceId: string, statementId: string, request: GetSqlStatementRequest, headers: map[string]string, runtime: $RuntimeOptions): GetSqlStatementResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'GetSqlStatement', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/interactive/v1/workspace/${$URL.percentEncode(workspaceId)}/statement/${$URL.percentEncode(statementId)}`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the status of an SQL query task. * * @param request GetSqlStatementRequest * @return GetSqlStatementResponse */ async function getSqlStatement(workspaceId: string, statementId: string, request: GetSqlStatementRequest): GetSqlStatementResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return getSqlStatementWithOptions(workspaceId, statementId, request, headers, runtime); } model GetTemplateRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), templateType?: string(name='templateType', description='The template type. Valid values: * TASK * SESSION', example='TASK'), } model GetTemplateResponseBody = { data?: Template(name='data', description='The data returned.'), errorCode?: string(name='errorCode', description='* If the value of success was false, an error code was returned. * If the value of success was true, a null value was returned.', example='040003'), errorMessage?: string(name='errorMessage', description='* If the value of success was false, an error message was returned. * If the value of success was true, a null value was returned.', example='InvalidUser.NotFound'), httpStatusCode?: string(name='httpStatusCode', description='The HTTP status code.', example='200'), requestId?: string(name='requestId', description='The request ID.', example='484D9DDA-300D-525E-AF7A-0CCCA5C64A7A'), success?: boolean(name='success', description='Indicates whether the request was successful.', example='True'), } model GetTemplateResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GetTemplateResponseBody(name='body'), } /** * @summary Queries task templates. 
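*
* Illustrative call sketch (for reference only); the workspace ID is a placeholder and templateType uses one of the documented valid values:
*
*     var request = new GetTemplateRequest{ regionId = 'cn-hangzhou', templateType = 'TASK' };
*     var headers : map[string]string = {};
*     var runtime = new $RuntimeOptions{};
*     var response = getTemplateWithOptions('w-1234abcd', request, headers, runtime);
*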
* * @param request GetTemplateRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GetTemplateResponse */ async function getTemplateWithOptions(workspaceBizId: string, request: GetTemplateRequest, headers: map[string]string, runtime: $RuntimeOptions): GetTemplateResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.templateType)) { query['templateType'] = request.templateType; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'GetTemplate', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/interactive/v1/workspace/${$URL.percentEncode(workspaceBizId)}/template`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries task templates. * * @param request GetTemplateRequest * @return GetTemplateResponse */ async function getTemplate(workspaceBizId: string, request: GetTemplateRequest): GetTemplateResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return getTemplateWithOptions(workspaceBizId, request, headers, runtime); } model GrantRoleToUsersRequest { roleArn?: string(name='roleArn', description='The Alibaba Cloud Resource Name (ARN) of the RAM role.', example='acs:emr::w-975bcfda9625****:role/Owner'), userArns?: [ string ](name='userArns', description='The user ARNs.'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model GrantRoleToUsersResponseBody = { requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model GrantRoleToUsersResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: GrantRoleToUsersResponseBody(name='body'), } /** * @summary Assigns a specified role to users. * * @param request GrantRoleToUsersRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return GrantRoleToUsersResponse */ async function grantRoleToUsersWithOptions(request: GrantRoleToUsersRequest, headers: map[string]string, runtime: $RuntimeOptions): GrantRoleToUsersResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.roleArn)) { body['roleArn'] = request.roleArn; } if (!$isNull(request.userArns)) { body['userArns'] = request.userArns; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'GrantRoleToUsers', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/auth/roles/grant`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Assigns a specified role to users. 
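*
* Illustrative call sketch (for reference only); the role ARN is the documented example value and '<user-arn>' is a placeholder for a real user ARN:
*
*     var request = new GrantRoleToUsersRequest{
*       roleArn = 'acs:emr::w-975bcfda9625****:role/Owner',
*       userArns = [ '<user-arn>' ]  // placeholder, replace with real user ARNs
*     };
*     var response = grantRoleToUsers(request);
*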
* * @param request GrantRoleToUsersRequest * @return GrantRoleToUsersResponse */ async function grantRoleToUsers(request: GrantRoleToUsersRequest): GrantRoleToUsersResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return grantRoleToUsersWithOptions(request, headers, runtime); } model ListJobRunsRequest { creator?: string(name='creator', description='The ID of the user who created the job.', example='1509789347011222'), endTime?: { endTime?: long(name='endTime', description='The end of the end time range.', example='1710432000000'), startTime?: long(name='startTime', description='The beginning of the end time range.', example='1709740800000'), }(name='endTime', description='The range of end time.'), jobRunDeploymentId?: string(name='jobRunDeploymentId', description='The job run ID.', example='jd-b6d003f1930f****'), jobRunId?: string(name='jobRunId', description='The job ID.', example='j-xxx'), maxResults?: int32(name='maxResults', description='The maximum number of entries to return.', example='20'), minDuration?: long(name='minDuration', description='The minimum running duration of the job. Unit: ms.', example='60000'), name?: string(name='name', description='The job name.', example='emr-spark-demo-job'), nextToken?: string(name='nextToken', description='The pagination token that is used in the request to retrieve a new page of results.', example='DD6B1B2A-5837-5237-ABE4-FF0C89568980'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), resourceQueueId?: string(name='resourceQueueId', description='The name of the resource queue on which the Spark jobs run.', example='dev_queue'), startTime?: { endTime?: long(name='endTime', description='The end of the start time range.', example='1710432000000'), startTime?: long(name='startTime', description='The beginning of the start time range.', example='1709740800000'), }(name='startTime', description='The range of start time.'), states?: [ string ](name='states', description='The job states.', example='["Running","Submitted"]'), tags?: [ { key?: string(name='key', description='The key of tag N.', example='tag_key'), value?: string(name='value', description='The value of tag N.', example='value'), } ](name='tags', description='The tags of the job.'), } model ListJobRunsShrinkRequest { creator?: string(name='creator', description='The ID of the user who created the job.', example='1509789347011222'), endTimeShrink?: string(name='endTime', description='The range of end time.'), jobRunDeploymentId?: string(name='jobRunDeploymentId', description='The job run ID.', example='jd-b6d003f1930f****'), jobRunId?: string(name='jobRunId', description='The job ID.', example='j-xxx'), maxResults?: int32(name='maxResults', description='The maximum number of entries to return.', example='20'), minDuration?: long(name='minDuration', description='The minimum running duration of the job. 
Unit: ms.', example='60000'), name?: string(name='name', description='The job name.', example='emr-spark-demo-job'), nextToken?: string(name='nextToken', description='The pagination token that is used in the request to retrieve a new page of results.', example='DD6B1B2A-5837-5237-ABE4-FF0C89568980'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), resourceQueueId?: string(name='resourceQueueId', description='The name of the resource queue on which the Spark jobs run.', example='dev_queue'), startTimeShrink?: string(name='startTime', description='The range of start time.'), statesShrink?: string(name='states', description='The job states.', example='["Running","Submitted"]'), tagsShrink?: string(name='tags', description='The tags of the job.'), } model ListJobRunsResponseBody = { jobRuns?: [ { codeType?: string(name='codeType', description='The code type of the job. Valid values: SQL JAR PYTHON', example='SQL'), configurationOverrides?: { configurations?: [ Configuration ](name='configurations', description='The SparkConf objects.'), }(name='configurationOverrides', description='The advanced configurations of Spark.'), creator?: string(name='creator', description='The ID of the user who created the job.', example='1509789347011222'), cuHours?: double(name='cuHours', description='The number of CUs consumed during a specified cycle of a task. The value is an estimated value. Refer to your Alibaba Cloud bill for the actual number of consumed CUs.', example='2.059'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version of Spark on which the jobs run.', example='esr-3.0.0 (Spark 3.4.3, Scala 2.12)'), endTime?: long(name='endTime', description='The end time of the job.', example='1684119314000'), executionTimeoutSeconds?: int32(name='executionTimeoutSeconds', description='The timeout period of the job.', example='3600'), fusion?: boolean(name='fusion', description='Indicates whether the Fusion engine is used for acceleration.', example='true'), jobDriver?: JobDriver(name='jobDriver', description='The information about Spark Driver.'), jobRunId?: string(name='jobRunId', description='The job ID.', example='jr-231231'), log?: RunLog(name='log', description='The path where the operational logs are stored.'), mbSeconds?: long(name='mbSeconds', description='The total amount of memory allocated to the job multiplied by the running duration (seconds).', example='33030784'), name?: string(name='name', description='The job name.', example='jobName'), releaseVersion?: string(name='releaseVersion', description='The version of Spark on which the jobs run.', example='esr-native-3.4.0'), state?: string(name='state', description='The job state.', example='Running'), stateChangeReason?: { code?: string(name='code', description='The error code.', example='0'), message?: string(name='message', description='The error message.', example='success'), }(name='stateChangeReason', description='The reason of the job status change.'), submitTime?: long(name='submitTime', description='The time when the job was submitted.', example='1684119314000'), tags?: [ Tag ](name='tags', description='The tags.'), vcoreSeconds?: long(name='vcoreSeconds', description='The total number of CPU cores allocated to the job multiplied by the running duration (seconds).', example='8236'), webUI?: string(name='webUI', description='The web UI of the job.', example='http://spark-ui'), workspaceId?: string(name='workspaceId', description='The workspace ID.', 
example='w-1234abcd'), } ](name='jobRuns', description='The Spark jobs.'), maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), nextToken?: string(name='nextToken', description='A pagination token.', example='DD6B1B2A-5837-5237-ABE4-FF0C89568980'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), totalCount?: int32(name='totalCount', description='The total number of entries returned.', example='200'), } model ListJobRunsResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListJobRunsResponseBody(name='body'), } /** * @summary Queries a list of Spark jobs. * * @param tmpReq ListJobRunsRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListJobRunsResponse */ async function listJobRunsWithOptions(workspaceId: string, tmpReq: ListJobRunsRequest, headers: map[string]string, runtime: $RuntimeOptions): ListJobRunsResponse { tmpReq.validate(); var request = new ListJobRunsShrinkRequest{}; OpenApiUtil.convert(tmpReq, request); if (!$isNull(tmpReq.endTime)) { request.endTimeShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.endTime, 'endTime', 'json'); } if (!$isNull(tmpReq.startTime)) { request.startTimeShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.startTime, 'startTime', 'json'); } if (!$isNull(tmpReq.states)) { request.statesShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.states, 'states', 'json'); } if (!$isNull(tmpReq.tags)) { request.tagsShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.tags, 'tags', 'json'); } var query : map[string]any = {}; if (!$isNull(request.creator)) { query['creator'] = request.creator; } if (!$isNull(request.endTimeShrink)) { query['endTime'] = request.endTimeShrink; } if (!$isNull(request.jobRunDeploymentId)) { query['jobRunDeploymentId'] = request.jobRunDeploymentId; } if (!$isNull(request.jobRunId)) { query['jobRunId'] = request.jobRunId; } if (!$isNull(request.maxResults)) { query['maxResults'] = request.maxResults; } if (!$isNull(request.minDuration)) { query['minDuration'] = request.minDuration; } if (!$isNull(request.name)) { query['name'] = request.name; } if (!$isNull(request.nextToken)) { query['nextToken'] = request.nextToken; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.resourceQueueId)) { query['resourceQueueId'] = request.resourceQueueId; } if (!$isNull(request.startTimeShrink)) { query['startTime'] = request.startTimeShrink; } if (!$isNull(request.statesShrink)) { query['states'] = request.statesShrink; } if (!$isNull(request.tagsShrink)) { query['tags'] = request.tagsShrink; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListJobRuns', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/jobRuns`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries a list of Spark jobs. 
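*
* Illustrative call sketch (for reference only); the workspace ID is a placeholder and the filter values come from the field examples above:
*
*     var request = new ListJobRunsRequest{ regionId = 'cn-hangzhou', maxResults = 20, states = [ 'Running' ] };
*     var response = listJobRuns('w-1234abcd', request);
*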
* * @param request ListJobRunsRequest * @return ListJobRunsResponse */ async function listJobRuns(workspaceId: string, request: ListJobRunsRequest): ListJobRunsResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listJobRunsWithOptions(workspaceId, request, headers, runtime); } model ListKyuubiSparkApplicationsRequest { applicationId?: string(name='applicationId', description='The ID of the application that is submitted by using a Kyuubi gateway.', example='spark-339f844005b6404c95f9f7c7a13b****'), applicationName?: string(name='applicationName', description='The name of the Spark application that is submitted by using a Kyuubi gateway.', example='kyuubi-connection-spark-sql-anonymous-fa9a5e73-b4b1-474a-b****'), maxResults?: int32(name='maxResults', description='The maximum number of entries to return.', example='20'), nextToken?: string(name='nextToken', description='The pagination token that is used in the next request to retrieve a new page of results.', example='1'), startTime?: { endTime?: long(name='endTime', description='The end of the start time range.', example='1710432000000'), startTime?: long(name='startTime', description='The beginning of the start time range.', example='1709740800000'), }(name='startTime', description='The range of start time.'), } model ListKyuubiSparkApplicationsShrinkRequest { applicationId?: string(name='applicationId', description='The ID of the application that is submitted by using a Kyuubi gateway.', example='spark-339f844005b6404c95f9f7c7a13b****'), applicationName?: string(name='applicationName', description='The name of the Spark application that is submitted by using a Kyuubi gateway.', example='kyuubi-connection-spark-sql-anonymous-fa9a5e73-b4b1-474a-b****'), maxResults?: int32(name='maxResults', description='The maximum number of entries to return.', example='20'), nextToken?: string(name='nextToken', description='The pagination token that is used in the next request to retrieve a new page of results.', example='1'), startTimeShrink?: string(name='startTime', description='The range of start time.'), } model ListKyuubiSparkApplicationsResponseBody = { applications?: [ { applicationId?: string(name='applicationId', description='The ID of the application that is submitted by using a Kyuubi gateway.', example='spark-339f844005b6404c95f9f7c7a13b****'), applicationName?: string(name='applicationName', description='The name of the Spark application that is submitted by using a Kyuubi gateway.', example='kyuubi-connection-spark-sql-anonymous-fa9a5e73-b4b1-474a-b****'), cuHours?: double(name='cuHours', description='The number of CUs consumed during a specified cycle of a task. The value is an estimated value. Refer to your Alibaba Cloud bill for the actual number of consumed CUs.', example='0.238302'), endTime?: string(name='endTime', description='The time when the task ended.', example='2025-02-12 20:02:02'), mbSeconds?: long(name='mbSeconds', description='The total amount of memory allocated to the job multiplied by the running duration (seconds).', example='3513900'), resourceQueueId?: string(name='resourceQueueId', description='The name of the resource queue on which the Spark jobs run.', example='dev_queue'), startTime?: string(name='startTime', description='The time when the task started.', example='2025-02-12 19:59:16'), state?: string(name='state', description='The status of the Spark application. 
* STARTING * RUNNING * TERMINATED', example='STARTING'), vcoreSeconds?: long(name='vcoreSeconds', description='The total number of CPU cores allocated to the job multiplied by the running duration (seconds).', example='780'), webUI?: string(name='webUI', description='The URL of the web UI for the Spark application.'), } ](name='applications', description='The details of the applications.'), maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), nextToken?: string(name='nextToken', description='A pagination token. It can be used in the next request to retrieve a new page of results.', example='1'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), totalCount?: int32(name='totalCount', description='The total number of entries returned.', example='200'), } model ListKyuubiSparkApplicationsResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListKyuubiSparkApplicationsResponseBody(name='body'), } /** * @summary Queries the applications that are submitted by using a Kyuubi gateway. * * @param tmpReq ListKyuubiSparkApplicationsRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListKyuubiSparkApplicationsResponse */ async function listKyuubiSparkApplicationsWithOptions(workspaceId: string, kyuubiServiceId: string, tmpReq: ListKyuubiSparkApplicationsRequest, headers: map[string]string, runtime: $RuntimeOptions): ListKyuubiSparkApplicationsResponse { tmpReq.validate(); var request = new ListKyuubiSparkApplicationsShrinkRequest{}; OpenApiUtil.convert(tmpReq, request); if (!$isNull(tmpReq.startTime)) { request.startTimeShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.startTime, 'startTime', 'json'); } var query : map[string]any = {}; if (!$isNull(request.applicationId)) { query['applicationId'] = request.applicationId; } if (!$isNull(request.applicationName)) { query['applicationName'] = request.applicationName; } if (!$isNull(request.maxResults)) { query['maxResults'] = request.maxResults; } if (!$isNull(request.nextToken)) { query['nextToken'] = request.nextToken; } if (!$isNull(request.startTimeShrink)) { query['startTime'] = request.startTimeShrink; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListKyuubiSparkApplications', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/kyuubi/${$URL.percentEncode(workspaceId)}/${$URL.percentEncode(kyuubiServiceId)}/applications`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the applications that are submitted by using a Kyuubi gateway. 
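*
* Illustrative call sketch (for reference only); '<kyuubi-service-id>' and the workspace ID are placeholders:
*
*     var request = new ListKyuubiSparkApplicationsRequest{ maxResults = 20 };
*     var response = listKyuubiSparkApplications('w-1234abcd', '<kyuubi-service-id>', request);  // replace the placeholders with real IDs
*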
* * @param request ListKyuubiSparkApplicationsRequest * @return ListKyuubiSparkApplicationsResponse */ async function listKyuubiSparkApplications(workspaceId: string, kyuubiServiceId: string, request: ListKyuubiSparkApplicationsRequest): ListKyuubiSparkApplicationsResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listKyuubiSparkApplicationsWithOptions(workspaceId, kyuubiServiceId, request, headers, runtime); } model ListLogContentsRequest { fileName?: string(name='fileName', description='Full path of the file.'), length?: int32(name='length', description='Length of the log.', example='9999'), offset?: int32(name='offset', description='Start line for query.', example='0'), regionId?: string(name='regionId', description='Region ID.', example='cn-hangzhou'), } model ListLogContentsResponseBody = { listLogContent?: { contents?: [ { lineContent?: string(name='LineContent', description='Log line content.', example='spark pi is 3.14\\\\n'), } ](name='contents', description='List of log line contents.'), totalLength?: long(name='totalLength', description='Total number of log lines.', example='10'), }(name='listLogContent', description='Log content.'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model ListLogContentsResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListLogContentsResponseBody(name='body'), } /** * @summary Get Log Content * * @param request ListLogContentsRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListLogContentsResponse */ async function listLogContentsWithOptions(workspaceId: string, request: ListLogContentsRequest, headers: map[string]string, runtime: $RuntimeOptions): ListLogContentsResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.fileName)) { query['fileName'] = request.fileName; } if (!$isNull(request.length)) { query['length'] = request.length; } if (!$isNull(request.offset)) { query['offset'] = request.offset; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListLogContents', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/action/listLogContents`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Get Log Content * * @param request ListLogContentsRequest * @return ListLogContentsResponse */ async function listLogContents(workspaceId: string, request: ListLogContentsRequest): ListLogContentsResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listLogContentsWithOptions(workspaceId, request, headers, runtime); } model ListReleaseVersionsRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), releaseType?: string(name='releaseType', description='The type of the version. Valid values: * stable * Beta', example='stable'), releaseVersion?: string(name='releaseVersion', description='The version of EMR Serverless Spark.', example='esr-2.1 (Spark 3.3.1, Scala 2.12, Java Runtime)'), releaseVersionStatus?: string(name='releaseVersionStatus', description='The status of the version. 
Valid values: * ONLINE * OFFLINE', example='ONLINE'), serviceFilter?: string(name='serviceFilter'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-d2d82aa09155****'), } model ListReleaseVersionsResponseBody = { maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), nextToken?: string(name='nextToken', description='A pagination token. It can be used in the next request to retrieve a new page of results.', example='1'), releaseVersions?: [ { communityVersion?: string(name='communityVersion', description='The version number of open source Spark.', example='Spark 3.3.1'), cpuArchitectures?: [ string ](name='cpuArchitectures', description='The CPU architectures.'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version number.', example='esr-2.1 (Spark 3.3.1, Scala 2.12)'), fusion?: boolean(name='fusion', description='Indicates whether the Fusion engine is used for acceleration.', example='true'), gmtCreate?: long(name='gmtCreate', description='The creation time.', example='1716215854101'), iaasType?: string(name='iaasType', description='The type of the Infrastructure as a Service (IaaS) layer.', example='ASI'), releaseVersion?: string(name='releaseVersion', description='The version number.', example='esr-2.1 (Spark 3.3.1, Scala 2.12, Java Runtime)'), scalaVersion?: string(name='scalaVersion', description='The version of Scala.', example='2.12'), state?: string(name='state', description='The status of the version.', example='ONLINE'), type?: string(name='type', description='The type of the version.', example='stable'), } ](name='releaseVersions', description='The versions.'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), totalCount?: int32(name='totalCount', description='The total number of entries returned.', example='200'), } model ListReleaseVersionsResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListReleaseVersionsResponseBody(name='body'), } /** * @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark. 
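*
* Illustrative call sketch (for reference only); the filter values are taken from the documented valid values:
*
*     var request = new ListReleaseVersionsRequest{ regionId = 'cn-hangzhou', releaseVersionStatus = 'ONLINE' };
*     var headers : map[string]string = {};
*     var runtime = new $RuntimeOptions{};
*     var response = listReleaseVersionsWithOptions(request, headers, runtime);
*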
* * @param request ListReleaseVersionsRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListReleaseVersionsResponse */ async function listReleaseVersionsWithOptions(request: ListReleaseVersionsRequest, headers: map[string]string, runtime: $RuntimeOptions): ListReleaseVersionsResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.releaseType)) { query['releaseType'] = request.releaseType; } if (!$isNull(request.releaseVersion)) { query['releaseVersion'] = request.releaseVersion; } if (!$isNull(request.releaseVersionStatus)) { query['releaseVersionStatus'] = request.releaseVersionStatus; } if (!$isNull(request.serviceFilter)) { query['serviceFilter'] = request.serviceFilter; } if (!$isNull(request.workspaceId)) { query['workspaceId'] = request.workspaceId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListReleaseVersions', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/releaseVersions`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark. * * @param request ListReleaseVersionsRequest * @return ListReleaseVersionsResponse */ async function listReleaseVersions(request: ListReleaseVersionsRequest): ListReleaseVersionsResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listReleaseVersionsWithOptions(request, headers, runtime); } model ListSessionClustersRequest { kind?: string(name='kind', description='The session type. 
Valid values: * NOTEBOOK * THRIFT * SQL', example='SQL'), maxResults?: int32(name='maxResults', description='The maximum number of entries to return.', example='20'), nextToken?: string(name='nextToken', description='The pagination token that is used in the request to retrieve a new page of results.', example='DD6B1B2A-5837-5237-ABE4-FF0C89568980'), queueName?: string(name='queueName', description='The name of the queue.', example='root'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), sessionClusterId?: string(name='sessionClusterId', description='The name of the job.', example='emr-spark-demo-job'), } model ListSessionClustersResponseBody = { maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), nextToken?: string(name='nextToken', description='A pagination token.', example='DD6B1B2A-5837-5237-ABE4-FF0C89568980'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), sessionClusters?: [ { applicationConfigs?: [ { configFileName?: string(name='configFileName', description='The name of the configuration file.', example='spark-default.conf'), configItemKey?: string(name='configItemKey', description='The key of the configuration.', example='spark.app.name'), configItemValue?: string(name='configItemValue', description='The configuration value.', example='test_application'), } ](name='applicationConfigs', description='The session configurations, which are equivalent to the configurations of the Spark job.'), autoStartConfiguration?: { enable?: boolean(name='enable', description='Indicates whether automatic startup is enabled.', example='true'), }(name='autoStartConfiguration', description='The automatic startup configurations.'), autoStopConfiguration?: { enable?: boolean(name='enable', description='Indicates whether automatic termination is enabled.', example='false'), idleTimeoutMinutes?: int32(name='idleTimeoutMinutes', description='The idle timeout period. The session is automatically terminated when the idle timeout period is exceeded.', example='45'), }(name='autoStopConfiguration', description='The configurations of automatic termination.'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version of the Spark engine.', example='esr-4.0.0 (Spark 3.5.2, Scala 2.12)'), domain?: string(name='domain', description='The public endpoint of the Thrift server.', example='emr-spark-gateway-cn-hangzhou.data.aliyun.com'), domainInner?: string(name='domainInner', description='The internal endpoint of the Thrift server.', example='emr-spark-gateway-cn-hangzhou-internal.data.aliyuncs.com'), draftId?: string(name='draftId', description='The ID of the job that is associated with the session.', example='TSK-xxxxxxxxx'), extra?: string(name='extra', description='The additional metadata of the session.', example='{"extraInfoKey":"extraInfoValue"}'), fusion?: boolean(name='fusion', description='Indicates whether the Fusion engine is used for acceleration.', example='false'), gmtCreate?: long(name='gmtCreate', description='The creation time.', example='1732267598000'), kind?: string(name='kind', description='The session type. 
Valid values: * NOTEBOOK * THRIFT * SQL', example='SQL'), name?: string(name='name', description='The name of the session.', example='adhoc_query'), queueName?: string(name='queueName', description='The name of the queue that is used to run the session.', example='dev_queue'), releaseVersion?: string(name='releaseVersion', description='The version of EMR Serverless Spark.', example='esr-2.1'), sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='sc-123131'), startTime?: long(name='startTime', description='The start time.', example='1732267598000'), state?: string(name='state', description='The status of the session. * Starting * Running * Stopping * Stopped * Error', example='Running'), stateChangeReason?: { code?: string(name='code', description='The status change code.', example='200'), message?: string(name='message', description='The status change message.', example='ok'), }(name='stateChangeReason', description='The details of the most recent status change of the session.'), userId?: string(name='userId', description='The user ID.', example='123131'), userName?: string(name='userName', description='The username.', example='test_user'), webUI?: string(name='webUI', description='The Spark UI of the session.', example='http://spark-ui-xxxx'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-1234abcd'), } ](name='sessionClusters', description='The sessions.'), totalCount?: int32(name='totalCount', description='The total number of entries returned.', example='200'), } model ListSessionClustersResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListSessionClustersResponseBody(name='body'), } /** * @summary Queries the list of sessions. * * @param request ListSessionClustersRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListSessionClustersResponse */ async function listSessionClustersWithOptions(workspaceId: string, request: ListSessionClustersRequest, headers: map[string]string, runtime: $RuntimeOptions): ListSessionClustersResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.kind)) { query['kind'] = request.kind; } if (!$isNull(request.maxResults)) { query['maxResults'] = request.maxResults; } if (!$isNull(request.nextToken)) { query['nextToken'] = request.nextToken; } if (!$isNull(request.queueName)) { query['queueName'] = request.queueName; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.sessionClusterId)) { query['sessionClusterId'] = request.sessionClusterId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListSessionClusters', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/sessionClusters`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the list of sessions. 
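*
* Illustrative call sketch (for reference only); the workspace ID is a placeholder and kind uses one of the documented valid values:
*
*     var request = new ListSessionClustersRequest{ regionId = 'cn-hangzhou', kind = 'SQL' };
*     var response = listSessionClusters('w-1234abcd', request);
*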
* * @param request ListSessionClustersRequest * @return ListSessionClustersResponse */ async function listSessionClusters(workspaceId: string, request: ListSessionClustersRequest): ListSessionClustersResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listSessionClustersWithOptions(workspaceId, request, headers, runtime); } model ListWorkspaceQueuesRequest { environment?: string(name='environment', description='The environment type. Valid values: * dev * production', example='production'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model ListWorkspaceQueuesResponseBody = { maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), nextToken?: string(name='nextToken', description='A pagination token. It can be used in the next request to retrieve a new page of results.', example='1'), queues?: [ { allowActions?: [ { actionArn?: string(name='actionArn', description='The Alibaba Cloud Resource Name (ARN) of the action.', example='acs:emr::workspaceId:action/create_queue'), actionName?: string(name='actionName', description='The name of the permission.', example='view'), dependencies?: [ string ](name='dependencies', description='The dependencies of the operation.', example='["view"]'), description?: string(name='description', description='The description of the operation.', example='File directory traversal and file browsing'), displayName?: string(name='displayName', description='The display name of the permission.', example='File directory traversal and file browsing'), } ](name='allowActions', description='The operations allowed for the queue.'), createTime?: long(name='createTime', description='The time when the workspace was created.', example='1684115879955'), creator?: string(name='creator', description='The ID of the user who created the queue.', example='237109'), environments?: [ string ](name='environments', description='The environment types of the queue.'), maxResource?: string(name='maxResource', description='The maximum capacity of resources that can be used in the queue.', example='{"cpu": "2","memory": "2Gi"}'), minResource?: string(name='minResource', description='The minimum capacity of resources that can be used in the queue.', example='{"cpu": "2","memory": "2Gi"}'), paymentType?: string(name='paymentType', description='The billing method. Valid values: * PayAsYouGo * Pre', example='PayAsYouGo'), properties?: string(name='properties', description='The queue label.', example='dev_queue'), queueName?: string(name='queueName', description='The name of the queue.', example='dev_queue'), queueScope?: string(name='queueScope', description='The queue architecture.', example='{"arch": "x86"}'), queueStatus?: string(name='queueStatus', description='The status of the queue.', example='RUNNING'), queueType?: string(name='queueType', description='The type of the queue. 
Valid values: * instance * instanceChildren', example='instance, instanceChildren'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), usedResource?: string(name='usedResource', description='The capacity of resources that are used in the queue.', example='{"cpu": "2","memory": "2Gi"}'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-1234abcd'), } ](name='queues', description='The list of queues.'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), totalCount?: int32(name='totalCount', description='The total number of entries returned.', example='200'), } model ListWorkspaceQueuesResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListWorkspaceQueuesResponseBody(name='body'), } /** * @summary Queries the list of queues in a Spark workspace. * * @param request ListWorkspaceQueuesRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListWorkspaceQueuesResponse */ async function listWorkspaceQueuesWithOptions(workspaceId: string, request: ListWorkspaceQueuesRequest, headers: map[string]string, runtime: $RuntimeOptions): ListWorkspaceQueuesResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.environment)) { query['environment'] = request.environment; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListWorkspaceQueues', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/queues`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries the list of queues in a Spark workspace. * * @param request ListWorkspaceQueuesRequest * @return ListWorkspaceQueuesResponse */ async function listWorkspaceQueues(workspaceId: string, request: ListWorkspaceQueuesRequest): ListWorkspaceQueuesResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listWorkspaceQueuesWithOptions(workspaceId, request, headers, runtime); } model ListWorkspacesRequest { maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), name?: string(name='name', description='The name of the workspace. Fuzzy match is supported.', example='test_workspace'), nextToken?: string(name='nextToken', description='A pagination token. It can be used in the next request to retrieve a new page of results.', example='1'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), state?: string(name='state', description='The state of the workspace.', example='running'), tag?: [ { key?: string(name='key'), value?: string(name='value'), } ](name='tag'), } model ListWorkspacesShrinkRequest { maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), name?: string(name='name', description='The name of the workspace. Fuzzy match is supported.', example='test_workspace'), nextToken?: string(name='nextToken', description='A pagination token. 
It can be used in the next request to retrieve a new page of results.', example='1'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), state?: string(name='state', description='The state of the workspace.', example='running'), tagShrink?: string(name='tag'), } model ListWorkspacesResponseBody = { maxResults?: int32(name='maxResults', description='The maximum number of entries returned.', example='20'), nextToken?: string(name='nextToken', description='A pagination token. It can be used in the next request to retrieve a new page of results.', example='1'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), totalCount?: int32(name='totalCount', description='The total number of entries returned.', example='200'), workspaces?: [ { autoRenew?: boolean(name='autoRenew', description='Specifies whether to enable auto-renewal. This parameter is required only if the paymentType parameter is set to Pre.', example='true'), autoRenewPeriod?: int32(name='autoRenewPeriod', description='The auto-renewal duration. This parameter is required only if the paymentType parameter is set to Pre.', example='1'), autoRenewPeriodUnit?: string(name='autoRenewPeriodUnit', description='The unit of the auto-renewal duration. This parameter is required only if the paymentType parameter is set to Pre.', example='YEAR, MONTH, WEEK, DAY, HOUR, MINUTE'), createTime?: long(name='createTime', description='The time when the workflow was created.', example='1684115879955'), dlfCatalogId?: string(name='dlfCatalogId', description='The information of the Data Lake Formation (DLF) catalog.', example='default'), dlfType?: string(name='dlfType', description='The version of DLF.', example='1.0'), duration?: int32(name='duration', description='The subscription period. This parameter is required only if the paymentType parameter is set to Pre.', example='1'), endTime?: long(name='endTime', description='The end of the end time range.', example='1687103999999'), failReason?: string(name='failReason', description='The failure reason.', example='out of stock'), paymentDurationUnit?: string(name='paymentDurationUnit', description='The unit of the subscription duration.', example='YEAR, MONTH, WEEK, DAY, HOUR, MINUTE'), paymentStatus?: string(name='paymentStatus', description='The status of the payment.', example='PAID/UNPAID'), paymentType?: string(name='paymentType', description='The billing method. Valid values: - PayAsYouGo - Pre', example='PayAsYouGo'), prePaidQuota?: { allocatedResource?: string(name='allocatedResource', description='The amount of resources that are allocated by a subscription quota.', example='{\\\\"cpu\\\\":\\\\"1\\\\",\\\\"memory\\\\":\\\\"4Gi\\\\",\\\\"cu\\\\":\\\\"1\\\\"}'), autoRenewal?: boolean(name='autoRenewal', description='Indicates whether auto-renewal is enabled for the subscription quota. 
* true * false', example='true'), createTime?: long(name='createTime', description='The creation time of the subscription quota.', example='1745683200000'), expireTime?: long(name='expireTime', description='The expiration time of the subscription quota.', example='1740537153000'), instanceId?: string(name='instanceId', description='The ID of the instance that is generated when you purchase the subscription quota.', example='i-abc12345'), maxResource?: string(name='maxResource', description='The maximum amount of resources that can be used in a subscription quota.', example='{\\\\"cpu\\\\":\\\\"1\\\\",\\\\"memory\\\\":\\\\"4Gi\\\\",\\\\"cu\\\\":\\\\"1\\\\"}'), paymentStatus?: string(name='paymentStatus', description='The status of the subscription quota. Valid values: * NORMAL * WAIT_FOR_EXPIRE * EXPIRED', example='NORMAL'), usedResource?: string(name='usedResource', description='The amount of resources that are used.', example='{\\\\"cpu\\\\":\\\\"0\\\\",\\\\"memory\\\\":\\\\"0Gi\\\\",\\\\"cu\\\\":\\\\"0\\\\"}'), }(name='prePaidQuota', description='The information about the subscription quota.'), regionId?: string(name='regionId', description='The region ID.', example='cn-shanghai'), releaseType?: string(name='releaseType', description='The reason why the workspace is released.', example='SERVICE_RELEASE'), resourceSpec?: string(name='resourceSpec', description='The resource specifications.', example='100cu'), stateChangeReason?: { code?: string(name='code', description='The error code.', example='0'), message?: string(name='message', description='The error message.', example='Success'), }(name='stateChangeReason', description='The reason of the job status change.'), storage?: string(name='storage', description='The OSS path.', example='spark-result'), tags?: [ { tagKey?: string(name='tagKey'), tagValue?: string(name='tagValue'), } ](name='tags'), workspaceId?: string(name='workspaceId', description='The workspace ID.', example='w-******'), workspaceName?: string(name='workspaceName', description='The name of the workspace.', example='spark-1'), workspaceStatus?: string(name='workspaceStatus', description='The workspace status.', example='STARTING,RUNNING,TERMINATED'), } ](name='workspaces', description='The queried workspaces.'), } model ListWorkspacesResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: ListWorkspacesResponseBody(name='body'), } /** * @summary Queries a list of workspaces. 
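*
* Illustrative call sketch (for reference only); the values come from the field examples above:
*
*     var request = new ListWorkspacesRequest{ regionId = 'cn-hangzhou', maxResults = 20 };
*     var headers : map[string]string = {};
*     var runtime = new $RuntimeOptions{};
*     var response = listWorkspacesWithOptions(request, headers, runtime);
*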
* * @param tmpReq ListWorkspacesRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return ListWorkspacesResponse */ async function listWorkspacesWithOptions(tmpReq: ListWorkspacesRequest, headers: map[string]string, runtime: $RuntimeOptions): ListWorkspacesResponse { tmpReq.validate(); var request = new ListWorkspacesShrinkRequest{}; OpenApiUtil.convert(tmpReq, request); if (!$isNull(tmpReq.tag)) { request.tagShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.tag, 'tag', 'json'); } var query : map[string]any = {}; if (!$isNull(request.maxResults)) { query['maxResults'] = request.maxResults; } if (!$isNull(request.name)) { query['name'] = request.name; } if (!$isNull(request.nextToken)) { query['nextToken'] = request.nextToken; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.state)) { query['state'] = request.state; } if (!$isNull(request.tagShrink)) { query['tag'] = request.tagShrink; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'ListWorkspaces', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces`, method = 'GET', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Queries a list of workspaces. * * @param request ListWorkspacesRequest * @return ListWorkspacesResponse */ async function listWorkspaces(request: ListWorkspacesRequest): ListWorkspacesResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return listWorkspacesWithOptions(request, headers, runtime); } model StartJobRunRequest { clientToken?: string(name='clientToken', description='The client token that is used to ensure the idempotence of the request.', example='8e6aae2810c8f67229ca70bb31cd6028'), codeType?: string(name='codeType', description='The code type of the job. 
Valid values: * SQL * JAR * PYTHON', example='SQL'), configurationOverrides?: { configurations?: [ { configFileName?: string(name='configFileName', description='The configuration file of SparkConf.', example='spark-default.conf'), configItemKey?: string(name='configItemKey', description='The key of SparkConf.', example='spark.app.name'), configItemValue?: string(name='configItemValue', description='The value of SparkConf.', example='test_app'), } ](name='configurations', description='The SparkConf objects.'), }(name='configurationOverrides', description='The advanced configurations of Spark.'), displayReleaseVersion?: string(name='displayReleaseVersion', description='The version of the Spark engine.', example='esr-3.3.1'), executionTimeoutSeconds?: int32(name='executionTimeoutSeconds', description='The timeout period of the job.', example='100'), fusion?: boolean(name='fusion', description='Specifies whether to enable Fusion engine for acceleration.', example='false'), jobDriver?: JobDriver(name='jobDriver', description='The information about Spark Driver.'), jobId?: string(name='jobId', description='The job ID.', example='jr-12345'), name?: string(name='name', description='The name of the job.', example='spark_job_name'), releaseVersion?: string(name='releaseVersion', description='The version number of Spark.', example='esr-3.3.1'), resourceQueueId?: string(name='resourceQueueId', description='The name of the resource queue on which the Spark job runs.', example='dev_queue'), tags?: [ Tag ](name='tags', description='The tags of the job.'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model StartJobRunResponseBody = { jobRunId?: string(name='jobRunId', description='The job ID.', example='jr-54321'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model StartJobRunResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: StartJobRunResponseBody(name='body'), } /** * @summary Starts a Spark job. 
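*
* Illustrative call sketch (for reference only); the literal values come from the field examples above and the workspace ID is a placeholder. A jobDriver describing the Spark entry point (see the JobDriver model) will typically also need to be set:
*
*     var request = new StartJobRunRequest{
*       name = 'spark_job_name',
*       codeType = 'JAR',
*       releaseVersion = 'esr-3.3.1',
*       resourceQueueId = 'dev_queue'
*     };
*     // request.jobDriver should be populated with the Spark entry point and arguments.
*     var headers : map[string]string = {};
*     var runtime = new $RuntimeOptions{};
*     var response = startJobRunWithOptions('w-1234abcd', request, headers, runtime);
*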
* * @param request StartJobRunRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return StartJobRunResponse */ async function startJobRunWithOptions(workspaceId: string, request: StartJobRunRequest, headers: map[string]string, runtime: $RuntimeOptions): StartJobRunResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.clientToken)) { body['clientToken'] = request.clientToken; } if (!$isNull(request.codeType)) { body['codeType'] = request.codeType; } if (!$isNull(request.configurationOverrides)) { body['configurationOverrides'] = request.configurationOverrides; } if (!$isNull(request.displayReleaseVersion)) { body['displayReleaseVersion'] = request.displayReleaseVersion; } if (!$isNull(request.executionTimeoutSeconds)) { body['executionTimeoutSeconds'] = request.executionTimeoutSeconds; } if (!$isNull(request.fusion)) { body['fusion'] = request.fusion; } if (!$isNull(request.jobDriver)) { body['jobDriver'] = request.jobDriver; } if (!$isNull(request.jobId)) { body['jobId'] = request.jobId; } if (!$isNull(request.name)) { body['name'] = request.name; } if (!$isNull(request.releaseVersion)) { body['releaseVersion'] = request.releaseVersion; } if (!$isNull(request.resourceQueueId)) { body['resourceQueueId'] = request.resourceQueueId; } if (!$isNull(request.tags)) { body['tags'] = request.tags; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'StartJobRun', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/jobRuns`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Starts a Spark job. * * @param request StartJobRunRequest * @return StartJobRunResponse */ async function startJobRun(workspaceId: string, request: StartJobRunRequest): StartJobRunResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return startJobRunWithOptions(workspaceId, request, headers, runtime); } model StartProcessInstanceRequest { action?: string(name='action'), comments?: string(name='comments'), email?: string(name='email'), interval?: string(name='interval'), isProd?: boolean(name='isProd', description='Specifies whether to run the workflow in the production environment.', example='false'), processDefinitionCode?: long(name='processDefinitionCode', description='The workflow ID. This parameter is required.', example='12***********'), productNamespace?: string(name='productNamespace', description='The code of the service. 
This parameter is required.', example='SS'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), runtimeQueue?: string(name='runtimeQueue', description='The queue on which the workflow runs.', example='root_queue'), versionHashCode?: string(name='versionHashCode', description='The hash code of the version.', example='dh*********'), versionNumber?: int32(name='versionNumber', description='The version number of the workflow.', example='1'), } model StartProcessInstanceResponseBody = { code?: int32(name='code', description='The code that is returned by the backend server.', example='1400009'), data?: any(name='data', description='The data returned.', example='{\\\\"sessionBizId\\\\": \\\\"sc-dc85644dba1c8c63\\\\", \\\\"bizId\\\\": \\\\"st-aeed3b0d4f87418a9a9dcbd757477658\\\\", \\\\"gmtCreated\\\\": \\\\"Thu Sep 12 02:28:45 UTC 2024\\\\"}'), failed?: boolean(name='failed', description='Indicates whether the workflow fails to be run manually.', example='false'), httpStatusCode?: int32(name='httpStatusCode', description='The HTTP status code.', example='200'), msg?: string(name='msg', description='The description of the returned code.', example='No permission for resource action'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), success?: boolean(name='success', description='Indicates whether the request was successful.', example='true'), } model StartProcessInstanceResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: StartProcessInstanceResponseBody(name='body'), } /** * @summary Manually runs a workflow. * * @param request StartProcessInstanceRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return StartProcessInstanceResponse */ async function startProcessInstanceWithOptions(bizId: string, request: StartProcessInstanceRequest, headers: map[string]string, runtime: $RuntimeOptions): StartProcessInstanceResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.action)) { query['action'] = request.action; } if (!$isNull(request.comments)) { query['comments'] = request.comments; } if (!$isNull(request.email)) { query['email'] = request.email; } if (!$isNull(request.interval)) { query['interval'] = request.interval; } if (!$isNull(request.isProd)) { query['isProd'] = request.isProd; } if (!$isNull(request.processDefinitionCode)) { query['processDefinitionCode'] = request.processDefinitionCode; } if (!$isNull(request.productNamespace)) { query['productNamespace'] = request.productNamespace; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.runtimeQueue)) { query['runtimeQueue'] = request.runtimeQueue; } if (!$isNull(request.versionHashCode)) { query['versionHashCode'] = request.versionHashCode; } if (!$isNull(request.versionNumber)) { query['versionNumber'] = request.versionNumber; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'StartProcessInstance', version = '2023-08-08', protocol = 'HTTPS', pathname = `/dolphinscheduler/projects/${$URL.percentEncode(bizId)}/executors/start-process-instance`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Manually runs a workflow. 
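 *
 * Example (illustrative sketch only, not generated SDK code): manually trigger a workflow
 * run in the production environment. The workflow ID, project ID, and queue name below are
 * hypothetical placeholder values.
 *
 *   var request = new StartProcessInstanceRequest{
 *     processDefinitionCode = 123456789012345,   // hypothetical workflow ID
 *     isProd = true,
 *     productNamespace = 'SS',
 *     runtimeQueue = 'root_queue',                // hypothetical queue
 *   };
 *   var response = startProcessInstance('w-1234567890abcdef', request);  // hypothetical project ID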
 *
 * @param request StartProcessInstanceRequest
 * @return StartProcessInstanceResponse
 */
async function startProcessInstance(bizId: string, request: StartProcessInstanceRequest): StartProcessInstanceResponse {
  var runtime = new $RuntimeOptions{};
  var headers : map[string]string = {};
  return startProcessInstanceWithOptions(bizId, request, headers, runtime);
}

model StartSessionClusterRequest {
  queueName?: string(name='queueName', description='The queue name.', example='root_queue'),
  sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='sc-xxxxxxxxxxx'),
  regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'),
}

model StartSessionClusterResponseBody = {
  requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'),
  sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='w-******'),
}

model StartSessionClusterResponse = {
  headers?: map[string]string(name='headers'),
  statusCode?: int32(name='statusCode'),
  body?: StartSessionClusterResponseBody(name='body'),
}

/**
 * @summary Starts a session.
 *
 * @param request StartSessionClusterRequest
 * @param headers map
 * @param runtime runtime options for this request RuntimeOptions
 * @return StartSessionClusterResponse
 */
async function startSessionClusterWithOptions(workspaceId: string, request: StartSessionClusterRequest, headers: map[string]string, runtime: $RuntimeOptions): StartSessionClusterResponse {
  request.validate();
  var query : map[string]any = {};
  if (!$isNull(request.regionId)) {
    query['regionId'] = request.regionId;
  }
  var body : map[string]any = {};
  if (!$isNull(request.queueName)) {
    body['queueName'] = request.queueName;
  }
  if (!$isNull(request.sessionClusterId)) {
    body['sessionClusterId'] = request.sessionClusterId;
  }
  var req = new OpenApiUtil.OpenApiRequest{
    headers = headers,
    query = OpenApiUtil.query(query),
    body = OpenApiUtil.parseToMap(body),
  };
  var params = new OpenApiUtil.Params{
    action = 'StartSessionCluster',
    version = '2023-08-08',
    protocol = 'HTTPS',
    pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/sessionClusters/action/startSessionCluster`,
    method = 'POST',
    authType = 'AK',
    style = 'ROA',
    reqBodyType = 'json',
    bodyType = 'json',
  };
  return callApi(params, req, runtime);
}

/**
 * @summary Starts a session.
 *
 * @param request StartSessionClusterRequest
 * @return StartSessionClusterResponse
 */
async function startSessionCluster(workspaceId: string, request: StartSessionClusterRequest): StartSessionClusterResponse {
  var runtime = new $RuntimeOptions{};
  var headers : map[string]string = {};
  return startSessionClusterWithOptions(workspaceId, request, headers, runtime);
}

model StopSessionClusterRequest {
  queueName?: string(name='queueName', description='The queue name.', example='root_queue'),
  sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='sc-xxxxxxxxxxxx'),
  regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'),
}

model StopSessionClusterResponseBody = {
  requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'),
  sessionClusterId?: string(name='sessionClusterId', description='The session ID.', example='w-******'),
}

model StopSessionClusterResponse = {
  headers?: map[string]string(name='headers'),
  statusCode?: int32(name='statusCode'),
  body?: StopSessionClusterResponseBody(name='body'),
}

/**
 * @summary Stops a session.
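 *
 * Example (illustrative sketch only, not generated SDK code): stop a running session. The
 * workspace ID and session ID below are hypothetical placeholders; startSessionCluster is
 * called in the same way to start a session.
 *
 *   var request = new StopSessionClusterRequest{
 *     sessionClusterId = 'sc-1234567890abcdef',   // hypothetical session ID
 *   };
 *   var response = stopSessionCluster('w-1234567890abcdef', request);  // hypothetical workspace ID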
* * @param request StopSessionClusterRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return StopSessionClusterResponse */ async function stopSessionClusterWithOptions(workspaceId: string, request: StopSessionClusterRequest, headers: map[string]string, runtime: $RuntimeOptions): StopSessionClusterResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var body : map[string]any = {}; if (!$isNull(request.queueName)) { body['queueName'] = request.queueName; } if (!$isNull(request.sessionClusterId)) { body['sessionClusterId'] = request.sessionClusterId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), body = OpenApiUtil.parseToMap(body), }; var params = new OpenApiUtil.Params{ action = 'StopSessionCluster', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/v1/workspaces/${$URL.percentEncode(workspaceId)}/sessionClusters/action/stopSessionCluster`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Stops a session. * * @param request StopSessionClusterRequest * @return StopSessionClusterResponse */ async function stopSessionCluster(workspaceId: string, request: StopSessionClusterRequest): StopSessionClusterResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return stopSessionClusterWithOptions(workspaceId, request, headers, runtime); } model TerminateSqlStatementRequest { regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), } model TerminateSqlStatementResponseBody = { requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), } model TerminateSqlStatementResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: TerminateSqlStatementResponseBody(name='body'), } /** * @summary Terminates an SQL query task. * * @param request TerminateSqlStatementRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return TerminateSqlStatementResponse */ async function terminateSqlStatementWithOptions(workspaceId: string, statementId: string, request: TerminateSqlStatementRequest, headers: map[string]string, runtime: $RuntimeOptions): TerminateSqlStatementResponse { request.validate(); var query : map[string]any = {}; if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'TerminateSqlStatement', version = '2023-08-08', protocol = 'HTTPS', pathname = `/api/interactive/v1/workspace/${$URL.percentEncode(workspaceId)}/statement/${$URL.percentEncode(statementId)}/terminate`, method = 'POST', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Terminates an SQL query task. 
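 *
 * Example (illustrative sketch only, not generated SDK code): terminate a running SQL
 * statement. The workspace ID and statement ID below are hypothetical placeholders.
 *
 *   var request = new TerminateSqlStatementRequest{};
 *   var response = terminateSqlStatement('w-1234567890abcdef', 'st-1234567890abcdef', request);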
* * @param request TerminateSqlStatementRequest * @return TerminateSqlStatementResponse */ async function terminateSqlStatement(workspaceId: string, statementId: string, request: TerminateSqlStatementRequest): TerminateSqlStatementResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return terminateSqlStatementWithOptions(workspaceId, statementId, request, headers, runtime); } model UpdateProcessDefinitionWithScheduleRequest { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), description?: string(name='description', description='The description of the workflow.', example='ods batch workflow'), executionType?: string(name='executionType', description='The execution policy. This parameter is required.', example='PARALLEL'), globalParams?: [ { direct?: string(name='direct'), prop?: string(name='prop'), type?: string(name='type'), value?: string(name='value'), } ](name='globalParams'), name?: string(name='name', description='The name of the workflow. This parameter is required.', example='ods_batch_workflow'), productNamespace?: string(name='productNamespace', description='The code of the service. This parameter is required.', example='SS'), publish?: boolean(name='publish', description='Specifies whether to publish the workflow.', example='true'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), releaseState?: string(name='releaseState', description='The status of the workflow.', example='ONLINE'), resourceQueue?: string(name='resourceQueue', description='The resource queue.', example='root_queue'), retryTimes?: int32(name='retryTimes', description='The number of retries.', example='1'), runAs?: string(name='runAs', description='The execution user.', example='113***************'), schedule?: { crontab?: string(name='crontab', description='The CRON expression that is used for scheduling.', example='0 0 0 * * ?'), endTime?: string(name='endTime', description='The end time of the scheduling.', example='2025-12-23 16:13:27'), startTime?: string(name='startTime', description='The start time of the scheduling.', example='2024-12-23 16:13:27'), timezoneId?: string(name='timezoneId', description='The ID of the time zone.', example='Asia/Shanghai'), }(name='schedule', description='The scheduling settings.'), tags?: map[string]string(name='tags', description='The tags.'), taskDefinitionJson?: [ { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), code?: long(name='code', description='The node ID. This parameter is required.', example='23************'), description?: string(name='description', description='The node description.', example='ods transform task'), failAlertEnable?: boolean(name='failAlertEnable', description='Specifies whether to send alerts when the node fails.', example='true'), failRetryTimes?: int32(name='failRetryTimes', description='The number of retries when the node fails.', example='1'), name?: string(name='name', description='The name of the job. 
This parameter is required.', example='ods_transform_task'), startAlertEnable?: boolean(name='startAlertEnable', description='Specifies whether to send alerts when the node is started.', example='true'), tags?: map[string]string(name='tags', description='The tags of the job.'), taskParams?: { displaySparkVersion?: string(name='displaySparkVersion', description='The displayed version of the Spark engine.', example='esr-4.0.0 (Spark 3.5.2, Scala 2.12)'), environmentId?: string(name='environmentId', description='The environment ID.', example='ev-h*************'), fusion?: boolean(name='fusion', description='Specifies whether to enable Fusion engine for acceleration.', example='false'), localParams?: [ { direct?: string(name='direct'), prop?: string(name='prop'), type?: string(name='type'), value?: string(name='value'), } ](name='localParams'), resourceQueueId?: string(name='resourceQueueId', description='The name of the queue on which the job runs. This parameter is required.', example='root_queue'), sparkConf?: [ { key?: string(name='key', description='The key of the SparkConf object.', example='spark.dynamicAllocation.enabled'), value?: string(name='value', description='The value of the SparkConf object.', example='true'), } ](name='sparkConf', description='The configurations of the Spark jobs.'), sparkDriverCores?: int32(name='sparkDriverCores', description='The number of driver cores of the Spark job.', example='1'), sparkDriverMemory?: long(name='sparkDriverMemory', description='The size of driver memory of the Spark job.', example='4g'), sparkExecutorCores?: int32(name='sparkExecutorCores', description='The number of executor cores of the Spark job.', example='1'), sparkExecutorMemory?: long(name='sparkExecutorMemory', description='The size of executor memory of the Spark job.', example='4g'), sparkLogLevel?: string(name='sparkLogLevel', description='The level of the Spark log.', example='INFO'), sparkLogPath?: string(name='sparkLogPath', description='The path where the operational logs of the Spark job are stored.'), sparkVersion?: string(name='sparkVersion', description='The version of the Spark engine.', example='esr-4.0.0 (Spark 3.5.2, Scala 2.12)'), taskBizId?: string(name='taskBizId', description='The ID of the data development job. This parameter is required.', example='TSK-d87******************'), type?: string(name='type', description='The type of the Spark job.', example='SQL'), workspaceBizId?: string(name='workspaceBizId', description='The workspace ID. This parameter is required.', example='w-d8********'), }(name='taskParams', description='The job parameters. This parameter is required.'), taskType?: string(name='taskType', description='The type of the node. This parameter is required.', example='EMR-SERVERLESS-SPARK'), timeout?: int32(name='timeout', description='The default timeout period of the node.', example='30'), } ](name='taskDefinitionJson', description='The descriptions of all nodes in the workflow. This parameter is required.'), taskParallelism?: int32(name='taskParallelism', description='The node parallelism.', example='1'), taskRelationJson?: [ { name?: string(name='name', description='The name of the node topology. You can enter a workflow name. This parameter is required.', example='ods batch workflow'), postTaskCode?: long(name='postTaskCode', description='The ID of the downstream node. This parameter is required.', example='19************'), postTaskVersion?: int32(name='postTaskVersion', description='The version of the downstream node. 
This parameter is required.', example='1'), preTaskCode?: long(name='preTaskCode', description='The ID of the upstream node. This parameter is required.', example='16************'), preTaskVersion?: int32(name='preTaskVersion', description='The version of the upstream node. This parameter is required.', example='1'), } ](name='taskRelationJson', description='The dependencies of all nodes in the workflow. preTaskCode specifies the ID of an upstream node, and postTaskCode specifies the ID of a downstream node. The ID of each node is unique. If a node does not have an upstream node, set preTaskCode to 0. This parameter is required.'), timeout?: int32(name='timeout', description='The default timeout period of the workflow.', example='300'), } model UpdateProcessDefinitionWithScheduleShrinkRequest { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), description?: string(name='description', description='The description of the workflow.', example='ods batch workflow'), executionType?: string(name='executionType', description='The execution policy. This parameter is required.', example='PARALLEL'), globalParamsShrink?: string(name='globalParams'), name?: string(name='name', description='The name of the workflow. This parameter is required.', example='ods_batch_workflow'), productNamespace?: string(name='productNamespace', description='The code of the service. This parameter is required.', example='SS'), publish?: boolean(name='publish', description='Specifies whether to publish the workflow.', example='true'), regionId?: string(name='regionId', description='The region ID.', example='cn-hangzhou'), releaseState?: string(name='releaseState', description='The status of the workflow.', example='ONLINE'), resourceQueue?: string(name='resourceQueue', description='The resource queue.', example='root_queue'), retryTimes?: int32(name='retryTimes', description='The number of retries.', example='1'), runAs?: string(name='runAs', description='The execution user.', example='113***************'), scheduleShrink?: string(name='schedule', description='The scheduling settings.'), tagsShrink?: string(name='tags', description='The tags.'), taskDefinitionJsonShrink?: string(name='taskDefinitionJson', description='The descriptions of all nodes in the workflow. This parameter is required.'), taskParallelism?: int32(name='taskParallelism', description='The node parallelism.', example='1'), taskRelationJsonShrink?: string(name='taskRelationJson', description='The dependencies of all nodes in the workflow. preTaskCode specifies the ID of an upstream node, and postTaskCode specifies the ID of a downstream node. The ID of each node is unique. If a node does not have an upstream node, set preTaskCode to 0. 
This parameter is required.'), timeout?: int32(name='timeout', description='The default timeout period of the workflow.', example='300'), } model UpdateProcessDefinitionWithScheduleResponseBody = { code?: int32(name='code', description='The code that is returned by the backend server.', example='1400009'), data?: { alertEmailAddress?: string(name='alertEmailAddress', description='The email address to receive alerts.', example='foo_bar@spark.alert.invalid.com'), bizId?: string(name='bizId', description='The workspace ID.', example='alicloud_ack_one_cluster'), code?: string(name='code', description='The workflow ID.', example='12***********'), createTime?: string(name='createTime', description='The time when the workflow was created.', example='2024-09-05T02:03:19Z'), crontab?: string(name='crontab', description='The CRON expression that is used for scheduling.', example='0 0 0 * * ?'), description?: string(name='description', description='The node description.', example='1'), endTime?: string(name='endTime', description='The end of the end time range.', example='1710432000000'), executionType?: string(name='executionType', description='The execution policy.', example='SERIAL'), id?: string(name='id', description='The serial number of the workflow.', example='123223'), name?: string(name='name', description='The name of the workflow.', example='ods_batch_workflow'), projectName?: string(name='projectName', description='The name of the project to which the workflow belongs.', example='w-********'), releaseState?: string(name='releaseState', description='The status of the workflow.', example='ONLINE'), startTime?: string(name='startTime', description='The start time of the scheduling.', example='0'), timezoneId?: string(name='timezoneId', description='The ID of the time zone.', example='Asia/Shanghai'), updateTime?: string(name='updateTime', description='The time when the workflow was updated.', example='2024-03-05T06:24:27Z'), userId?: string(name='userId', description='The ID of the user that is used to initiate a scheduling.', example='113*********'), userName?: string(name='userName', description='The name of the user that is used to initiate a scheduling.', example='w-********'), version?: int32(name='version', description='The version number.', example='1'), versionHashCode?: string(name='versionHashCode', description='The hash code of the version.', example='dwerf*********'), }(name='data', description='The data returned.'), failed?: string(name='failed', description='Indicates whether the request failed.', example='false'), httpStatusCode?: int32(name='httpStatusCode', description='The HTTP status code.', example='200'), msg?: string(name='msg', description='The description of the returned code.', example='No permission for resource action'), requestId?: string(name='requestId', description='The request ID.', example='DD6B1B2A-5837-5237-ABE4-FF0C8944****'), success?: string(name='success', description='Indicates whether the request was successful.', example='true'), } model UpdateProcessDefinitionWithScheduleResponse = { headers?: map[string]string(name='headers'), statusCode?: int32(name='statusCode'), body?: UpdateProcessDefinitionWithScheduleResponseBody(name='body'), } /** * @summary Updates the workflow and time-based scheduling configurations. 
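 *
 * Note: the structured fields of the request (globalParams, schedule, tags,
 * taskDefinitionJson, taskRelationJson) are JSON-serialized into the corresponding
 * fields of UpdateProcessDefinitionWithScheduleShrinkRequest and sent as query
 * parameters, so callers should pass structured objects rather than pre-serialized strings.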
* * @param tmpReq UpdateProcessDefinitionWithScheduleRequest * @param headers map * @param runtime runtime options for this request RuntimeOptions * @return UpdateProcessDefinitionWithScheduleResponse */ async function updateProcessDefinitionWithScheduleWithOptions(bizId: string, code: string, tmpReq: UpdateProcessDefinitionWithScheduleRequest, headers: map[string]string, runtime: $RuntimeOptions): UpdateProcessDefinitionWithScheduleResponse { tmpReq.validate(); var request = new UpdateProcessDefinitionWithScheduleShrinkRequest{}; OpenApiUtil.convert(tmpReq, request); if (!$isNull(tmpReq.globalParams)) { request.globalParamsShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.globalParams, 'globalParams', 'json'); } if (!$isNull(tmpReq.schedule)) { request.scheduleShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.schedule, 'schedule', 'json'); } if (!$isNull(tmpReq.tags)) { request.tagsShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.tags, 'tags', 'json'); } if (!$isNull(tmpReq.taskDefinitionJson)) { request.taskDefinitionJsonShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.taskDefinitionJson, 'taskDefinitionJson', 'json'); } if (!$isNull(tmpReq.taskRelationJson)) { request.taskRelationJsonShrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.taskRelationJson, 'taskRelationJson', 'json'); } var query : map[string]any = {}; if (!$isNull(request.alertEmailAddress)) { query['alertEmailAddress'] = request.alertEmailAddress; } if (!$isNull(request.description)) { query['description'] = request.description; } if (!$isNull(request.executionType)) { query['executionType'] = request.executionType; } if (!$isNull(request.globalParamsShrink)) { query['globalParams'] = request.globalParamsShrink; } if (!$isNull(request.name)) { query['name'] = request.name; } if (!$isNull(request.productNamespace)) { query['productNamespace'] = request.productNamespace; } if (!$isNull(request.publish)) { query['publish'] = request.publish; } if (!$isNull(request.regionId)) { query['regionId'] = request.regionId; } if (!$isNull(request.releaseState)) { query['releaseState'] = request.releaseState; } if (!$isNull(request.resourceQueue)) { query['resourceQueue'] = request.resourceQueue; } if (!$isNull(request.retryTimes)) { query['retryTimes'] = request.retryTimes; } if (!$isNull(request.runAs)) { query['runAs'] = request.runAs; } if (!$isNull(request.scheduleShrink)) { query['schedule'] = request.scheduleShrink; } if (!$isNull(request.tagsShrink)) { query['tags'] = request.tagsShrink; } if (!$isNull(request.taskDefinitionJsonShrink)) { query['taskDefinitionJson'] = request.taskDefinitionJsonShrink; } if (!$isNull(request.taskParallelism)) { query['taskParallelism'] = request.taskParallelism; } if (!$isNull(request.taskRelationJsonShrink)) { query['taskRelationJson'] = request.taskRelationJsonShrink; } if (!$isNull(request.timeout)) { query['timeout'] = request.timeout; } var req = new OpenApiUtil.OpenApiRequest{ headers = headers, query = OpenApiUtil.query(query), }; var params = new OpenApiUtil.Params{ action = 'UpdateProcessDefinitionWithSchedule', version = '2023-08-08', protocol = 'HTTPS', pathname = `/dolphinscheduler/projects/${$URL.percentEncode(bizId)}/process-definition/${$URL.percentEncode(code)}`, method = 'PUT', authType = 'AK', style = 'ROA', reqBodyType = 'json', bodyType = 'json', }; return callApi(params, req, runtime); } /** * @summary Updates the workflow and time-based scheduling configurations. 
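 *
 * Example (illustrative sketch only, not generated SDK code): update a workflow and its
 * daily schedule. All IDs, names, and queue values below are hypothetical placeholders,
 * and taskDefinitionJson/taskRelationJson are omitted here although they are required in
 * a real call. The nested schedule construction is shown for illustration.
 *
 *   var request = new UpdateProcessDefinitionWithScheduleRequest{
 *     name = 'ods_batch_workflow',
 *     executionType = 'PARALLEL',
 *     productNamespace = 'SS',
 *     publish = true,
 *     resourceQueue = 'root_queue',
 *     schedule = new UpdateProcessDefinitionWithScheduleRequest.schedule{
 *       crontab = '0 0 0 * * ?',
 *       startTime = '2024-12-23 16:13:27',
 *       endTime = '2025-12-23 16:13:27',
 *       timezoneId = 'Asia/Shanghai',
 *     },
 *   };
 *   var response = updateProcessDefinitionWithSchedule('w-1234567890abcdef', '123456789012345', request);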
* * @param request UpdateProcessDefinitionWithScheduleRequest * @return UpdateProcessDefinitionWithScheduleResponse */ async function updateProcessDefinitionWithSchedule(bizId: string, code: string, request: UpdateProcessDefinitionWithScheduleRequest): UpdateProcessDefinitionWithScheduleResponse { var runtime = new $RuntimeOptions{}; var headers : map[string]string = {}; return updateProcessDefinitionWithScheduleWithOptions(bizId, code, request, headers, runtime); }