in dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts [524:711]
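/**
 * Converts the ITaskData returned by the backend into the flat INodeData
 * shape that the task form binds to. Relies on `omit` (presumably lodash's)
 * and the I* interfaces imported at the top of format-data.ts, outside this
 * excerpt.
 */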
export function formatModel(data: ITaskData) {
const params = {
...omit(data, [
'environmentCode',
'timeoutFlag',
'timeoutNotifyStrategy',
'taskParams'
]),
...omit(data.taskParams, ['resourceList', 'mainJar', 'localParams']),
environmentCode: data.environmentCode === -1 ? null : data.environmentCode,
timeoutFlag: data.timeoutFlag === 'OPEN',
timeoutNotifyStrategy: data.timeoutNotifyStrategy
? [data.timeoutNotifyStrategy]
: [],
localParams: data.taskParams?.localParams || []
} as INodeData
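  // The backend encodes the combined timeout strategy as a single
  // 'WARNFAILED' value; the form models it as two separate selections.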
if (data.timeoutNotifyStrategy === 'WARNFAILED') {
params.timeoutNotifyStrategy = ['WARN', 'FAILED']
}
if (data.taskParams?.resourceList) {
    params.resourceList = data.taskParams.resourceList.map(
      (item: { resourceName: string }) => item.resourceName
    )
}
  if (data.taskParams?.mainJar) {
    params.mainJar = data.taskParams.mainJar.resourceName
  }
  if (data.taskParams?.method) {
    params.method = data.taskParams.method
  }
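  // Sqoop target parameters arrive as a JSON string; expand them into the
  // flat Hive / HDFS / MySQL form fields, applying UI defaults where the
  // backend left a value unset.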
if (data.taskParams?.targetParams) {
const targetParams: ISqoopTargetParams = JSON.parse(
data.taskParams.targetParams
)
params.targetType = data.taskParams.targetType
params.targetHiveDatabase = targetParams.hiveDatabase
params.targetHiveTable = targetParams.hiveTable
params.targetHiveCreateTable = targetParams.createHiveTable
params.targetHiveDropDelimiter = targetParams.dropDelimiter
params.targetHiveOverWrite =
targetParams.hiveOverWrite === void 0 ? true : targetParams.hiveOverWrite
params.targetHiveTargetDir = targetParams.hiveTargetDir
params.targetHiveReplaceDelimiter = targetParams.replaceDelimiter
params.targetHivePartitionKey = targetParams.hivePartitionKey
params.targetHivePartitionValue = targetParams.hivePartitionValue
params.targetHdfsTargetPath = targetParams.targetPath
params.targetHdfsDeleteTargetDir =
targetParams.deleteTargetDir === void 0
? true
: targetParams.deleteTargetDir
params.targetHdfsCompressionCodec =
targetParams.compressionCodec === void 0
? 'snappy'
: targetParams.compressionCodec
params.targetHdfsFileType =
targetParams.fileType === void 0
? '--as-avrodatafile'
: targetParams.fileType
params.targetHdfsFieldsTerminated = targetParams.fieldsTerminated
params.targetHdfsLinesTerminated = targetParams.linesTerminated
params.targetMysqlType = targetParams.targetType
params.targetMysqlDatasource = targetParams.targetDatasource
params.targetMysqlTable = targetParams.targetTable
params.targetMysqlColumns = targetParams.targetColumns
params.targetMysqlFieldsTerminated = targetParams.fieldsTerminated
params.targetMysqlLinesTerminated = targetParams.linesTerminated
params.targetMysqlIsUpdate = targetParams.isUpdate
params.targetMysqlTargetUpdateKey = targetParams.targetUpdateKey
params.targetMysqlUpdateMode =
targetParams.targetUpdateMode === void 0
? 'allowinsert'
: targetParams.targetUpdateMode
}
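  // Sqoop source parameters are JSON-encoded as well.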
if (data.taskParams?.sourceParams) {
const sourceParams: ISqoopSourceParams = JSON.parse(
data.taskParams.sourceParams
)
params.srcTable = sourceParams.srcTable
params.srcColumnType = sourceParams.srcColumnType
params.srcColumns = sourceParams.srcColumns
params.sourceMysqlSrcQuerySql = sourceParams.srcQuerySql
params.srcQueryType = sourceParams.srcQueryType
params.sourceMysqlType = sourceParams.srcType
params.sourceMysqlDatasource = sourceParams.srcDatasource
params.mapColumnHive = sourceParams.mapColumnHive || []
params.mapColumnJava = sourceParams.mapColumnJava || []
params.sourceHdfsExportDir = sourceParams.exportDir
params.sourceHiveDatabase = sourceParams.hiveDatabase
params.sourceHiveTable = sourceParams.hiveTable
params.sourceHivePartitionKey = sourceParams.hivePartitionKey
params.sourceHivePartitionValue = sourceParams.hivePartitionValue
}
  if (data.taskParams?.rawScript) {
    params.rawScript = data.taskParams.rawScript
  }
  if (data.taskParams?.initScript) {
    params.initScript = data.taskParams.initScript
  }
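  // Switch task: copy the branch list and the default next node.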
  if (data.taskParams?.switchResult) {
    params.switchResult = data.taskParams.switchResult
    params.dependTaskList = data.taskParams.switchResult.dependTaskList || []
    params.nextNode = data.taskParams.switchResult.nextNode
  }
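  // Dependent task: deep-clone the dependence config so form edits do not
  // mutate the original task params.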
if (data.taskParams?.dependence) {
const dependence: IDependentParameters = JSON.parse(
JSON.stringify(data.taskParams.dependence)
)
params.checkInterval = dependence.checkInterval
params.failurePolicy = dependence.failurePolicy
params.failureWaitingTime = dependence.failureWaitingTime
params.dependTaskList = dependence.dependTaskList || []
params.relation = dependence.relation
}
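  // Data quality task: flatten ruleInputParameter into individual form fields.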
if (data.taskParams?.ruleInputParameter) {
params.check_type = data.taskParams.ruleInputParameter.check_type
params.comparison_execute_sql =
data.taskParams.ruleInputParameter.comparison_execute_sql
params.comparison_type = data.taskParams.ruleInputParameter.comparison_type
params.comparison_name = data.taskParams.ruleInputParameter.comparison_name
params.failure_strategy =
data.taskParams.ruleInputParameter.failure_strategy
params.operator = data.taskParams.ruleInputParameter.operator
params.src_connector_type =
data.taskParams.ruleInputParameter.src_connector_type
params.src_datasource_id =
data.taskParams.ruleInputParameter.src_datasource_id
params.src_database = data.taskParams.ruleInputParameter.src_database
params.src_table = data.taskParams.ruleInputParameter.src_table
params.field_length = data.taskParams.ruleInputParameter.field_length
params.begin_time = data.taskParams.ruleInputParameter.begin_time
params.deadline = data.taskParams.ruleInputParameter.deadline
params.datetime_format = data.taskParams.ruleInputParameter.datetime_format
params.target_filter = data.taskParams.ruleInputParameter.target_filter
params.regexp_pattern = data.taskParams.ruleInputParameter.regexp_pattern
params.enum_list = data.taskParams.ruleInputParameter.enum_list
params.src_filter = data.taskParams.ruleInputParameter.src_filter
params.src_field = data.taskParams.ruleInputParameter.src_field
params.statistics_execute_sql =
data.taskParams.ruleInputParameter.statistics_execute_sql
params.statistics_name = data.taskParams.ruleInputParameter.statistics_name
params.target_connector_type =
data.taskParams.ruleInputParameter.target_connector_type
params.target_datasource_id =
data.taskParams.ruleInputParameter.target_datasource_id
params.target_database = data.taskParams.ruleInputParameter.target_database
params.target_table = data.taskParams.ruleInputParameter.target_table
params.threshold = data.taskParams.ruleInputParameter.threshold
    if (data.taskParams.ruleInputParameter.mapping_columns) {
      params.mapping_columns = JSON.parse(
        data.taskParams.ruleInputParameter.mapping_columns
      )
    }
}
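  // Spark resource settings (deploy mode, cores, memory, executors).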
if (data.taskParams?.sparkParameters) {
params.deployMode = data.taskParams.sparkParameters.deployMode
params.driverCores = data.taskParams.sparkParameters.driverCores
params.driverMemory = data.taskParams.sparkParameters.driverMemory
params.executorCores = data.taskParams.sparkParameters.executorCores
params.executorMemory = data.taskParams.sparkParameters.executorMemory
params.numExecutors = data.taskParams.sparkParameters.numExecutors
params.others = data.taskParams.sparkParameters.others
params.sqlExecutionType = data.taskParams.sparkParameters.sqlExecutionType
}
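  // Conditions task: the form holds a single success and failure branch, so
  // take the first node from each list.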
if (data.taskParams?.conditionResult?.successNode?.length) {
params.successBranch = data.taskParams.conditionResult.successNode[0]
}
if (data.taskParams?.conditionResult?.failedNode?.length) {
params.failedBranch = data.taskParams.conditionResult.failedNode[0]
}
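  // customConfig is a numeric flag on the backend; expose it as a boolean.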
  if (data.taskParams?.customConfig !== void 0) {
    params.customConfig = data.taskParams.customConfig === 1
  }
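  // jobType === 'CUSTOM' marks the task as user-defined.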
if (data.taskParams?.jobType) {
params.isCustomTask = data.taskParams.jobType === 'CUSTOM'
}
return params
}