-- linkis-dist/package/db/upgrade/1.7.0_schema/mysql/linkis_dml.sql
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
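-- add new error codes and update existing error-code descriptions/regexes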
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01006','没有健康可用的ecm节点,可能任务量大,导致节点资源处于不健康状态,尝试kill空闲引擎释放资源','There are corresponding ECM tenant labels',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01007','文件编码格式异常,请联系管理人员处理','UnicodeEncodeError.*characters',0);
UPDATE linkis_ps_error_code SET error_regex = "KeyError: (.*)" WHERE error_code = "43017";
UPDATE linkis_ps_error_code SET error_desc = "任务实际运行内存超过了设置的内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory增加内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13002";
UPDATE linkis_ps_configuration_config_key SET validate_range ='[\",\",\"\\\\t\",\"\\\\;\",\"\\\\|\"]',description ="取值范围:,或\\t或;或|" WHERE `key`= "pipeline.field.split";
DELETE FROM linkis_ps_error_code WHERE error_code = "43007";
UPDATE linkis_ps_error_code SET error_regex='Permission denied:\\s*user=[a-zA-Z0-9_]+[,,]\\s*access=[a-zA-Z0-9_]+\\s*[,,]\\s*inode="([a-zA-Z0-9/_\\.]+)"' WHERE error_code = "22001";
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13010','任务实际运行内存超过了设置的内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory增加内存','Container exited with a non-zero exit code',0);
UPDATE linkis_ps_configuration_config_key SET `key`="pipeline.output.isoverwrite" where `key` = "pipeline.output.isoverwtite";
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43042','插入数据表动态分区数超过配置值 %s ,请优化sql或调整配置hive.exec.max.dynamic.partitions后重试','Maximum was set to (\\S+) partitions per node',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43043','执行任务消耗内存超过限制,hive任务请修改map或reduce的内存,spark任务请修改executor端内存','Error:java heap space',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43044','表 %s 分区数超过阈值 %s,需要分批删除分区,再删除表','the partitions of table (\\S+) exceeds threshold (\\S+)',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43045','查询/操作的表 %s 分区数为 %s ,超过阈值 %s ,需要限制查询/操作的分区数量','Number of partitions scanned \\(=(\\d+)\\) on table (\\S+) exceeds limit \\(=(\\d+)\\)',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43046','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Number of dynamic partitions created is (\\S+), which is more than (\\S+)',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43047','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Maximum was set to (\\S+) partitions per node, number of dynamic partitions on this node: (\\S+)',0);
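-- add hive config key mapreduce.job.reduce.slowstart.completedmaps and bind it to the *-*,hive-2.3.3 engine label with an empty default value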
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('mapreduce.job.reduce.slowstart.completedmaps', '取值范围:0-1', 'Map任务数与总Map任务数之间的比例','0.05', 'Regex', '^(0(\\.\\d{1,2})?|1(\\.0{1,2})?)$', '0', '0', '1', 'hive引擎设置', 'hive', 'Value Range: 0-1', 'The Ratio Between The Number Of Map Tasks And The Total Number Of Map Tasks', 'Hive Engine Settings', '1');
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'hive' and config.`key` = "mapreduce.job.reduce.slowstart.completedmaps" and label_value = "*-*,hive-2.3.3");
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = (select id FROM linkis_ps_configuration_config_key where `key`="mapreduce.job.reduce.slowstart.completedmaps") AND label.label_value = '*-*,hive-2.3.3');
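-- Optional sanity check (illustrative only, not executed by the upgrade; table and column
-- names are taken from the statements above): confirm the new hive key is bound to the
-- *-*,hive-2.3.3 label.
-- SELECT k.`key`, l.label_value
-- FROM linkis_ps_configuration_key_engine_relation r
-- JOIN linkis_ps_configuration_config_key k ON k.id = r.config_key_id
-- JOIN linkis_cg_manager_label l ON l.id = r.engine_type_label_id
-- WHERE k.`key` = 'mapreduce.job.reduce.slowstart.completedmaps';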
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13002";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13010";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或调优sql后执行" WHERE error_code = "13003";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13004";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13005";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13006";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13007";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整executor内存或联系管理员扩容" WHERE error_code = "01002";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整executor内存或联系管理员扩容" WHERE error_code = "01003";
-- add starrocks
INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`, `description_en`, `option_en`, `classifier_en`) VALUES ('starrocks', 'starrocks数据库', 'starrocks', 'olap', '', 4, 'StarRocks Database', 'StarRocks', 'Olap');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'host','主机名(Host)',NULL,'TEXT',NULL,1,'主机名(Host)',NULL,NULL,NULL,NULL,now(),now(),'Host','Host');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'port','TCP端口号(Port)','9030','TEXT',NULL,1,'TCP端口号',NULL,NULL,NULL,NULL,now(),now(),'Tcp_Port','Tcp_Port');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'driverClassName','驱动类名(Driver class name)','com.mysql.jdbc.Driver','TEXT',NULL,1,'驱动类名(Driver class name)','',NULL,NULL,NULL,'2024-05-23 18:28:07.0','2024-05-23 18:28:07.0','Driver class name','Driver class name');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'username','用户名(Username)',NULL,'TEXT',NULL,1,'用户名(Username)','^[0-9A-Za-z_-]+$',NULL,NULL,NULL,now(),now(),'Username','Username');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'password','密码(Password)',NULL,'PASSWORD',NULL,1,'密码(Password)','',NULL,NULL,NULL,now(),now(),'Password','Password');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'databaseName','数据库名(Database name)',NULL,'TEXT',NULL,0,'数据库名(Database name)',NULL,NULL,NULL,NULL,now(),now(),'Database name','Database name');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'params','连接参数(Connection params)',NULL,'TEXT',NULL,0,'输入JSON格式(Input JSON format): {"param":"value"}',NULL,NULL,NULL,NULL,now(),now(),'Connection params','Input JSON format: {"param":"value"}');
INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'http_port','HTTP端口号(Port)','8030','TEXT',NULL,0,'HTTP端口号',NULL,NULL,NULL,NULL,now(),now(),'Http_Port','Http_Port');
-- add userClientIP for tdsql
INSERT INTO linkis_ps_dm_datasource_type_key (data_source_type_id, `key`, name, default_value, value_type, `scope`, `require`, description, value_regex, ref_id, ref_value, data_source, update_time, create_time, name_en, description_en) VALUES(5, 'userClientIp', 'userClientIp', NULL, 'TEXT', 'ENV', 0, 'userClientIp', NULL, NULL, NULL, NULL, now(),now(), 'user client ip', 'user client ip');
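-- add more error codes and update error descriptions for spark/hive/python related failures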
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43019','执行表在元数据库中存在meta缓存,meta信息与缓存不一致导致,请增加参数(--conf spark.sql.hive.convertMetastoreOrc=false)后重试','Unable to alter table.*Table is not allowed to be altered',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13011','广播表过大导致driver内存溢出,请在执行sql前增加参数后重试:set spark.sql.autoBroadcastJoinThreshold=-1;','dataFrame to local exception',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43048','参数引用错误,请检查参数 %s 是否正常引用','UnboundLocalError.*local variable (\\S+) referenced before assignment',0);
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容" WHERE error_code = "01002";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容" WHERE error_code = "01003";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13005";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13006";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13007";
UPDATE linkis_ps_error_code SET error_desc = "您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败" WHERE error_code = "13009";
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13012','driver内存不足,请增加driver内存后重试','Failed to allocate a page (\\S+.*\\)), try again.',0);
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13013','使用spark默认变量sc导致后续代码执行失败','sc.setJobGroup(\\S+.*\\))',0);
DELETE FROM linkis_ps_error_code WHERE error_code = "43016";
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43016','模块 %s 没有属性 %s ,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0);
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台调整内存参数。" WHERE error_code = "13004";
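-- register nebula-3.0.0 engine labels, configuration keys, and default values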
INSERT INTO linkis_cg_manager_label (label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES ('combined_userCreator_engineType','*-IDE,nebula-3.0.0','OPTIONAL',2,now(),now());
INSERT INTO linkis_cg_manager_label (label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES ('combined_userCreator_engineType','*-*,nebula-3.0.0','OPTIONAL',2,now(),now());
insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES ((select id from linkis_cg_manager_label where `label_value` = '*-IDE,nebula-3.0.0'), 2);
INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES
('linkis.nebula.host','Nebula 连接地址','Nebula 连接地址',NULL,'None',NULL,'nebula',0,0,1,'Nebula引擎设置',0,'Nebula Engine Settings','Nebula Host','Nebula Host',0);
INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES
('linkis.nebula.port','Nebula 连接端口','Nebula 连接端口',NULL,'None',NULL,'nebula',0,0,1,'Nebula引擎设置',0,'Nebula Engine Settings','Nebula Port','Nebula Port',0);
INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES
('linkis.nebula.username','Nebula 连接用户名','Nebula 连接用户名',NULL,'None',NULL,'nebula',0,0,1,'Nebula引擎设置',0,'Nebula Engine Settings','Nebula Username','Nebula Username',0);
INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES
('linkis.nebula.password','Nebula 连接密码','Nebula 连接密码',NULL,'None',NULL,'nebula',0,0,1,'Nebula引擎设置',0,'Nebula Engine Settings','Nebula Password','Nebula Password',0);
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.host' and label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.port' and label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.username' and label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.password' and label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, '127.0.0.1' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.host') AND label.label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, '9669' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.port') AND label.label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.username') AND label.label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.password') AND label.label_value = '*-*,nebula-3.0.0');
INSERT INTO linkis_ps_configuration_config_key (`key`, description, name, default_value, validate_type, validate_range, engine_conn_type, is_hidden, is_advanced, `level`, treeName, boundary_type, en_treeName, en_description, en_name, template_required) VALUES ('linkis.nebula.space', 'Nebula 图空间', 'Nebula 图空间', NULL, 'None', NULL, 'nebula', 0, 0, 1, 'Nebula引擎设置', 0, 'Nebula Engine Settings', 'Nebula Space', 'Nebula Space', 0);
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) ( select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.space' and label_value = '*-*,nebula-3.0.0');
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.space') AND label.label_value = '*-*,nebula-3.0.0');
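-- Optional sanity check (illustrative only, not executed by the upgrade): list the default
-- values attached to the nebula keys under the *-*,nebula-3.0.0 label, using the tables
-- populated above.
-- SELECT k.`key`, v.config_value, l.label_value
-- FROM linkis_ps_configuration_config_value v
-- JOIN linkis_ps_configuration_config_key k ON k.id = v.config_key_id
-- JOIN linkis_cg_manager_label l ON l.id = v.config_label_id
-- WHERE k.engine_conn_type = 'nebula' AND l.label_value = '*-*,nebula-3.0.0';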
INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43020','Python 进程已停止,查询失败!','python process has stopped',0);
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存。" WHERE error_code = "13002";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13004";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13007";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13010";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存" WHERE error_code = "13005";
UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存" WHERE error_code = "13006";
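-- rename the starrocks databaseName field to Catalogs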
update linkis_ps_dm_datasource_type_key set name='Catalogs', description='Catalogs',name_en='Catalogs',description_en='Catalogs' where data_source_type_id in (select id from linkis_ps_dm_datasource_type where name = 'starrocks') and `key` = 'databaseName';