Skip to content

Commit a6f905b

Browse files
authored
[issue_1157][taier-scheduler] fix DataX sqlText replace #1157 (#1158)
1 parent 36bb1f4 commit a6f905b

File tree

1 file changed

+1
-30
lines changed

1 file changed

+1
-30
lines changed

taier-scheduler/src/main/java/com/dtstack/taier/scheduler/service/DataxService.java

+1-30
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,14 @@
11
package com.dtstack.taier.scheduler.service;
22

33
import com.alibaba.fastjson.JSONObject;
4-
import com.dtstack.taier.common.enums.EScheduleJobType;
5-
import com.dtstack.taier.common.env.EnvironmentContext;
64
import com.dtstack.taier.common.exception.TaierDefineException;
7-
import com.dtstack.taier.common.util.TaskParamsUtils;
85
import com.dtstack.taier.dao.domain.ScheduleJob;
96
import com.dtstack.taier.dao.domain.ScheduleTaskShade;
107
import com.dtstack.taier.dao.dto.ScheduleTaskParamShade;
118
import com.dtstack.taier.pluginapi.constrant.ConfigConstant;
12-
import com.dtstack.taier.pluginapi.enums.EDeployMode;
139
import com.dtstack.taier.scheduler.PluginWrapper;
14-
import com.dtstack.taier.scheduler.executor.DatasourceOperator;
1510
import com.dtstack.taier.scheduler.server.pipeline.JobParamReplace;
1611
import com.dtstack.taier.scheduler.utils.CreateJsonFileUtil;
17-
import com.dtstack.taier.scheduler.utils.FileUtil;
18-
import com.dtstack.taier.scheduler.utils.ScriptUtil;
1912
import org.apache.commons.collections.CollectionUtils;
2013
import org.apache.commons.io.FileUtils;
2114
import org.apache.commons.lang3.StringUtils;
@@ -35,15 +28,6 @@
3528
@Service
3629
public class DataxService {
3730

38-
@Autowired
39-
private ClusterService clusterService;
40-
41-
@Autowired
42-
private EnvironmentContext environmentContext;
43-
44-
@Autowired
45-
private DatasourceOperator datasourceOperator;
46-
4731
@Autowired
4832
private PluginWrapper pluginWrapper;
4933

@@ -74,19 +58,6 @@ public void handDataxParams(Map<String, Object> actionParam, ScheduleTaskShade t
7458
}
7559
dealDataxExeParams(actionParam, task, scheduleJob, sqlText);
7660
}
77-
/**
78-
* 将脚本上传到 hdfs
79-
*
80-
* @param sqlText
81-
* @param task
82-
* @param scheduleJob
83-
* @return
84-
*/
85-
private String uploadToHdfs(String sqlText, ScheduleTaskShade task, ScheduleJob scheduleJob) {
86-
JSONObject pluginInfo = clusterService.pluginInfoJSON(task.getTenantId(), task.getTaskType(), null, null, null);
87-
String hdfsPath = environmentContext.getHdfsTaskPath() + (FileUtil.getUploadFileName(task.getTaskType(), scheduleJob.getJobId()));
88-
return datasourceOperator.uploadToHdfs(pluginInfo, task.getTenantId(), sqlText, hdfsPath);
89-
}
9061

9162
private void dealDataxExeParams(Map<String, Object> actionParam, ScheduleTaskShade task, ScheduleJob scheduleJob,
9263
String sqlText) throws IOException {
@@ -112,7 +83,7 @@ private void dealDataxExeParams(Map<String, Object> actionParam, ScheduleTaskSha
11283
throw new TaierDefineException("datax.local.path is null");
11384
}
11485
//生成datax的json文件
115-
String taskTempPath = CreateJsonFileUtil.createJsonFile(task.getSqlText(), tempPath, task.getName());
86+
String taskTempPath = CreateJsonFileUtil.createJsonFile(sqlText, tempPath, task.getName());
11687
if (StringUtils.isBlank(taskTempPath)) {
11788
throw new TaierDefineException("创建datax.json文件失败");
11889
}

0 commit comments

Comments (0)