Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/dev-1.1.0' into dev-1.1.0-perform-optimize

Browse files Browse the repository at this point in the history
  • Loading branch information
jefftlin committed Sep 2, 2022
2 parents bcb7925 + 8c81e35 commit 19a4165
Show file tree
Hide file tree
Showing 12 changed files with 40 additions and 24 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1341,6 +1341,7 @@ public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, F
field.setFieldEditable(!"HIVE".equals(vo.getSinkTypeId()) && !"ELASTICSEARCH".equals(vo.getSinkTypeId()));
}
message.data("sinkFields", sinkFields);

// field mapping deduction
List<Map<String, Object>> deductions = new ArrayList<>();
List<DataSourceDbTableColumnDTO> left = sourceFields;
Expand All @@ -1351,16 +1352,20 @@ public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, F
right = sourceFields;
exchanged = true;
}
for (int i = 0; i < right.size(); i ++){
LOGGER.info("rightElement is:" + right.get(i).getName());
DataSourceDbTableColumnDTO leftElement = left.get(i % left.size());
DataSourceDbTableColumnDTO rightElement = right.get(i);
Map<String, Object> deduction = new HashMap<>();
deduction.put("source", exchanged ? rightElement : leftElement);
deduction.put("sink", exchanged ? leftElement : rightElement);
deduction.put("deleteEnable", true);
deductions.add(deduction);

// source size and sink size must not be null
if (!Objects.isNull(left) && left.size() > 0) {
for (int i = 0; i < right.size(); i ++){
DataSourceDbTableColumnDTO leftElement = left.get(i % left.size());
DataSourceDbTableColumnDTO rightElement = right.get(i);
Map<String, Object> deduction = new HashMap<>();
deduction.put("source", exchanged ? rightElement : leftElement);
deduction.put("sink", exchanged ? leftElement : rightElement);
deduction.put("deleteEnable", true);
deductions.add(deduction);
}
}

message.data("deductions", deductions);
message.data("transformEnable", true);

Expand Down
2 changes: 0 additions & 2 deletions exchangis-engines/engines/datax/datax-ftpwriter/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
<dependency> <scope>provided</scope>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>

</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
Expand Down Expand Up @@ -61,7 +60,6 @@
<artifactId>commons-net</artifactId>
<version>3.3</version>
</dependency>

</dependencies>
<build>
<plugins>
Expand Down
4 changes: 4 additions & 0 deletions exchangis-engines/engines/datax/datax-hdfsreader/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,10 @@
<artifactId>zookeeper</artifactId>
<groupId>org.apache.zookeeper</groupId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>pentaho-aggdesigner-algorithm</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
Expand Down
10 changes: 10 additions & 0 deletions exchangis-engines/engines/datax/datax-hdfswriter/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,10 @@
<artifactId>geronimo-jaspic_1.0_spec</artifactId>
<groupId>org.apache.geronimo.specs</groupId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>pentaho-aggdesigner-algorithm</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
Expand All @@ -111,6 +115,12 @@
<groupId>org.apache.hive.hcatalog</groupId>
<artifactId>hive-hcatalog-core</artifactId>
<version>${hive.version}</version>
<exclusions>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>pentaho-aggdesigner-algorithm</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>xalan</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -307,7 +307,7 @@ public Message deleteJob(@PathVariable("id") Long id, HttpServletRequest request
public Message getJob(HttpServletRequest request, @PathVariable("id") Long id) {
Message response = Message.ok();
try {
LOG.info("Request88888: {}", request);
LOG.info("Request: {}", request);

String userName = SecurityFilter.getLoginUsername(request);
if (!hasAuthority(userName, jobInfoService.getJob(id, true))) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ public Message createJob(
Long id = null;
id = jobInfoService.createJob(exchangisJobVo).getId();
response.data("id", id);
LOG.info("id6666: {}", id);
LOG.info("job id is: {}", id);
} catch (Exception e){
String message = "Fail to create dss job: " + exchangisJobVo.getJobName() +" (创建DSS任务失败)";
LOG.error(message, e);
Expand Down Expand Up @@ -177,9 +177,9 @@ else if (!hasAuthority(userName, jobInfoService.getJob(id , true))) {
@RequestMapping( value = "/execute/{id}", method = RequestMethod.POST)
public Message executeJob(@PathVariable("id") Long id, HttpServletRequest request, @RequestBody Map<String, Object> params) {
try {
LOG.info("start to parse params8909");
LOG.info("start to parse params");
String paramString = BDPJettyServerHelper.jacksonJson().writeValueAsString(params);
LOG.error("paramString999879: {}", paramString);
LOG.error("paramString: {}", paramString);
} catch (JsonProcessingException e) {
LOG.error("parse execute content error: {}", e.getMessage());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ public ExchangisLaunchedTaskMetricsVo getLaunchedTaskMetrics(String taskId, Stri
@Override
public boolean hasExecuteJobAuthority(String jobExecutionId, String userName) {
String jobUser = this.launchedJobDao.searchLaunchedJob(jobExecutionId).getCreateUser();
LOG.info("Job user is99999:{}, reuquest user is: {}", jobUser, userName);
LOG.info("Job user is:{}, reuquest user is: {}", jobUser, userName);
return hasExecuteJobAuthority(this.launchedJobDao.searchLaunchedJob(jobExecutionId) , userName);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -352,10 +352,10 @@ private void setSqoop(Long projectId, Map<String, Set<Long>> moduleIdsMap, boole
LOG.error("Occur error while tranform class", e.getMessage());
}

LOG.info("sqoopStr99999:{}", sqoopStr);
LOG.info("sqoopStr:{}", sqoopStr);
LOG.info("ExchangisJobVo sqoop: {}", job.getContent());
LOG.info("getCreateTime: {}", job.getId());
LOG.info("executorUser999: {}", job.getExecuteUser());
LOG.info("executorUser: {}", job.getExecuteUser());
sqoops.add(job);
}
exportedProject.setSqoops(sqoops);
Expand Down Expand Up @@ -390,7 +390,7 @@ private void setDatax(Long projectId, Map<String, Set<Long>> moduleIdsMap, boole
LOG.info("dataxStr:{}", dataxStr);
LOG.info("ExchangisJobVo sqoop: {}", job.getContent());
LOG.info("getCreateTime: {}", job.getId());
LOG.info("executorUser999: {}", job.getExecuteUser());
LOG.info("executorUser: {}", job.getExecuteUser());
dataxs.add(job);
}
exportedProject.setDataxes(dataxs);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -179,10 +179,9 @@ public void addSqoopTask (List<ExchangisJobVo> sqoops, List<ExchangisProject> pr
idCatalog.getSqoop().put(oldId, existingId);
throw new ExchangisJobServerException(31101, "Already exits duplicated job name(存在重复任务名称) jobName is:" + "[" + sqoop.getJobName() + "]");
} else {
//sqoop.setJobName("hahaha");
LOG.info("Sqoop job content is: {}, Modify user is: {}, jobType is: {}", sqoop.getContent(), sqoop.getExecuteUser(), sqoop.getJobType());
ExchangisJobVo jobVo = jobInfoService.createJob(sqoop);
LOG.info("oldId: {}, newid: {}, jobName0000000: {}", sqoop.getId(), jobVo.getId(), jobVo.getJobName());
LOG.info("oldId: {}, newid: {}, jobName: {}", sqoop.getId(), jobVo.getId(), jobVo.getJobName());
idCatalog.getSqoop().put(oldId, jobVo.getId());
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ public static InternalResponseRef getResponseRef(WorkspaceRequestRef requestRef,
}
LOG.info("User {} try to request Exchangis with url {} and labels {}.", httpAction.getUser(), httpAction.getURL(), requestRef.getDSSLabels().get(0).getValue().get("DSSEnv"));
HttpResult httpResult = ssoRequestOperation.requestWithSSO(ssoUrlBuilderOperation, httpAction);
LOG.info("responseBody00000:{}", httpResult.getResponseBody());
LOG.info("responseBody:{}", httpResult.getResponseBody());
InternalResponseRef responseRef = ResponseRef.newInternalBuilder().setResponseBody(httpResult.getResponseBody()).build();
if (responseRef.isFailed()){
throw new ExternalOperationFailedException(95011, responseRef.getErrorMsg());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ public Message deleteProject(HttpServletRequest request, @PathVariable("id") Lon
return Message.error("Cannot delete the outer project (无法删除来自 " + domain + " 的外部项目)");
}
projectService.deleteProject(id);
return ExchangisProjectRestfulUtils.dealOk("删除工程成功888");
return ExchangisProjectRestfulUtils.dealOk("删除工程成功");
} catch (Exception t) {
LOG.error("Failed to delete project for user {}", username, t);
return Message.error("Failed to delete project (删除工程失败)");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ public Message deleteProject(HttpServletRequest request, @PathVariable("name") S
// return Message.error("Cannot delete the outer project (无法删除来自 " + domain + " 的外部项目)");
// }
projectService.deleteProjectByName(name);
return ExchangisProjectRestfulUtils.dealOk("删除工程成功777");
return ExchangisProjectRestfulUtils.dealOk("删除工程成功");
} catch (Exception t) {
LOG.error("Failed to delete project for user {}", username, t);
return Message.error("Failed to delete project (删除工程失败)");
Expand Down

0 comments on commit 19a4165

Please sign in to comment.