Skip to content

Commit

Permalink
refactoring
Browse files Browse the repository at this point in the history
  • Loading branch information
MecBoc committed Dec 18, 2019
1 parent d2b6086 commit 4286823
Show file tree
Hide file tree
Showing 3 changed files with 1 addition and 101 deletions.
2 changes: 0 additions & 2 deletions RScripts/relais/relais.R
Original file line number Diff line number Diff line change
Expand Up @@ -184,8 +184,6 @@ fellegisunter <- function(workset, roles, wsparams=NULL, ...) {

result <-list( workset_out=r_out, roles_out= roles,rolesgroup_out= rolesgroup, var_est = var_est, log = stdout)



sink()
close(con)
return(result)
Expand Down
61 changes: 0 additions & 61 deletions src/main/java/it/istat/is2/app/dao/SqlGenericDao.java
Original file line number Diff line number Diff line change
Expand Up @@ -120,67 +120,6 @@ public List<Object[]> findWorKSetDataViewParamsbyQuery( List<Object[]> resulFie
}


/**
 * Legacy variant: loads the column-oriented workset rows for one data processing
 * run via a hand-built MySQL native query, paginated by row index.
 * <p>
 * The query unpacks each workset's JSON {@code content} column with
 * {@code json_table}, ranks the JSON elements with {@code DENSE_RANK()} to get a
 * per-row index ({@code adx}), restricts to the window
 * {@code row_inf < adx <= row_sup}, and re-aggregates the surviving values back
 * into a JSON array string per column via {@code group_concat}.
 *
 * @param idDataProcessing id of the data processing run (bound as a named parameter)
 * @param typeIO           IO type filter ({@code sv.CLS_TYPE_IO_ID})
 * @param groupRole        optional role-group filter; {@code null} disables it
 *                         (see the {@code :groupRole is null || ...} predicate)
 * @param row_inf          exclusive lower bound of the row window
 * @param row_sup          inclusive upper bound of the row window
 * @param paramsFilter     optional column-name -> value filters; each entry adds a
 *                         subquery keeping only row indices where that column equals
 *                         the value
 * @return worksets whose {@code content} holds only the selected window of values
 */
// NOTE(review): paramsFilter map KEYS are concatenated directly into the SQL text
// below (":n_" + key, ":v_" + key) — only the values are bound as parameters. If a
// key can ever come from user input this is a SQL-injection vector; confirm keys are
// validated upstream.
public List<Workset> findWorkSetDatasetColumnByQuery_old(Long idDataProcessing, Integer typeIO, Integer groupRole,
Integer row_inf, Integer row_sup, HashMap<String, String> paramsFilter) {

// Outer query: re-assemble the windowed values into a JSON-array string per column.
String query = " "
+ "SELECT rs1.id as id, "
+ " rs1.name as name, "
+ " rs1.order_code as order_code, "
+ " rs1.CLS_DATA_TYPE_ID as CLS_DATA_TYPE_ID, "
+ " rs1.value_parameter as value_parameter, "
+ " rs1.paginationTotalRows as content_size,"
+ " concat('[', group_concat( concat('\"',rs1.v,'\"')"
+ " ORDER BY rs1.idx ASC),']' ) AS content "
+ "FROM ("
// Middle layer: total row count per column (max rank) for pagination metadata.
+ " select rs.*, max(rs.adx) OVER( PARTITION BY rs.id) as paginationTotalRows "
+ " FROM ("
// Inner layer: explode each workset's JSON content into (idx, value) rows and
// rank them so the window predicate below can address rows by position.
+ " select ss.id as id, "
+ " ss.name as name, "
+ " ss.order_code, "
+ " ss.CLS_DATA_TYPE_ID as CLS_DATA_TYPE_ID, "
+ " ss.value_parameter as value_parameter, "
+ " ss.content_size, "
+ " t.idx, "
+ " t.v,"
+ " DENSE_RANK() OVER(ORDER BY t.idx) as adx "
+ " from "
+" IS2_WORKSET ss, "
+ " IS2_STEP_RUNTIME sv, "
+ " json_table(ss.content , '$[*]' columns( idx FOR ORDINALITY, v TEXT path '$[0]')"
+ " ) t"
+ " where sv.data_processing_id=:idDataProcessing and (:groupRole is null ||sv.ROLE_GROUP=:groupRole) and sv.CLS_TYPE_IO_ID=:typeIO and sv.WORKSET_ID=ss.id and ss.CLS_DATA_TYPE_ID=1 ";
// Each filter entry keeps only row indices where the named column matches the
// given value; filters are AND-combined. The key itself becomes part of the
// parameter names (:n_<key>, :v_<key>) AND of the SQL text — see NOTE above.
if (paramsFilter != null) {
for (String key : paramsFilter.keySet()) {

query += " and t.idx in( select f.idx from IS2_WORKSET si, IS2_STEP_RUNTIME ssv,json_table( si.content, '$[*]' columns "
+ "( idx FOR ORDINALITY, v TEXT path '$[0]') ) f "
+ " where ssv.data_processing_id=:idDataProcessing and (:groupRole is null ||ssv.ROLE_GROUP=:groupRole) and ssv.CLS_TYPE_IO_ID=:typeIO and ssv.WORKSET_ID=si.id and si.name=:n_"
+ key + " and f.v=:v_" + key + " ) ";
}
}
// Pagination window on the dense rank: (row_inf, row_sup].
// NOTE(review): a null :row_sup makes "adx <= :row_sup" UNKNOWN in SQL and would
// return no rows — confirm callers always pass a concrete upper bound here.
query += " order by t.idx asc " + " ) rs " + " ) rs1 "
+ " where rs1.adx >:row_inf and rs1.adx <= :row_sup"
+ " group by rs1.id,rs1.name, rs1.order_code , rs1.CLS_DATA_TYPE_ID ,rs1.value_parameter, rs1.paginationTotalRows ";

Query q = em.createNativeQuery(query, Workset.class);
q.setParameter("idDataProcessing", idDataProcessing);
q.setParameter("typeIO", typeIO);
q.setParameter("row_inf", row_inf);
q.setParameter("row_sup", row_sup);
q.setParameter("groupRole", groupRole);
// Bind the per-filter name/value parameters generated above.
if (paramsFilter != null) {
for (String key : paramsFilter.keySet()) {
String value = paramsFilter.get(key);
q.setParameter("n_" + key, key);
q.setParameter("v_" + key, value);
}
}

@SuppressWarnings("unchecked")
List<Workset> resultList = (List<Workset>) q.getResultList();
return resultList;
}


public List<Object[]> findDatasetIdColAndName(Long dFile) {
Expand Down
39 changes: 1 addition & 38 deletions src/main/java/it/istat/is2/workflow/service/WorkflowService.java
Original file line number Diff line number Diff line change
Expand Up @@ -180,44 +180,7 @@ public String loadWorkSetValoriByDataProcessing(Long idDataProcessing, Integer t
return obj.toString();
}


/**
 * Legacy variant: builds a DataTables-style JSON payload for one page of workset
 * values belonging to a data processing run.
 * <p>
 * Each element of the DAO result is one column; row {@code i} of the output
 * object maps every column name to that column's i-th content value. The
 * response carries the standard DataTables fields {@code data}, {@code draw},
 * {@code recordsTotal} and {@code recordsFiltered}.
 *
 * @param idDataProcessing id of the data processing run
 * @param typeIO           IO type filter passed through to the DAO
 * @param groupRole        optional role-group filter ({@code null} = no filter)
 * @param length           page size (number of rows requested)
 * @param start            zero-based index of the first row of the page
 * @param draw             DataTables draw counter, echoed back unchanged
 * @param paramsFilter     optional column-name -> value filters
 * @return the JSON payload as a string
 * @throws JSONException if assembling the JSON objects fails
 */
public String loadWorkSetValoriByDataProcessing_old(Long idDataProcessing, Integer typeIO, Integer groupRole,
        Integer length, Integer start, Integer draw, HashMap<String, String> paramsFilter) throws JSONException {

    List<Workset> columns = sqlGenericDao.findWorkSetDatasetColumnByQuery_old(idDataProcessing, typeIO, groupRole,
            start, start + length, paramsFilter);

    // Row count for this page and total row count come from the first column;
    // an empty result yields an empty "data" array with zero totals.
    Integer rowCount = 0;
    Integer totalRows = 0;
    if (!columns.isEmpty()) {
        Workset firstColumn = columns.get(0);
        rowCount = firstColumn.getContents().size();
        totalRows = firstColumn.getContentSize();
    }

    // Pivot column-oriented data into one JSON object per row.
    JSONArray rows = new JSONArray();
    for (int rowIdx = 0; rowIdx < rowCount; rowIdx++) {
        JSONObject row = new JSONObject();
        for (Workset column : columns) {
            row.put(column.getName(), column.getContents().get(rowIdx));
        }
        rows.put(row);
    }

    JSONObject response = new JSONObject();
    response.put("data", rows);
    response.put("draw", draw);
    response.put("recordsTotal", totalRows);
    response.put("recordsFiltered", totalRows);

    return response.toString();
}

/**
 * Loads all workset column values for a data processing run, unpaginated.
 * <p>
 * Thin delegate to the DAO with a lower bound of {@code 0} and no upper bound.
 * NOTE(review): the {@code null} upper bound feeds the DAO's
 * {@code adx <= :row_sup} predicate — confirm the underlying query treats a
 * null upper bound as "no limit" rather than matching nothing.
 *
 * @param idDataProcessing id of the data processing run
 * @param typeIO           IO type filter passed through to the DAO
 * @param groupRole        optional role-group filter ({@code null} = no filter)
 * @param paramsFilter     optional column-name -> value filters
 * @return the worksets returned by the DAO
 */
public List<Workset> loadWorkSetValoriByDataProcessing(Long idDataProcessing, Integer typeIO, Integer groupRole,
        HashMap<String, String> paramsFilter) {
    return sqlGenericDao.findWorkSetDatasetColumnByQuery_old(idDataProcessing, typeIO, groupRole, 0, null,
            paramsFilter);
}


public Map<String, List<String>> loadWorkSetValoriByDataProcessingRoleGroupMap(Long idDataProcessing,
Integer groupRole) {
Map<String, List<String>> ret = new LinkedHashMap<>();
Expand Down

0 comments on commit 4286823

Please sign in to comment.