Skip to content

Commit

Permalink
fix: dao isolation in arc-web
Browse files Browse the repository at this point in the history
  • Loading branch information
Nolife999 committed Oct 3, 2023
1 parent 72a691f commit a529546
Show file tree
Hide file tree
Showing 6 changed files with 165 additions and 127 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,12 @@ public class ArcWebGenericDao {

private static final Logger LOGGER = LogManager.getLogger(ArcWebGenericDao.class);


/**
 * Probe the database connection by executing a trivial query ("select true")
 * on the default connection pool (pool index 0).
 * <p>
 * Used by the web layer to report database availability in the GUI; the query
 * result is discarded — only success/failure matters.
 *
 * @throws ArcException if the database cannot be reached or the query fails
 */
public void execQueryTestDatabaseConnection() throws ArcException
{
UtilitaireDao.get(0).executeRequest(null, new ArcPreparedStatementBuilder("select true"));
}

/**
* Get the sandbox list to be show in GUI.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,10 @@
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestParam;

import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
import fr.insee.arc.core.dataobjects.DataObjectService;
import fr.insee.arc.core.service.global.bo.Sandbox;
import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
import fr.insee.arc.core.util.LoggerDispatcher;
import fr.insee.arc.utils.dao.UtilitaireDao;
import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
import fr.insee.arc.utils.structure.AttributeValue;
import fr.insee.arc.utils.textUtils.IConstanteCaractere;
Expand Down Expand Up @@ -67,7 +65,7 @@ public abstract class ArcWebGenericService<T extends ArcModel, D extends IDao> i
protected String repertoire;

@Autowired
private ArcWebGenericDao indexDao;
private ArcWebGenericDao arcWebGenericDao;

private Map<String, String> envMap;

Expand Down Expand Up @@ -148,7 +146,7 @@ public void initializeModel(@ModelAttribute T arcModel, Model model,


// get declared sandboxes
this.envMap= indexDao.getSandboxList();
this.envMap= arcWebGenericDao.getSandboxList();

if (this.bacASable == null) {
// by default bacASable is the first element of the linkedhashmap
Expand Down Expand Up @@ -217,7 +215,9 @@ protected boolean getDataBaseStatus() {
LoggerHelper.debug(LOGGER, "getDataBaseStatus()");
// test the database connection
try {
UtilitaireDao.get(0).executeRequest(null, new ArcPreparedStatementBuilder("select true"));

arcWebGenericDao.execQueryTestDatabaseConnection();

setDataBaseOk(true);

} catch (Exception e) {
Expand Down
130 changes: 129 additions & 1 deletion arc-web/src/main/java/fr/insee/arc/web/gui/export/dao/ExportDao.java
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
package fr.insee.arc.web.gui.export.dao;

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
Expand Down Expand Up @@ -84,7 +89,16 @@ public void startExportUpdateState(List<String> fileName, int fileIndex, boolean
UtilitaireDao.get(0).executeRequest(vObjectService.getConnection(), query);
}

public HashMap<String, ArrayList<String>> exportFileRetrieve(int n, List<String> howToExport,
/**
* retrieve rules
* @param n
* @param howToExport
* @param tablesToExport
* @param bacASable
* @return
* @throws ArcException
*/
private HashMap<String, ArrayList<String>> exportFileRetrieveRules(int n, List<String> howToExport,
List<String> tablesToExport, String bacASable) throws ArcException {
// if columns,orders table is specified, get the information from database metadata
String howToExportReworked;
Expand Down Expand Up @@ -113,6 +127,120 @@ public ResultSet exportFileFilteredOrdered(Statement stmt, int n, List<String> t
return stmt.executeQuery(query.getQuery().toString());
}


/**
* parse rule and export file
* @param h
* @param n
* @param bw
* @param fw
* @throws ArcException
* @throws IOException
* @throws SQLException
*/
/**
 * Parse the export rules registered in {@code h} and stream export file number
 * {@code n} as semicolon-separated lines to the given writer.
 * <p>
 * Fix: the JDBC {@code Connection} and {@code Statement} are now managed with
 * try-with-resources. Previously {@code c.close()} was only reached on the
 * success path and the {@code Statement} was never closed, so any exception
 * thrown mid-export leaked the driver connection.
 *
 * @param h  export rule map (keys: table_to_export, headers, nulls, filter_table,
 *           order_table, nomenclature_export, columns_array_header, columns_array_value)
 * @param n  index of the export file to produce within the rule lists
 * @param bw writer receiving the formatted output lines
 * @param fw underlying stream, flushed at the end so bytes reach the target file
 * @throws ArcException on rule-retrieval or query-building failure
 * @throws IOException  on write failure
 * @throws SQLException on database access failure
 */
public void exportFile(HashMap<String, ArrayList<String>> h, int n, BufferedWriter bw, FileOutputStream fw)
		throws ArcException, IOException, SQLException {
	ArrayList<String> tablesToExport = h.get("table_to_export");
	ArrayList<String> headers = h.get("headers");
	ArrayList<String> nulls = h.get("nulls");
	ArrayList<String> filterTable = h.get("filter_table");
	ArrayList<String> orderTable = h.get("order_table");
	ArrayList<String> howToExport = h.get("nomenclature_export");
	ArrayList<String> headersToScan = h.get("columns_array_header");
	ArrayList<String> valuesToScan = h.get("columns_array_value");

	HashMap<String, Integer> pos = new HashMap<>();
	ArrayList<String> headerLine = new ArrayList<>();

	// replace the raw rules by the reworked column/position rules from database metadata
	h = exportFileRetrieveRules(n, howToExport, tablesToExport, this.dataObjectService.getSandboxSchema());

	for (int i = 0; i < h.get("varbdd").size(); i++) {
		pos.put(h.get("varbdd").get(i), Integer.parseInt(h.get("pos").get(i)));
		headerLine.add(h.get("varbdd").get(i));
	}

	// write header line if required
	if (!StringUtils.isEmpty(headers.get(n))) {
		for (String o : headerLine) {
			bw.write(o + ";");
		}
		bw.write("\n");
	}

	int maxPos = Integer.parseInt(h.get("maxp").get(0));

	// try-with-resources guarantees the connection and statement are released
	// even when the export fails part-way through
	try (Connection c = UtilitaireDao.get(0).getDriverConnexion()) {
		// autocommit must be off so that setFetchSize streams rows with a cursor
		// instead of materializing the whole result set
		c.setAutoCommit(false);

		try (Statement stmt = c.createStatement()) {
			stmt.setFetchSize(5000);

			try (ResultSet res = exportFileFilteredOrdered(stmt, n, tablesToExport, filterTable, orderTable,
					this.dataObjectService.getSandboxSchema())) {
				writeExportRows(res, n, maxPos, pos, nulls, headersToScan, valuesToScan, bw);
			}
		}
	}
	bw.flush();
	fw.flush();

}

/**
 * Stream every row of {@code res} to the writer as one semicolon-terminated
 * line, placing each value at the column position registered in {@code pos}.
 */
private void writeExportRows(ResultSet res, int n, int maxPos, HashMap<String, Integer> pos,
		ArrayList<String> nulls, ArrayList<String> headersToScan, ArrayList<String> valuesToScan,
		BufferedWriter bw) throws SQLException, IOException {
	ResultSetMetaData rsmd = res.getMetaData();

	while (res.next()) {
		// reset the output line: one empty slot per exported column position
		ArrayList<String> output = new ArrayList<>();
		for (int k = 0; k < maxPos; k++) {
			output.add("");
		}

		String[] tabH = null;
		String[] tabV = null;
		for (int i = 1; i <= rsmd.getColumnCount(); i++) {
			String colName = rsmd.getColumnLabel(i).toLowerCase();

			if (colName.equals(headersToScan.get(n))) {
				// array column holding the variable names
				tabH = (String[]) res.getArray(i).getArray();
			} else if (colName.equals(valuesToScan.get(n))) {
				// array column holding the variable values
				tabV = (String[]) res.getArray(i).getArray();
			} else if (pos.get(colName) != null) {
				// if null values mustn't be rendered as "null" and the element is null, skip it
				if (!(StringUtils.isEmpty(nulls.get(n)) && StringUtils.isEmpty(res.getString(i)))) {
					output.set(pos.get(colName), res.getString(i));
				}
			}
		}

		// expand the (name, value) array pairs into their registered positions
		if (tabH != null && tabV != null) {
			for (int k = 0; k < tabH.length; k++) {
				if (pos.get(tabH[k].toLowerCase()) != null) {
					// same null-rendering rule as scalar columns
					if (!(StringUtils.isEmpty(nulls.get(n)) && StringUtils.isEmpty(tabV[k]))) {
						output.set(pos.get(tabH[k].toLowerCase()), tabV[k]);
					}
				}
			}
		}

		for (String o : output) {
			bw.write(o + ";");
		}
		bw.write("\n");
	}
}


/**
 * @return the {@link VObjectService} this DAO uses to run its requests
 */
public VObjectService getvObjectService() {
return vObjectService;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,16 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.ui.Model;

import fr.insee.arc.utils.dao.UtilitaireDao;
import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.web.gui.all.util.VObject;

Expand Down Expand Up @@ -101,7 +95,7 @@ private void exportPlainText(String dirOut, HashMap<String, ArrayList<String>> h

try (FileOutputStream fw = new FileOutputStream(fOut)) {
try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fw, StandardCharsets.UTF_8))) {
exportFile(h, n, bw, fw);
dao.exportFile(h, n, bw, fw);
}
}
}
Expand All @@ -118,7 +112,7 @@ private void exportZip(String dirOut, HashMap<String, ArrayList<String>> h, int
zos.putNextEntry(ze);

try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(zos, StandardCharsets.UTF_8))) {
exportFile(h, n, bw, fw);
dao.exportFile(h, n, bw, fw);
zos.flush();
zos.closeEntry();
}
Expand All @@ -135,113 +129,11 @@ private void exportGz(String dirOut, HashMap<String, ArrayList<String>> h, int n
try (FileOutputStream fw = new FileOutputStream(fOut)) {
try (GZIPOutputStream gzos = new GZIPOutputStream(fw)) {
try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(gzos, StandardCharsets.UTF_8))) {
exportFile(h, n, bw, fw);
dao.exportFile(h, n, bw, fw);
gzos.flush();
}
}
}
}

/**
 * Parse export rules and stream export file number {@code n} as
 * semicolon-separated lines to the given writer.
 * <p>
 * NOTE(review): this is the pre-refactor, service-side copy being removed by
 * this commit in favour of the DAO-side implementation — the diff marks these
 * lines as deletions; confirm against the full file.
 *
 * @param h  export rule map (table_to_export, headers, nulls, filter_table,
 *           order_table, nomenclature_export, columns_array_header, columns_array_value)
 * @param n  index of the export file within the rule lists
 * @param bw writer receiving the output lines
 * @param fw underlying stream, flushed once the export completes
 * @throws ArcException on rule-retrieval or query-building failure
 * @throws IOException  on write failure
 * @throws SQLException on database access failure
 */
private void exportFile(HashMap<String, ArrayList<String>> h, int n, BufferedWriter bw, FileOutputStream fw)
throws ArcException, IOException, SQLException {
ArrayList<String> tablesToExport = h.get("table_to_export");
ArrayList<String> headers = h.get("headers");
ArrayList<String> nulls = h.get("nulls");
ArrayList<String> filterTable = h.get("filter_table");
ArrayList<String> orderTable = h.get("order_table");
ArrayList<String> howToExport = h.get("nomenclature_export");
ArrayList<String> headersToScan = h.get("columns_array_header");
ArrayList<String> valuesToScan = h.get("columns_array_value");

// colName -> output position, built from the reworked rules below
HashMap<String, Integer> pos = new HashMap<>();
ArrayList<String> headerLine = new ArrayList<>();

// replace the raw rules by the reworked column/position rules
h = dao.exportFileRetrieve(n, howToExport, tablesToExport, getBacASable());

for (int i = 0; i < h.get("varbdd").size(); i++) {
pos.put(h.get("varbdd").get(i), Integer.parseInt(h.get("pos").get(i)));
headerLine.add(h.get("varbdd").get(i));
}

// write header line if required
if (!StringUtils.isEmpty(headers.get(n))) {
for (String o : headerLine) {
bw.write(o + ";");
}
bw.write("\n");
}

int maxPos = Integer.parseInt(h.get("maxp").get(0));

// NOTE(review): c and stmt are not closed if an exception is thrown before
// c.close() below — resource leak, fixed in the replacement DAO version
Connection c = UtilitaireDao.get(0).getDriverConnexion();
// autocommit off so setFetchSize streams rows with a cursor
c.setAutoCommit(false);

Statement stmt = c.createStatement();
stmt.setFetchSize(5000);

try (ResultSet res = dao.exportFileFilteredOrdered(stmt, n, tablesToExport, filterTable, orderTable, getBacASable())) {
ResultSetMetaData rsmd = res.getMetaData();

ArrayList<String> output;
String[] tabH;
String[] tabV;
String colName;
while (res.next()) {
// reset the output list: one empty slot per exported column position
output = new ArrayList<String>();
for (int k = 0; k < maxPos; k++) {
output.add("");
}

// todo acts as a "column not yet consumed" flag (an if/else-if chain in effect)
boolean todo = false;
tabH = null;
tabV = null;
for (int i = 1; i <= rsmd.getColumnCount(); i++) {
colName = rsmd.getColumnLabel(i).toLowerCase();

todo = true;
// case: this column is one of the array columns
if (todo && colName.equals(headersToScan.get(n))) {
todo = false;
tabH = (String[]) res.getArray(i).getArray();
}
if (todo && colName.equals(valuesToScan.get(n))) {
todo = false;
tabV = (String[]) res.getArray(i).getArray();
}
if (todo) {
todo = false;
if (pos.get(colName) != null) {
// if null values mustn't be rendered as "null" and the element is null then don't write
if (!(StringUtils.isEmpty(nulls.get(n)) && StringUtils.isEmpty(res.getString(i)))) {
output.set(pos.get(colName), res.getString(i));
}
}
}
}

// expand the (name, value) array pairs into their registered positions
if (tabH != null && tabV != null) {
for (int k = 0; k < tabH.length; k++) {
if (pos.get(tabH[k].toLowerCase()) != null) {
// same null-rendering rule as scalar columns
if (!(StringUtils.isEmpty(nulls.get(n)) && StringUtils.isEmpty(tabV[k]))) {
output.set(pos.get(tabH[k].toLowerCase()), tabV[k]);
}
}
}
}

for (String o : output) {
bw.write(o + ";");
}
bw.write("\n");
}
}
c.close();
bw.flush();
fw.flush();

}

}
Loading

0 comments on commit a529546

Please sign in to comment.