diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java index 6877db832..e6557a22f 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java +++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java @@ -5,7 +5,6 @@ import java.util.Map; import fr.insee.arc.utils.dao.GenericPreparedStatementBuilder; -import fr.insee.arc.utils.dao.SQL; public class ArcPreparedStatementBuilder extends GenericPreparedStatementBuilder { diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java index dc6d90cc2..9d301128e 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java @@ -31,9 +31,6 @@ public DataObjectService(String sandboxSchema) { */ public String getView(ViewEnum e) { - if (e.getTableLocation().equals(SchemaEnum.TEMPORARY)) { - return e.getTableName(); - } if ((e.getTableLocation().equals(SchemaEnum.SANDBOX) || e.getTableLocation().equals(SchemaEnum.SANDBOX_GENERATED)) && this.sandboxSchema != null) { return this.sandboxSchema + SQL.DOT.getSqlCode() + e.getTableName(); @@ -44,7 +41,7 @@ public String getView(ViewEnum e) { } public static String getFullTableNameInSchema(SchemaEnum schema, String tablename) { - return schema.getSchemaName().equals("") ? tablename : schema.getSchemaName() + SQL.DOT.getSqlCode() + tablename; + return schema.equals(SchemaEnum.TEMPORARY) ? tablename : schema.getSchemaName() + SQL.DOT.getSqlCode() + tablename; } public String getSandboxSchema() { diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java index d50b0f7d1..45a9b2496 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java +++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java @@ -185,6 +185,10 @@ public String getFullName(String schema) { return schema + SQL.DOT.getSqlCode() + this.tableName; } + public static String getFullName(String schema, String providedTableName) { + return schema + SQL.DOT.getSqlCode() + providedTableName; + } + public ColumnEnum col(ColumnEnum e) { return this.getColumns().get(e); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/model/XMLConstant.java b/arc-core/src/main/java/fr/insee/arc/core/model/XMLConstant.java new file mode 100644 index 000000000..3dbbeac80 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/model/XMLConstant.java @@ -0,0 +1,9 @@ +package fr.insee.arc.core.model; + +public class XMLConstant { + + + // racine xml + public static final String ROOT = "root"; + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java index 1a181a0f1..c01adb5c3 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java @@ -4,7 +4,6 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -16,18 +15,15 @@ import fr.insee.arc.core.dataobjects.ViewEnum; import 
fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.core.model.TraitementTableParametre; import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.global.dao.DatabaseConnexionConfiguration; import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.global.scalability.ScalableConnection; -import fr.insee.arc.core.service.p2chargement.bo.Norme; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; -import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; import fr.insee.arc.utils.ressourceUtils.SpringApplicationContext; import fr.insee.arc.utils.structure.GenericBean; import fr.insee.arc.utils.textUtils.IConstanteNumerique; @@ -41,37 +37,25 @@ public abstract class ApiService implements IConstanteNumerique { protected int maxParallelWorkers; - - // racine xml - public static final String ROOT = "root"; - // anti-spam delay when thread chain error protected static final int PREVENT_ERROR_SPAM_DELAY = 100; protected ScalableConnection connexion; - + + protected Sandbox coordinatorSandbox; + protected String envExecution; protected String tablePrevious; + protected String previousPhase; protected String currentPhase; + protected String tablePil; protected String tablePilTemp; - protected String tableNorme; - protected String tableJeuDeRegle; - protected String tableChargementRegle; - protected String tableNormageRegle; - protected String tableMappingRegle; - protected String tableControleRegle; - protected Integer nbEnr; - protected String tableCalendrier; - protected String directoryRoot; + + private Integer nbEnr; protected String paramBatch = null; - protected String currentIdSource; - protected String directoryIn; - protected List listeNorme; - - protected Sandbox coordinatorSandbox; - + // made to report the number of object processed by the phase private int reportNumberOfObject = 0; @@ -81,59 +65,52 @@ public abstract class ApiService implements IConstanteNumerique { protected Boolean todo = false; - private HashMap> tabIdSource; + protected HashMap> tabIdSource; public ApiService() { super(); springInit(); } - protected ApiService(String aCurrentPhase, String aEnvExecution, String aDirectoryRoot, - Integer aNbEnr, String paramBatch) { - this(); + protected ApiService(String aCurrentPhase, String aEnvExecution, String aDirectoryRoot, Integer aNbEnr, + String paramBatch) { + StaticLoggerDispatcher.info(LOGGER_APISERVICE, "** initialiserVariable **"); + try { - this.connexion = new ScalableConnection(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getDriverConnexion()); + this.connexion = new ScalableConnection( + UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getDriverConnexion()); this.coordinatorSandbox = new Sandbox(this.connexion.getCoordinatorConnection(), aEnvExecution); } catch (Exception ex) { LoggerHelper.error(LOGGER_APISERVICE, ApiService.class, "Error in initializing connexion"); } - this.setParamBatch(paramBatch); - // Initialisation de la phase - this.setCurrentPhase(aCurrentPhase); - this.setPreviousPhase(TraitementPhase.valueOf(this.getCurrentPhase()).previousPhase().toString()); - // Table en entrée - this.setEnvExecution(aEnvExecution); + this.envExecution = aEnvExecution; + - this.setDirectoryRoot(aDirectoryRoot); + // current 
phase and compute the previous phase + this.currentPhase = aCurrentPhase; + this.previousPhase = TraitementPhase.valueOf(this.getCurrentPhase()).previousPhase().toString(); + + // number of object to be proceed + this.nbEnr = aNbEnr; + + // indicate if api is triggered by batch or not + this.paramBatch = paramBatch; - this.setTablePrevious((TableNaming.dbEnv(aEnvExecution) + this.getPreviousPhase() + "_" + TraitementEtat.OK).toLowerCase()); + + // inputTables + this.tablePrevious = (TableNaming.dbEnv(aEnvExecution) + this.getPreviousPhase() + "_" + TraitementEtat.OK).toLowerCase(); // Tables de pilotage et pilotage temporaire - this.setTablePil(ViewEnum.PILOTAGE_FICHIER.getFullName(aEnvExecution)); - this.tablePilTemp = TableNaming.temporaryTableName(aEnvExecution, aCurrentPhase, + this.tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(aEnvExecution); + this.tablePilTemp = TableNaming.temporaryTableName(aEnvExecution, aCurrentPhase, ViewEnum.PILOTAGE_FICHIER.getTableName(), "0"); - this.setTableNorme(TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.NORME); - this.tableCalendrier = TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.CALENDRIER; - // Tables venant de l'initialisation globale - this.setTableJeuDeRegle(TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.JEUDEREGLE); - this.setTableChargementRegle(TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.CHARGEMENT_REGLE); - this.setTableNormageRegle(TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.NORMAGE_REGLE); - this.setTableControleRegle(TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.CONTROLE_REGLE); - this.setTableMappingRegle(TableNaming.dbEnv(aEnvExecution) + TraitementTableParametre.MAPPING_REGLE); - this.setTableOutKo((TableNaming.dbEnv(aEnvExecution) + this.getCurrentPhase() + "_" + TraitementEtat.KO).toLowerCase()); - this.setNbEnr(aNbEnr); StaticLoggerDispatcher.info(LOGGER_APISERVICE, "** Fin constructeur ApiService **"); } - /** - * Compteur simple pour tester la boucle d'execution - */ - private String tableOutKo; - /** * Initialisation des variable et des noms de table * @@ -147,12 +124,13 @@ private boolean initialiser() { // Vérifie si y'a des sources à traiter if (this.todo) { try { - UtilitaireDao.get(0).executeBlock(this.connexion.getCoordinatorConnection(), DatabaseConnexionConfiguration.configConnection(this.getEnvExecution())); + UtilitaireDao.get(0).executeBlock(this.connexion.getCoordinatorConnection(), + DatabaseConnexionConfiguration.configConnection(this.getEnvExecution())); } catch (ArcException ex) { LoggerHelper.error(LOGGER_APISERVICE, ApiService.class, "initialiser()", ex); } - register(this.connexion.getCoordinatorConnection(), this.getPreviousPhase(), this.getCurrentPhase(), this.getTablePil(), - this.tablePilTemp, this.getNbEnr()); + register(this.connexion.getCoordinatorConnection(), this.getPreviousPhase(), this.getCurrentPhase(), + this.getTablePil(), this.tablePilTemp, this.nbEnr); } return this.todo; @@ -175,7 +153,7 @@ private void springInit() { * @param phaseAncien * @return */ - private boolean checkTodo(String tablePil, String phaseAncien, String phaseNouveau) { + private boolean checkTodo(String tablePil, String phaseAncien) { ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); boolean checkTodoResult = false; requete.append("SELECT 1 FROM " + tablePil + " a "); @@ -229,10 +207,10 @@ protected String marqueJeuDeRegleApplique(String pilTemp) { protected String marqueJeuDeRegleApplique(String pilTemp, String 
defaultEtatTraitement) { StringBuilder requete = new StringBuilder(); requete.append("WITH "); - requete.append( - "prep AS (SELECT a."+ColumnEnum.ID_SOURCE.getColumnName()+", a.id_norme, a.periodicite, b.validite_inf, b.validite_sup, b.version "); + requete.append("prep AS (SELECT a." + ColumnEnum.ID_SOURCE.getColumnName() + + ", a.id_norme, a.periodicite, b.validite_inf, b.validite_sup, b.version "); requete.append(" FROM " + pilTemp + " a "); - requete.append(" INNER JOIN " + this.getTableJeuDeRegle() + requete.append(" INNER JOIN " + ViewEnum.JEUDEREGLE.getFullName(this.envExecution) + " b ON a.id_norme=b.id_norme AND a.periodicite=b.periodicite AND b.validite_inf <=a.validite::date AND b.validite_sup>=a.validite::date "); requete.append(" WHERE phase_traitement='" + this.getCurrentPhase() + "') "); requete.append("UPDATE " + pilTemp + " AS a "); @@ -245,21 +223,6 @@ protected String marqueJeuDeRegleApplique(String pilTemp, String defaultEtatTrai return requete.toString(); } - /** - * promote the application to the full right user role if required. required is - * true if the restrictedUserAccount exists - * - * @throws ArcException - */ - public String switchToFullRightRole() { - PropertiesHandler properties = PropertiesHandler.getInstance(); - if (!properties.getDatabaseRestrictedUsername().equals("")) { - return FormatSQL.changeRole(properties.getDatabaseUsername()); - } - return ""; - } - - public abstract void executer() throws ArcException; /** @@ -308,11 +271,13 @@ public HashMap> pilotageListIdsource(String tablePilot String etat) { LoggerHelper.info(LOGGER_APISERVICE, "pilotageListIdsource"); ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append("SELECT container, "+ColumnEnum.ID_SOURCE.getColumnName()+" FROM " + tablePilotage + " "); + requete.append("SELECT container, " + ColumnEnum.ID_SOURCE.getColumnName() + " FROM " + tablePilotage + " "); requete.append("WHERE phase_traitement=" + requete.quoteText(aCurrentPhase) + " "); requete.append("AND " + requete.quoteText(etat) + "=ANY(etat_traitement); "); try { - return new GenericBean(UtilitaireDao.get(0).executeRequest(this.connexion.getCoordinatorConnection(), requete)).mapContent(); + return new GenericBean( + UtilitaireDao.get(0).executeRequest(this.connexion.getCoordinatorConnection(), requete)) + .mapContent(); } catch (ArcException ex) { LoggerHelper.error(LOGGER_APISERVICE, ApiService.class, "pilotageListIdSource()", ex); } @@ -344,11 +309,10 @@ public static StringBuilder pilotageMarkIdsource(String tablePilotage, String id requete.append(", jointure= '" + jointure[0] + "'"); } - requete.append("WHERE "+ColumnEnum.ID_SOURCE.getColumnName()+"='" + idSource + "';\n"); + requete.append("WHERE " + ColumnEnum.ID_SOURCE.getColumnName() + "='" + idSource + "';\n"); return requete; } - /** * Requête de sélection de la liste des colonnes des tables métier associée à * une norme @@ -366,7 +330,6 @@ public static ArcPreparedStatementBuilder listeColonneTableMetierSelonFamilleNor return requete; } - /** * * @return le temps d'execution @@ -381,7 +344,7 @@ public ServiceReporting invokeApi() { || this.getCurrentPhase().equals(TraitementPhase.RECEPTION.toString())) { this.todo = true; } else { - this.todo = checkTodo(this.getTablePil(), this.getPreviousPhase(), this.getCurrentPhase()); + this.todo = checkTodo(this.getTablePil(), this.getPreviousPhase()); } LoggerHelper.info(LOGGER_APISERVICE, "A faire - " + this.getCurrentPhase() + " : " + this.todo); @@ -391,8 +354,8 @@ public ServiceReporting 
invokeApi() { } catch (ArcException ex) { LoggerHelper.error(LOGGER_APISERVICE, "Erreur dans " + this.getCurrentPhase() + ". ", ex); try { - this.repriseSurErreur(this.connexion.getCoordinatorConnection(), this.getCurrentPhase(), this.getTablePil(), ex, - "aucuneTableADroper"); + this.repriseSurErreur(this.connexion.getCoordinatorConnection(), this.getCurrentPhase(), + this.getTablePil(), ex, "aucuneTableADroper"); } catch (Exception ex2) { LoggerHelper.error(LOGGER_APISERVICE, "Error in ApiService.invokeApi.repriseSurErreur"); } @@ -408,7 +371,6 @@ public ServiceReporting invokeApi() { } - public String getTablePilTemp() { return this.tablePilTemp; } @@ -450,58 +412,13 @@ private void repriseSurErreur(Connection connexion, String phase, String tablePi requete.append("WITH t0 AS ( "); requete.append(PilotageOperations.queryUpdatePilotageError(phase, tablePil, exception)); - requete.append("\n RETURNING "+ColumnEnum.ID_SOURCE.getColumnName()+") "); + requete.append("\n RETURNING " + ColumnEnum.ID_SOURCE.getColumnName() + ") "); requete.append(PilotageOperations.resetPreviousPhaseMark(tablePil, null, "t0")); UtilitaireDao.get(0).executeBlock(connexion, requete); } - /** - * Remise dans l'état juste avant le lancement des controles et insertion dans - * une table d'erreur pour un fichier particulier - * - * @param connexion - * @param phase - * @param tablePil - * @param exception - * @param tableDrop - * @throws ArcException - */ - public void repriseSurErreur(Connection connexion, String phase, String tablePil, String idSource, - ArcException exception, String... tableDrop) throws ArcException { - // nettoyage de la connexion - // comme on arrive ici à cause d'une erreur, la base de donnée attend une fin de - // la transaction - // si on lui renvoie une requete SQL, il la refuse avec le message - // ERROR: current transaction is aborted, commands ignored until end of - // transaction block - try { - this.connexion.getCoordinatorConnection().setAutoCommit(false); - this.connexion.getCoordinatorConnection().rollback(); - } catch (SQLException rollbackException) { - throw new ArcException(rollbackException, ArcExceptionMessage.DATABASE_ROLLBACK_FAILED); - } - - // promote the application user account to full right - UtilitaireDao.get(0).executeImmediate(connexion, switchToFullRightRole()); - - StringBuilder requete = new StringBuilder(); - - for (int i = 0; i < tableDrop.length; i++) { - requete.append("DROP TABLE IF EXISTS " + tableDrop[i] + ";"); - } - requete.append(PilotageOperations.queryUpdatePilotageError(phase, tablePil, exception)); - - requete.append("\n AND "+ColumnEnum.ID_SOURCE.getColumnName()+" = '" + idSource + "' "); - requete.append("\n ;"); - - requete.append(PilotageOperations.resetPreviousPhaseMark(tablePil, idSource, null)); - - UtilitaireDao.get(0).executeBlock(connexion, requete); - } - - /** * permet de récupérer un tableau de la forme id_source | id1 , id2, id3 ... * type_comp | comp1,comp2, comp3 ... @@ -510,17 +427,15 @@ public void repriseSurErreur(Connection connexion, String phase, String tablePil * @throws ArcException */ protected HashMap> recuperationIdSource() throws ArcException { - - ArcPreparedStatementBuilder query=new ArcPreparedStatementBuilder(); - query.append("SELECT p."+ColumnEnum.ID_SOURCE.getColumnName()+" "); + + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.append("SELECT p." 
+ ColumnEnum.ID_SOURCE.getColumnName() + " "); query.append("FROM " + this.getTablePilTemp() + " p "); - query.append("ORDER BY "+ColumnEnum.ID_SOURCE.getColumnName()); + query.append("ORDER BY " + ColumnEnum.ID_SOURCE.getColumnName()); query.append(";"); - + HashMap> pil = new GenericBean( - UtilitaireDao.get(0) - .executeRequest(this.connexion.getCoordinatorConnection(), query )) - .mapContent(); + UtilitaireDao.get(0).executeRequest(this.connexion.getCoordinatorConnection(), query)).mapContent(); return (pil); @@ -530,134 +445,34 @@ public String getEnvExecution() { return envExecution; } - public void setEnvExecution(String envExecution) { - this.envExecution = envExecution; - } - public HashMap> getTabIdSource() { return tabIdSource; } - protected void setTabIdSource(HashMap> tabIdSource) { - this.tabIdSource = tabIdSource; - } - public String getTablePil() { return tablePil; } - public void setTablePil(String tablePil) { - this.tablePil = tablePil; - } - public String getPreviousPhase() { return previousPhase; } - public void setPreviousPhase(String previousPhase) { - this.previousPhase = previousPhase; - } - public String getCurrentPhase() { return currentPhase; } - public void setCurrentPhase(String currentPhase) { - this.currentPhase = currentPhase; - } - public String getTablePrevious() { return tablePrevious; } - public void setTablePrevious(String tablePrevious) { - this.tablePrevious = tablePrevious; - } - public String getParamBatch() { return paramBatch; } - protected void setParamBatch(String paramBatch) { - this.paramBatch = paramBatch; - } - - public String getTableJeuDeRegle() { - return tableJeuDeRegle; - } - - public void setTableJeuDeRegle(String tableJeuDeRegle) { - this.tableJeuDeRegle = tableJeuDeRegle; - } - - public String getTableNorme() { - return tableNorme; - } - - public void setTableNorme(String tableNorme) { - this.tableNorme = tableNorme; - } - - public String getTableOutKo() { - return tableOutKo; - } - - public void setTableOutKo(String tableOutKo) { - this.tableOutKo = tableOutKo; - } - public ScalableConnection getConnexion() { return connexion; } - public String getTableControleRegle() { - return tableControleRegle; - } - - public String getTableChargementRegle() { - return tableChargementRegle; - } - - public void setTableChargementRegle(String tableChargementRegle) { - this.tableChargementRegle = tableChargementRegle; - } - - public void setTableControleRegle(String tableControleRegle) { - this.tableControleRegle = tableControleRegle; - } - - public String getTableMappingRegle() { - return tableMappingRegle; - } - - public void setTableMappingRegle(String tableMappingRegle) { - this.tableMappingRegle = tableMappingRegle; - } - - public Integer getNbEnr() { - return nbEnr; - } - - public void setNbEnr(Integer nbEnr) { - this.nbEnr = nbEnr; - } - - public String getTableNormageRegle() { - return tableNormageRegle; - } - - public void setTableNormageRegle(String tableNormageRegle) { - this.tableNormageRegle = tableNormageRegle; - } - - public String getDirectoryRoot() { - return directoryRoot; - } - - public void setDirectoryRoot(String directoryRoot) { - this.directoryRoot = directoryRoot; - } - /** * @return the idSource */ @@ -665,13 +480,6 @@ public String getIdSource() { return idSource; } - /** - * @param idSource the idSource to set - */ - public void setIdSource(String idSource) { - this.idSource = idSource; - } - public int getReportNumberOfObject() { return reportNumberOfObject; } @@ -680,30 +488,12 @@ public void setReportNumberOfObject(int 
reportNumberOfObject) { this.reportNumberOfObject = reportNumberOfObject; } - public String getDirectoryIn() { - return directoryIn; - } - - public void setDirectoryIn(String directoryIn) { - this.directoryIn = directoryIn; - } - - public List getListeNorme() { - return listeNorme; - } - - public void setListeNorme(List listeNorme) { - this.listeNorme = listeNorme; - } - public Sandbox getCoordinatorSandbox() { return coordinatorSandbox; } - public void setCoordinatorSandbox(Sandbox coordinatorSandbox) { - this.coordinatorSandbox = coordinatorSandbox; + public Integer getNbEnr() { + return nbEnr; } - - } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java index 1d37c83f7..a2b2da7a6 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java @@ -2,21 +2,17 @@ import java.io.File; import java.sql.Connection; -import java.util.ArrayList; import java.util.List; -import java.util.stream.Collector; import java.util.stream.Collectors; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; -import fr.insee.arc.core.dataobjects.DataObjectService; import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.utils.dao.CopyObjectsToDatabase; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.utils.utils.FormatSQL; public class DataStorage { diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseConnexionConfiguration.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseConnexionConfiguration.java index 59589f80e..b9b28484c 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseConnexionConfiguration.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseConnexionConfiguration.java @@ -1,6 +1,9 @@ package fr.insee.arc.core.service.global.dao; import fr.insee.arc.utils.dao.ModeRequeteImpl; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; +import fr.insee.arc.utils.utils.FormatSQL; import fr.insee.arc.utils.utils.ManipString; public class DatabaseConnexionConfiguration { @@ -18,4 +21,20 @@ public static StringBuilder configConnection(String anEnvExecution) { return requete; } + + /** + * promote the application to the full right user role if required. 
required is + * true if the restrictedUserAccount exists + * + * @throws ArcException + */ + public static String switchToFullRightRole() { + PropertiesHandler properties = PropertiesHandler.getInstance(); + if (!properties.getDatabaseRestrictedUsername().equals("")) { + return FormatSQL.changeRole(properties.getDatabaseUsername()); + } + return ""; + } + + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java index f45d6aa2a..4679541e7 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java @@ -1,12 +1,10 @@ package fr.insee.arc.core.service.global.dao; import java.sql.Connection; -import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.service.global.scalability.ServiceScalability; import fr.insee.arc.core.util.StaticLoggerDispatcher; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java index 73e57ba93..916551427 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java @@ -2,18 +2,14 @@ import java.io.File; import java.io.IOException; -import java.util.List; import org.apache.commons.io.FileUtils; -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.structure.GenericBean; public class FileSystemManagement { diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java index ec5d23da2..000f8ab41 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java @@ -1,11 +1,8 @@ package fr.insee.arc.core.service.global.dao; import java.sql.Connection; -import java.util.ArrayList; import java.util.List; -import org.apache.commons.lang3.ObjectUtils; - import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.model.TraitementEtat; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java index 3269439e4..ea0cbecab 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java @@ -1,6 +1,7 @@ package fr.insee.arc.core.service.global.dao; import java.sql.Connection; +import java.sql.SQLException; import java.text.SimpleDateFormat; import java.util.Date; @@ -17,6 +18,7 @@ import fr.insee.arc.utils.dao.SQL; import 
fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.utils.utils.FormatSQL; import fr.insee.arc.utils.utils.ManipString; @@ -188,4 +190,47 @@ public static StringBuilder resetPreviousPhaseMark(String tablePil, String idSou requete.append("\n ;"); return requete; } + + /** + * Remise dans l'état juste avant le lancement des controles et insertion dans + * une table d'erreur pour un fichier particulier + * + * @param connexion + * @param phase + * @param tablePil + * @param exception + * @param tableDrop + * @throws ArcException + */ + public static void traitementSurErreur(Connection connexion, String phase, String tablePil, String idSource, + ArcException exception) throws ArcException { + // nettoyage de la connexion + // comme on arrive ici à cause d'une erreur, la base de donnée attend une fin de + // la transaction + // si on lui renvoie une requete SQL, il la refuse avec le message + // ERROR: current transaction is aborted, commands ignored until end of + // transaction block + try { + connexion.setAutoCommit(false); + connexion.rollback(); + } catch (SQLException rollbackException) { + throw new ArcException(rollbackException, ArcExceptionMessage.DATABASE_ROLLBACK_FAILED); + } + + // promote the application user account to full right + UtilitaireDao.get(0).executeImmediate(connexion, DatabaseConnexionConfiguration.switchToFullRightRole()); + + StringBuilder requete = new StringBuilder(); + + requete.append(PilotageOperations.queryUpdatePilotageError(phase, tablePil, exception)); + + requete.append("\n AND "+ColumnEnum.ID_SOURCE.getColumnName()+" = '" + idSource + "' "); + requete.append("\n ;"); + + requete.append(PilotageOperations.resetPreviousPhaseMark(tablePil, idSource, null)); + + UtilitaireDao.get(0).executeBlock(connexion, requete); + } + + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java index 66c3a23b4..9cd352b15 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java @@ -2,7 +2,8 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; -import fr.insee.arc.utils.dao.GenericPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ViewEnum; +import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.utils.FormatSQL; @@ -34,8 +35,8 @@ public static String temporaryTableName(String aEnvExecution, String aCurrentPha } } - public static String globalTableName(String aEnvExecution, String aCurrentPhase, String tableName) { - return dbEnv(aEnvExecution) + aCurrentPhase + "_" + tableName; + public static String globalTableName(String aEnvExecution, String aPhase, TraitementEtat etat) { + return ViewEnum.getFullName(aEnvExecution, aPhase + "_" + etat.toString()); } /** diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableOperations.java index 07c19f553..57ac2b34b 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableOperations.java @@ -3,10 +3,7 @@ import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.util.StaticLoggerDispatcher; -import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.utils.FormatSQL; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java index bdd3308e8..6b1a75392 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java @@ -14,7 +14,7 @@ public class ThreadOperations { - private final static String JAVA_DATE_FORMAT = "dd/MM/yyyy HH:mm:ss"; + private static final String JAVA_DATE_FORMAT = "dd/MM/yyyy HH:mm:ss"; private ScalableConnection connexion; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java index e71b77ecf..ba6b4de87 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java @@ -5,7 +5,6 @@ import java.sql.Connection; import java.util.ArrayList; import java.util.HashMap; -import java.util.function.Consumer; import java.util.function.Function; import org.apache.commons.io.IOUtils; @@ -20,7 +19,6 @@ import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; import fr.insee.arc.core.util.BDParameters; -import fr.insee.arc.utils.consumer.ThrowingConsumer; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java index 6fd88aad5..b570cf3b0 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java @@ -11,10 +11,9 @@ import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.global.dao.DataStorage; import fr.insee.arc.core.service.global.dao.FileSystemManagement; -import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; +import fr.insee.arc.core.service.p1reception.provider.DirectoriesDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.ManipString; @@ -24,11 +23,14 @@ public class RestoreFileSystem { private Connection connection; private String envExecution; + private DirectoriesDao directories; + public RestoreFileSystem(Sandbox sandbox) { super(); this.connection = sandbox.getConnection(); this.envExecution = sandbox.getSchema(); + this.directories = new DirectoriesDao(sandbox); } @@ -41,9 +43,9 @@ public void execute() throws ArcException { LoggerHelper.info(LOGGER, "Reconstruction du filesystem"); // parcourir toutes les archives dans le répertoire d'archive 
- String rootDirectory = PropertiesHandler.getInstance().getBatchParametersDirectory(); - FileUtilsArc.createDirIfNotexist(FileSystemManagement.directoryEnvRoot(rootDirectory, envExecution)); + directories.createSandboxDirectories(); + // pour chaque entrepot de données, // Comparer les archives du répertoire aux archives enregistrées dans la table // d'archive : @@ -54,20 +56,17 @@ public void execute() throws ArcException { List entrepotList = DataStorage.execQuerySelectDatastorage(connection); for (String entrepot : entrepotList) { - rebuildFileSystemInEntrepot(rootDirectory, entrepot); + rebuildFileSystemInEntrepot(entrepot); } } - private void rebuildFileSystemInEntrepot(String rootDirectory, String entrepot) throws ArcException + private void rebuildFileSystemInEntrepot(String entrepot) throws ArcException { - String dirEntrepotArchive = DirectoryPath.directoryReceptionEntrepotArchive(rootDirectory, envExecution, - entrepot); - String dirEntrepot = DirectoryPath.directoryReceptionEntrepot(rootDirectory, envExecution, entrepot); + directories.createSandboxDatawarehouseDirectories(entrepot); + String dirEntrepotArchive = directories.getDirectoryEntrepotArchive(); + String dirEntrepot = directories.getDiretoryEntrepotIn(); - FileUtilsArc.createDirIfNotexist(dirEntrepotArchive); - FileUtilsArc.createDirIfNotexist(dirEntrepot); - // On cherche les fichiers du répertoire d'archive qui ne sont pas dans la table // archive // Si on en trouve ce n'est pas cohérent et on doit remettre ces fichiers dans diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java index fd9bc5812..7e5ed1b17 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java @@ -1,13 +1,10 @@ package fr.insee.arc.core.service.p0initialisation.pilotage.bo; import java.sql.Connection; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.ObjectUtils; - import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java index 6319f1e5a..71149f9d7 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java @@ -1,63 +1,14 @@ package fr.insee.arc.core.service.p1reception; -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.sql.Connection; -import java.text.SimpleDateFormat; -import java.time.Year; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.stream.Collectors; -import java.util.zip.GZIPInputStream; - -import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; -import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; -import org.apache.logging.log4j.LogManager; -import 
org.apache.logging.log4j.Logger; -import org.apache.tools.tar.TarEntry; -import org.apache.tools.tar.TarInputStream; import org.springframework.stereotype.Component; import fr.insee.arc.core.dataobjects.ArcDatabase; -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.core.dataobjects.ColumnEnum; -import fr.insee.arc.core.model.TraitementEtat; -import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.core.model.TraitementRapport; -import fr.insee.arc.core.model.TraitementTypeFichier; import fr.insee.arc.core.service.global.ApiService; -import fr.insee.arc.core.service.global.dao.FileSystemManagement; -import fr.insee.arc.core.service.global.dao.PhaseOperations; -import fr.insee.arc.core.service.global.dao.TableNaming; -import fr.insee.arc.core.service.global.dao.TableOperations; -import fr.insee.arc.core.service.global.scalability.ServiceScalability; -import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; -import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; -import fr.insee.arc.core.service.p0initialisation.pilotage.bo.ListIdSourceInPilotage; import fr.insee.arc.core.service.p1reception.registerarchive.ArchiveRegistration; import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; import fr.insee.arc.core.service.p1reception.registerfiles.FileRegistration; -import fr.insee.arc.core.service.p1reception.registerfiles.dao.FileRegistrationDao; import fr.insee.arc.core.util.BDParameters; -import fr.insee.arc.core.util.StaticLoggerDispatcher; -import fr.insee.arc.utils.consumer.ThrowingConsumer; -import fr.insee.arc.utils.dao.UtilitaireDao; -import fr.insee.arc.utils.dataobjects.TypeEnum; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.exception.ArcExceptionMessage; -import fr.insee.arc.utils.files.CompressedUtils; -import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.utils.utils.FormatSQL; -import fr.insee.arc.utils.utils.LoggerHelper; -import fr.insee.arc.utils.utils.ManipString; /** * ApiReceptionService @@ -74,26 +25,10 @@ @Component public class ApiReceptionService extends ApiService { - // Headers for the generic bean describing the files - private static final String GB_CONTAINER = "container"; - private static final String GB_FILENAME = "fileName"; - private static final String GB_TYPE = "type"; - private static final String GB_STATE = "etat"; - private static final String GB_REPORT = "rapport"; - private static final String GB_VCONTAINER = "v_container"; - private static final ArrayList GENERIC_BEAN_HEADERS = new ArrayList<>( - Arrays.asList(GB_CONTAINER, GB_FILENAME, GB_TYPE, GB_STATE, GB_REPORT, GB_VCONTAINER)); - private static final ArrayList GENERIC_BEAN_TYPES = new ArrayList<>( - Arrays.asList(TypeEnum.TEXT.getTypeName(), TypeEnum.TEXT.getTypeName(), TypeEnum.TEXT.getTypeName(), TypeEnum.TEXT.getTypeName(), TypeEnum.TEXT.getTypeName(), TypeEnum.TEXT.getTypeName())); - public ApiReceptionService() { super(); } - public static final int READ_BUFFER_SIZE = 131072; - - private static final Logger LOGGER = LogManager.getLogger(ApiReceptionService.class); - public ApiReceptionService(String aCurrentPhase, String aEnvExecution, String aDirectoryRoot, Integer aNbEnr, String paramBatch) { super(aCurrentPhase, aEnvExecution, aDirectoryRoot, aNbEnr, paramBatch); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoriesDao.java 
b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoriesDao.java
new file mode 100644
index 000000000..5b59beff1
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoriesDao.java
@@ -0,0 +1,100 @@
+package fr.insee.arc.core.service.p1reception.provider;
+
+import fr.insee.arc.core.service.global.bo.Sandbox;
+import fr.insee.arc.utils.files.FileUtilsArc;
+import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
+
+public class DirectoriesDao {
+
+	public DirectoriesDao(Sandbox sandbox) {
+		this.sandbox = sandbox;
+		this.directoryRoot = PropertiesHandler.getInstance().getBatchParametersDirectory();
+		this.directoryReceptionEnCours = DirectoryPath.directoryReceptionEtatEnCours(directoryRoot, sandbox.getSchema());
+		this.directoryReceptionOK = DirectoryPath.directoryReceptionEtatOK(directoryRoot, sandbox.getSchema());
+		this.directoryReceptionKO = DirectoryPath.directoryReceptionEtatKO(directoryRoot, sandbox.getSchema());
+	}
+
+	private String directoryRoot;
+	private String directoryReceptionEnCours;
+	private String directoryReceptionOK;
+	private String directoryReceptionKO;
+	private String directoryEntrepotIn;
+	private String directoryEntrepotArchive;
+	private Sandbox sandbox;
+
+	/**
+	 * create global sandbox directories if not exist and register their paths in class
+	 */
+	public void createSandboxDirectories() {
+		// Create target directories if they don't exist
+		FileUtilsArc.createDirIfNotexist(this.directoryRoot);
+		FileUtilsArc.createDirIfNotexist(this.directoryReceptionEnCours);
+		FileUtilsArc.createDirIfNotexist(this.directoryReceptionOK);
+		FileUtilsArc.createDirIfNotexist(this.directoryReceptionKO);
+	}
+
+	/**
+	 * create datawarehouse sandbox directories if not exist and register their paths in class
+	 */
+	public void createSandboxDatawarehouseDirectories(String entrepot) {
+
+		this.directoryEntrepotIn = DirectoryPath.directoryReceptionEntrepot(directoryRoot, sandbox.getSchema(),
+				entrepot);
+		this.directoryEntrepotArchive = DirectoryPath.directoryReceptionEntrepotArchive(directoryRoot,
+				sandbox.getSchema(), entrepot);
+
+		// créer le répertoire de l'entrepot et son repertoire archive
+		FileUtilsArc.createDirIfNotexist(directoryEntrepotArchive);
+		FileUtilsArc.createDirIfNotexist(directoryEntrepotIn);
+	}
+
+	public String getDirectoryRoot() {
+		return directoryRoot;
+	}
+
+	public void setDirectoryRoot(String directoryRoot) {
+		this.directoryRoot = directoryRoot;
+	}
+
+	public String getDirectoryReceptionEnCours() {
+		return directoryReceptionEnCours;
+	}
+
+	public void setDirectoryReceptionEnCours(String directoryReceptionEnCours) {
+		this.directoryReceptionEnCours = directoryReceptionEnCours;
+	}
+
+	public String getDirectoryReceptionOK() {
+		return directoryReceptionOK;
+	}
+
+	public void setDirectoryReceptionOK(String directoryReceptionOK) {
+		this.directoryReceptionOK = directoryReceptionOK;
+	}
+
+	public String getDirectoryReceptionKO() {
+		return directoryReceptionKO;
+	}
+
+	public void setDirectoryReceptionKO(String directoryReceptionKO) {
+		this.directoryReceptionKO = directoryReceptionKO;
+	}
+
+	public String getDiretoryEntrepotIn() {
+		return directoryEntrepotIn;
+	}
+
+	public void setDirectoryEntrepotIn(String directoryEntrepotIn) {
+		this.directoryEntrepotIn = directoryEntrepotIn;
+	}
+
+	public String getDirectoryEntrepotArchive() {
+		return directoryEntrepotArchive;
+	}
+
+	public void setDirectoryEntrepotArchive(String directoryEntrepotArchive) {
this.directoryEntrepotArchive = directoryEntrepotArchive; + } + + +} \ No newline at end of file diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java index d9f0d1ae7..7337a3e4a 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java @@ -8,8 +8,11 @@ import fr.insee.arc.core.service.global.dao.FileSystemManagement; public class DirectoryPath { - - + + private DirectoryPath() { + throw new IllegalStateException("Utility class"); + } + /** * Methods to provide directories paths * @@ -22,8 +25,7 @@ public static String directoryReceptionRoot(String rootDirectory, String env) { } public static String directoryReceptionEntrepot(String rootDirectory, String env, String entrepot) { - return FileSystemManagement.directoryPhaseEntrepot(rootDirectory, env, TraitementPhase.RECEPTION, - entrepot); + return FileSystemManagement.directoryPhaseEntrepot(rootDirectory, env, TraitementPhase.RECEPTION, entrepot); } public static String directoryReceptionEntrepotArchive(String rootDirectory, String env, String entrepot) { @@ -32,12 +34,14 @@ public static String directoryReceptionEntrepotArchive(String rootDirectory, Str } public static String directoryReceptionEntrepotArchiveOld(String rootDirectory, String env, String entrepot) { - return FileSystemManagement.directoryPhaseEntrepotArchiveOld(rootDirectory, env, - TraitementPhase.RECEPTION, entrepot); + return FileSystemManagement.directoryPhaseEntrepotArchiveOld(rootDirectory, env, TraitementPhase.RECEPTION, + entrepot); } - - public static String directoryReceptionEntrepotArchiveOldYearStamped(String rootDirectory, String env, String entrepot) { - return directoryReceptionEntrepotArchiveOld(rootDirectory, env, entrepot) + File.separator + Year.now().getValue(); + + public static String directoryReceptionEntrepotArchiveOldYearStamped(String rootDirectory, String env, + String entrepot) { + return directoryReceptionEntrepotArchiveOld(rootDirectory, env, entrepot) + File.separator + + Year.now().getValue(); } public static String directoryReceptionEtat(String rootDirectory, String env, TraitementEtat e) { diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java index 3c858f7ce..1a38d50c2 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java @@ -8,11 +8,11 @@ import org.apache.logging.log4j.Logger; import fr.insee.arc.core.service.global.bo.Sandbox; +import fr.insee.arc.core.service.p1reception.provider.DirectoriesDao; import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; import fr.insee.arc.core.service.p1reception.registerarchive.bo.GzReader; import fr.insee.arc.core.service.p1reception.registerarchive.bo.TgzReader; import fr.insee.arc.core.service.p1reception.registerarchive.bo.ZipReader; -import fr.insee.arc.core.service.p1reception.registerarchive.dao.DirectoriesDao; import fr.insee.arc.core.service.p1reception.registerarchive.dao.MoveFilesToRegisterDao; import 
fr.insee.arc.core.service.p1reception.registerarchive.operation.ArchiveCheckOperation; import fr.insee.arc.core.service.p1reception.registerarchive.operation.ReworkArchiveOperation; @@ -112,7 +112,7 @@ private boolean isFileRegisteringFinished() { private void selectFilesInDatawarehouse(String entrepot) throws ArcException { - File fDirIn = new File(directories.getDirEntrepotIn()); + File fDirIn = new File(directories.getDiretoryEntrepotIn()); // vérifier le type (répertoire) if (fDirIn.isDirectory()) { diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/DirectoriesDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/DirectoriesDao.java deleted file mode 100644 index 7289cd09c..000000000 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/DirectoriesDao.java +++ /dev/null @@ -1,100 +0,0 @@ -package fr.insee.arc.core.service.p1reception.registerarchive.dao; - -import fr.insee.arc.core.service.global.bo.Sandbox; -import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; -import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; - -public class DirectoriesDao { - - public DirectoriesDao(Sandbox sandbox) { - this.sandbox = sandbox; - this.directoryRoot = PropertiesHandler.getInstance().getBatchParametersDirectory(); - this.dirEnCours = DirectoryPath.directoryReceptionEtatEnCours(directoryRoot, sandbox.getSchema()); - this.dirOK = DirectoryPath.directoryReceptionEtatOK(directoryRoot, sandbox.getSchema()); - this.dirKO = DirectoryPath.directoryReceptionEtatKO(directoryRoot, sandbox.getSchema()); - } - - private String directoryRoot; - private String dirEnCours; - private String dirOK; - private String dirKO; - private String dirEntrepotIn; - private String dirEntrepotArchive; - private Sandbox sandbox; - - /** - * create global sandbox directories if not exist and register their paths in class - */ - public void createSandboxDirectories() { - // Create target directories if they don't exist - FileUtilsArc.createDirIfNotexist(this.dirEnCours); - FileUtilsArc.createDirIfNotexist(this.dirOK); - FileUtilsArc.createDirIfNotexist(this.dirKO); - } - - /** - * create datawarehouse sandbox directories if not exist and register their paths in class - */ - public void createSandboxDatawarehouseDirectories(String entrepot) { - - this.dirEntrepotIn = DirectoryPath.directoryReceptionEntrepot(directoryRoot, sandbox.getSchema(), - entrepot); - this.dirEntrepotArchive = DirectoryPath.directoryReceptionEntrepotArchive(directoryRoot, - sandbox.getSchema(), entrepot); - - // créer le répertoire de l'entrepot et son repertoire archive - FileUtilsArc.createDirIfNotexist(dirEntrepotArchive); - FileUtilsArc.createDirIfNotexist(dirEntrepotIn); - } - - public String getDirectoryRoot() { - return directoryRoot; - } - - public void setDirectoryRoot(String directoryRoot) { - this.directoryRoot = directoryRoot; - } - - public String getDirEnCours() { - return dirEnCours; - } - - public void setDirEnCours(String dirEnCours) { - this.dirEnCours = dirEnCours; - } - - public String getDirOK() { - return dirOK; - } - - public void setDirOK(String dirOK) { - this.dirOK = dirOK; - } - - public String getDirKO() { - return dirKO; - } - - public void setDirKO(String dirKO) { - this.dirKO = dirKO; - } - - public String getDirEntrepotIn() { - return dirEntrepotIn; - } - - public void setDirEntrepotIn(String dirEntrepotIn) { - this.dirEntrepotIn = dirEntrepotIn; - 
} - - public String getDirEntrepotArchive() { - return dirEntrepotArchive; - } - - public void setDirEntrepotArchive(String dirEntrepotArchive) { - this.dirEntrepotArchive = dirEntrepotArchive; - } - - -} \ No newline at end of file diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java index d7fbf6d2b..d01cf41ee 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java @@ -42,12 +42,7 @@ public static void registerArchive(Sandbox sandbox, String entrepot, String arch query.build(SQL.END_QUERY); UtilitaireDao.get(0).executeRequest(sandbox.getConnection(), query); -// - -// UtilitaireDao.get(0).executeBlock(sandbox.getConnection(), -// "INSERT INTO " + TableNaming.dbEnv(sandbox.getSchema()) -// + "pilotage_archive (entrepot,nom_archive) values ('" + entrepot + "','" + reworkInstance.getReworkedArchiveName() -// + "'); "); + } } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java index a32bd6ba5..985e8285c 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java @@ -5,7 +5,7 @@ import java.nio.file.Files; import java.nio.file.Paths; -import fr.insee.arc.core.service.p1reception.registerarchive.dao.DirectoriesDao; +import fr.insee.arc.core.service.p1reception.provider.DirectoriesDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.utils.files.CompressedUtils; @@ -69,7 +69,7 @@ public void qualifyAndRename() { + ManipString.substringAfterFirst(reworkedArchiveName, "."); } - fileOutArchive = new File(directories.getDirEntrepotArchive() + File.separator + reworkedArchiveName); + fileOutArchive = new File(directories.getDirectoryEntrepotArchive() + File.separator + reworkedArchiveName); if (!fileOutArchive.exists()) { break; @@ -98,13 +98,13 @@ public void reworkArchive() throws ArcException { throw new ArcException(exception, ArcExceptionMessage.FILE_COPY_FAILED, inputFile, fileOutArchive); } // déplacer le fichier dans encours - FileUtilsArc.deplacerFichier(directories.getDirEntrepotIn(), directories.getDirEnCours(), + FileUtilsArc.deplacerFichier(directories.getDiretoryEntrepotIn(), directories.getDirectoryReceptionEnCours(), inputFile.getName(), entrepot + "_" + reworkedArchiveName); } else { // on génére le tar.gz dans archive CompressedUtils.generateTarGzFromFile(inputFile, fileOutArchive, inputFile.getName()); // on copie le tar.gz dans encours - File fOut = new File(directories.getDirEnCours() + File.separator + entrepot + "_" + reworkedArchiveName); + File fOut = new File(directories.getDirectoryReceptionEnCours() + File.separator + entrepot + "_" + reworkedArchiveName); try { Files.copy(Paths.get(fileOutArchive.getAbsolutePath()), Paths.get(fOut.getAbsolutePath())); } catch (IOException exception) { @@ -114,7 +114,7 @@ public void reworkArchive() throws ArcException { 
FileUtilsArc.delete(inputFile); } - this.reworkedArchiveFile = new File(directories.getDirEnCours() + File.separator + entrepot + "_" + reworkedArchiveName); + this.reworkedArchiveFile = new File(directories.getDirectoryReceptionEnCours() + File.separator + entrepot + "_" + reworkedArchiveName); this.reworkedArchiveSize = (int) (fileOutArchive.length() / 1024 / 1024); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java index 103867cf2..4eaeffb62 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java @@ -1,12 +1,7 @@ package fr.insee.arc.core.service.p1reception.registerfiles; import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.sql.Connection; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -20,28 +15,18 @@ import fr.insee.arc.core.model.TraitementRapport; import fr.insee.arc.core.model.TraitementTypeFichier; import fr.insee.arc.core.service.global.bo.Sandbox; -import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.global.dao.TableOperations; import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoriesDao; import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.core.service.p1reception.registerarchive.bo.FileDescriber; import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; -import fr.insee.arc.core.service.p1reception.registerarchive.bo.GzReader; -import fr.insee.arc.core.service.p1reception.registerarchive.bo.TgzReader; -import fr.insee.arc.core.service.p1reception.registerarchive.bo.ZipReader; -import fr.insee.arc.core.service.p1reception.registerarchive.dao.DirectoriesDao; -import fr.insee.arc.core.service.p1reception.registerarchive.dao.MoveFilesToRegisterDao; -import fr.insee.arc.core.service.p1reception.registerarchive.operation.ArchiveCheckOperation; -import fr.insee.arc.core.service.p1reception.registerarchive.operation.ReworkArchiveOperation; import fr.insee.arc.core.service.p1reception.registerfiles.dao.FileRegistrationDao; import fr.insee.arc.core.service.p1reception.registerfiles.provider.ContainerName; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.utils.files.CompressedUtils; -import fr.insee.arc.utils.files.CompressionExtension; import fr.insee.arc.utils.files.FileUtilsArc; import fr.insee.arc.utils.structure.GenericBean; import fr.insee.arc.utils.utils.FormatSQL; @@ -87,7 +72,7 @@ public void registerAndDispatchFiles(FilesDescriber providedArchiveContent) thro if (archiveContent.getFilesAttribute().isEmpty()) { return; } - String dirIn = directories.getDirEnCours(); + String dirIn = directories.getDirectoryReceptionEnCours(); for (FileDescriber f : archiveContent.getFilesAttribute()) { diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/ApiChargementService.java 
b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/ApiChargementService.java index c56aa8310..ec29ce5d5 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/ApiChargementService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/ApiChargementService.java @@ -1,6 +1,7 @@ package fr.insee.arc.core.service.p2chargement; import java.io.File; +import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -9,10 +10,9 @@ import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.model.TraitementEtat; -import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.ApiService; -import fr.insee.arc.core.service.global.dao.FileSystemManagement; import fr.insee.arc.core.service.global.thread.MultiThreading; +import fr.insee.arc.core.service.p1reception.provider.DirectoriesDao; import fr.insee.arc.core.service.p2chargement.bo.Norme; import fr.insee.arc.core.service.p2chargement.thread.ThreadChargementService; import fr.insee.arc.core.util.BDParameters; @@ -49,6 +49,9 @@ public ApiChargementService(String aCurrentPhase, String aEnvExecution, String a String paramBatch) { super(aCurrentPhase, aEnvExecution, aDirectoryRoot, aNbEnr, paramBatch); } + + protected List listeNorme; + protected String directoryIn; @Override public void executer() throws ArcException { @@ -58,18 +61,19 @@ public void executer() throws ArcException { BDParameters bdParameters = new BDParameters(ArcDatabase.COORDINATOR); - this.directoryIn = FileSystemManagement.directoryPhaseEtatOK(this.getDirectoryRoot(), this.envExecution, - TraitementPhase.valueOf(previousPhase)) + File.separator; + // input directory is reception_ok directory + this.directoryIn = new DirectoriesDao(this.coordinatorSandbox).getDirectoryReceptionOK() + File.separator; // récupération des différentes normes dans la base - this.listeNorme = Norme.getNormesBase(this.connexion.getCoordinatorConnection(), this.tableNorme); + this.listeNorme = Norme.getNormesBase(this.connexion.getCoordinatorConnection(), this.envExecution); this.maxParallelWorkers = bdParameters.getInt(this.connexion.getCoordinatorConnection(), "ApiChargementService.MAX_PARALLEL_WORKERS", 4); // Récupérer la liste des fichiers selectionnés StaticLoggerDispatcher.info(LOGGER, "Récupérer la liste des fichiers selectionnés"); - setTabIdSource(pilotageListIdsource(this.tablePilTemp, this.currentPhase, TraitementEtat.ENCOURS.toString())); + + this.tabIdSource = pilotageListIdsource(this.tablePilTemp, this.currentPhase, TraitementEtat.ENCOURS.toString()); MultiThreading mt = new MultiThreading<>(this, new ThreadChargementService()); @@ -78,4 +82,13 @@ public void executer() throws ArcException { } + public List getListeNorme() { + return listeNorme; + } + + public String getDirectoryIn() { + return directoryIn; + } + + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/FilesInputStreamLoad.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/FilesInputStreamLoad.java index 77a05c10a..28c9d79b5 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/FilesInputStreamLoad.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/FilesInputStreamLoad.java @@ -11,8 +11,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import 
fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.core.util.StaticLoggerDispatcher; +import fr.insee.arc.utils.files.CompressedUtils; /** * Just a map with inputstream in it. We have to read throught the file multiple time, this object is here to @@ -33,9 +33,9 @@ public class FilesInputStreamLoad { public FilesInputStreamLoad(File theFileToRead) throws IOException { super(); try { - this.tmpInxChargement = new GZIPInputStream(new BufferedInputStream(new FileInputStream(theFileToRead),ApiReceptionService.READ_BUFFER_SIZE)); - this.tmpInxNormage = new GZIPInputStream(new BufferedInputStream(new FileInputStream(theFileToRead),ApiReceptionService.READ_BUFFER_SIZE)); - this.tmpInxCSV = new GZIPInputStream(new BufferedInputStream(new FileInputStream(theFileToRead),ApiReceptionService.READ_BUFFER_SIZE)); + this.tmpInxChargement = new GZIPInputStream(new BufferedInputStream(new FileInputStream(theFileToRead),CompressedUtils.READ_BUFFER_SIZE)); + this.tmpInxNormage = new GZIPInputStream(new BufferedInputStream(new FileInputStream(theFileToRead),CompressedUtils.READ_BUFFER_SIZE)); + this.tmpInxCSV = new GZIPInputStream(new BufferedInputStream(new FileInputStream(theFileToRead),CompressedUtils.READ_BUFFER_SIZE)); } catch (FileNotFoundException e) { StaticLoggerDispatcher.error(LOGGER, "Can't instanciate FilesInputStreamLoad for file " + theFileToRead.getName()); throw e; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/GZArchiveLoader.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/GZArchiveLoader.java index d72a0b9c1..1c742d39d 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/GZArchiveLoader.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/GZArchiveLoader.java @@ -9,10 +9,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; +import fr.insee.arc.utils.files.CompressedUtils; /** @@ -32,9 +32,9 @@ public FilesInputStreamLoad readFileWithoutExtracting() throws ArcException { // Loading try { - this.filesInputStreamLoad.setTmpInxChargement(new GZIPInputStream(new BufferedInputStream(new FileInputStream(this.archiveChargement),ApiReceptionService.READ_BUFFER_SIZE))); - this.filesInputStreamLoad.setTmpInxCSV(new GZIPInputStream(new BufferedInputStream(new FileInputStream(this.archiveChargement),ApiReceptionService.READ_BUFFER_SIZE))); - this.filesInputStreamLoad.setTmpInxNormage(new GZIPInputStream(new BufferedInputStream(new FileInputStream(this.archiveChargement),ApiReceptionService.READ_BUFFER_SIZE))); + this.filesInputStreamLoad.setTmpInxChargement(new GZIPInputStream(new BufferedInputStream(new FileInputStream(this.archiveChargement),CompressedUtils.READ_BUFFER_SIZE))); + this.filesInputStreamLoad.setTmpInxCSV(new GZIPInputStream(new BufferedInputStream(new FileInputStream(this.archiveChargement),CompressedUtils.READ_BUFFER_SIZE))); + this.filesInputStreamLoad.setTmpInxNormage(new GZIPInputStream(new BufferedInputStream(new FileInputStream(this.archiveChargement),CompressedUtils.READ_BUFFER_SIZE))); } catch (IOException ioReadException) { throw new ArcException(ioReadException, ArcExceptionMessage.FILE_READ_FAILED, this.archiveChargement); } diff --git 
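FilesInputStreamLoad and GZArchiveLoader now take their read buffer size from CompressedUtils.READ_BUFFER_SIZE instead of ApiReceptionService. The stream wrapping order is unchanged; a sketch of the pattern, with an assumed 8 KiB value since the constant's definition is not part of this diff:

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

public final class GzipOpenSketch {

    // Assumed size; the real value lives in CompressedUtils.READ_BUFFER_SIZE.
    private static final int READ_BUFFER_SIZE = 8 * 1024;

    // File -> buffered stream -> gzip decompression, as in the loaders above.
    public static InputStream open(File gzFile) throws IOException {
        return new GZIPInputStream(
                new BufferedInputStream(new FileInputStream(gzFile), READ_BUFFER_SIZE));
    }
}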
a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/TarGzDecompressor.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/TarGzDecompressor.java index 4d13a64a0..b1ea76900 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/TarGzDecompressor.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/TarGzDecompressor.java @@ -14,9 +14,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.files.CompressedUtils; import fr.insee.arc.utils.files.FileUtilsArc; import fr.insee.arc.utils.utils.ManipString; @@ -35,7 +35,7 @@ public void extract(File archiveFile) throws IOException { File dir = new File(archiveFile + ".dir"); try (GzipCompressorInputStream gzipIn = new GzipCompressorInputStream( - new BufferedInputStream(new FileInputStream(archiveFile), ApiReceptionService.READ_BUFFER_SIZE)); + new BufferedInputStream(new FileInputStream(archiveFile), CompressedUtils.READ_BUFFER_SIZE)); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzipIn)) { TarArchiveEntry entry; @@ -45,7 +45,7 @@ public void extract(File archiveFile) throws IOException { // directories if not empty are automatically read in tar entries list if (!entry.isDirectory()) { int count; - byte data[] = new byte[32738]; + byte[] data = new byte[32738]; // temporary name for the file being uncompress try (FileOutputStream fos = new FileOutputStream(dir.getAbsolutePath() + File.separator diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ZipDecompressor.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ZipDecompressor.java index 8dc62f9f4..836ade9a8 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ZipDecompressor.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ZipDecompressor.java @@ -13,9 +13,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.files.CompressedUtils; import fr.insee.arc.utils.files.FileUtilsArc; import fr.insee.arc.utils.utils.ManipString; @@ -29,7 +29,7 @@ public void extract(File archiveFile) throws IOException { File dir = new File(archiveFile + ".dir"); try (ZipArchiveInputStream zipIn = new ZipArchiveInputStream( - new BufferedInputStream(new FileInputStream(archiveFile), ApiReceptionService.READ_BUFFER_SIZE))) { + new BufferedInputStream(new FileInputStream(archiveFile), CompressedUtils.READ_BUFFER_SIZE))) { ZipArchiveEntry entry; while ((entry = (ZipArchiveEntry) zipIn.getNextEntry()) != null) { @@ -43,7 +43,7 @@ public void extract(File archiveFile) throws IOException { } } else { int count; - byte data[] = new byte[32738]; + byte[] data = new byte[32738]; // temporary name for the file being uncompress try (FileOutputStream fos = new FileOutputStream(dir.getAbsolutePath() + File.separator diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/bo/Norme.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/bo/Norme.java index 
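TarGzDecompressor keeps the same extraction loop while switching to the shared buffer constant and the idiomatic byte[] declaration. A self-contained sketch of that loop with Apache commons-compress; error handling and the exact output file naming are simplified relative to the real class:

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;

public final class TarGzExtractSketch {

    public static void extract(File archiveFile, int bufferSize) throws IOException {
        // files are unpacked next to the archive, in "<archive>.dir", as in the diff
        File dir = new File(archiveFile + ".dir");
        dir.mkdirs();
        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(
                new GzipCompressorInputStream(
                        new BufferedInputStream(new FileInputStream(archiveFile), bufferSize)))) {
            TarArchiveEntry entry;
            while ((entry = tarIn.getNextTarEntry()) != null) {
                if (entry.isDirectory()) {
                    continue; // non-empty directories reappear through their file entries
                }
                byte[] data = new byte[32738];
                int count;
                File out = new File(dir, new File(entry.getName()).getName());
                try (FileOutputStream fos = new FileOutputStream(out)) {
                    while ((count = tarIn.read(data)) != -1) {
                        fos.write(data, 0, count);
                    }
                }
            }
        }
    }
}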
7ed4165f3..df52a1843 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/bo/Norme.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/bo/Norme.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.Logger; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.structure.GenericBean; @@ -79,14 +80,14 @@ public void setRegleChargement(RegleChargement regleChargement) { * @return * @throws ArcException */ - public static List getNormesBase(Connection connexion, String tableNorme) { + public static List getNormesBase(Connection connexion, String envExecution) { List output = new ArrayList() ; // Récupérer les régles de définition de normes ArrayList> normes = new ArrayList>(); try { normes = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, - new ArcPreparedStatementBuilder( "select id_norme, periodicite, def_norme, def_validite from " + tableNorme + ";"))).content; + new ArcPreparedStatementBuilder( "select id_norme, periodicite, def_norme, def_validite from " + ViewEnum.NORME.getFullName(envExecution) + ";"))).content; } catch (ArcException e) { LoggerHelper.errorAsComment(LOGGER, "Norme.getNormesBase - norms retrieval in database failed "); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXml.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXml.java index 45b4e53d2..68c87752c 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXml.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXml.java @@ -153,27 +153,20 @@ public void execution() throws ArcException { java.util.Date beginDate = new java.util.Date(); // Création de la table de stockage - XMLHandlerCharger4 handler = new XMLHandlerCharger4(); - handler.fileName = fileName; - handler.connexion = connexion; - handler.tempTableA = this.tableTempA; - handler.normeCourante = norme; - handler.validite = validite; - handler.tempTableAColumnsLongName=this.tempTableAColumnsLongName; - handler.tempTableAColumnsShortName=this.tempTableAColumnsShortName; + XMLHandlerCharger4 handler = new XMLHandlerCharger4(connexion, fileName, norme, validite, this.tableTempA, this.tempTableAColumnsLongName, this.tempTableAColumnsShortName); // appel du parser et gestion d'erreur try { SAXParser saxParser = SecuredSaxParser.buildSecuredSaxParser(); saxParser.parse(f, handler); } catch (ParserConfigurationException | SAXException | IOException e) { - error = true; + error = true; ArcException businessException = new ArcException(e, ArcExceptionMessage.XML_SAX_PARSING_FAILED, this.fileName).logMessageException(); rapport = businessException.getMessage().replace("'", "''"); throw businessException; } - this.jointure=handler.jointure; + this.jointure=handler.getJointure(); java.util.Date endDate = new java.util.Date(); StaticLoggerDispatcher.info(LOGGER, "** excecution temps" + (endDate.getTime() - beginDate.getTime()) + " ms"); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java index 6c4fa4be1..05630728a 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java +++ 
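Norme.getNormesBase above now resolves the norm table from the execution environment through ViewEnum.NORME.getFullName(envExecution) instead of receiving a table name. A plain-JDBC sketch of the lookup; the real code goes through UtilitaireDao and GenericBean, and the table name "norme" in the sandbox schema is an assumption drawn from the enum constant:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public final class NormesLookupSketch {

    // One String[4] per norm: id_norme, periodicite, def_norme, def_validite.
    public static List<String[]> getNormesBase(Connection connection, String envExecution) throws SQLException {
        String sql = "SELECT id_norme, periodicite, def_norme, def_validite FROM " + envExecution + ".norme";
        List<String[]> normes = new ArrayList<>();
        try (PreparedStatement statement = connection.prepareStatement(sql);
                ResultSet rs = statement.executeQuery()) {
            while (rs.next()) {
                normes.add(new String[] { rs.getString(1), rs.getString(2), rs.getString(3), rs.getString(4) });
            }
        }
        return normes;
    }
}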
b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java @@ -15,6 +15,7 @@ import org.xml.sax.SAXException; import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.bo.NormeFichier; @@ -47,7 +48,7 @@ public class ChargeurXmlComplexe implements IChargeur{ private String fileName; private Connection connexion; private String tableChargementPilTemp; - private String tableChargementRegle; + private String envExecution; private String currentPhase; private Norme norme; private String validite; @@ -74,18 +75,18 @@ public ChargeurXmlComplexe(ThreadChargementService threadChargementService, Stri this.f = threadChargementService.filesInputStreamLoad.getTmpInxChargement(); this.norme = threadChargementService.normeOk; this.validite = threadChargementService.validite; - this.tableChargementRegle=threadChargementService.getTableChargementRegle(); + this.envExecution=threadChargementService.getEnvExecution(); } - public ChargeurXmlComplexe(Connection connexion, String fileName, InputStream f, String tableOut, String norme, String periodicite, String validite, String tableRegle) { + public ChargeurXmlComplexe(Connection connexion, String fileName, InputStream f, String tableOut, String norme, String periodicite, String validite, String envExecution) { this.fileName = fileName; this.connexion = connexion; this.tableTempA = tableOut; this.norme=new Norme(norme, periodicite, null, null); this.validite = validite; this.f=f; - this.tableChargementRegle=tableRegle; + this.envExecution=envExecution; } @@ -110,7 +111,7 @@ public void initialisation() { // voir avec Pierre comment factoriser ce genre de truc try { - HashMap> regle = RulesOperations.getBean(this.connexion,RulesOperations.getRegles(tableChargementRegle, normeFichier)); + HashMap> regle = RulesOperations.getBean(this.connexion,RulesOperations.getRegles(ViewEnum.CHARGEMENT_REGLE.getFullName(this.envExecution), normeFichier)); if (regle.get("format").get(0)!=null) { for (String rule:regle.get("format").get(0).split("\n")) { @@ -193,7 +194,6 @@ public void execution() throws ArcException { handler.fileName = fileName; handler.connexion = connexion; handler.tempTableA = this.tableTempA; - handler.start = 0; handler.normeCourante = norme; handler.validite = validite; handler.tempTableAColumnsLongName=this.tempTableAColumnsLongName; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java index 9d83a909e..38df9551f 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java @@ -8,8 +8,10 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementRapport; +import fr.insee.arc.core.service.global.dao.DatabaseConnexionConfiguration; import fr.insee.arc.core.service.global.dao.HashFileNameConversion; import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.global.dao.TableNaming; @@ -47,7 +49,6 @@ public class 
ThreadChargementService extends ApiChargementService implements Run private Thread t; - private int indice; private String container; @@ -65,19 +66,17 @@ public class ThreadChargementService extends ApiChargementService implements Run private String tableTempA; + @Override public void configThread(ScalableConnection connexion, int currentIndice, ApiChargementService aApi) { - this.indice = currentIndice; - this.setEnvExecution(aApi.getEnvExecution()); - this.idSource = aApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(this.indice); + this.envExecution = aApi.getEnvExecution(); + this.idSource = aApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(currentIndice); this.connexion = connexion; - this.container = aApi.getTabIdSource().get("container").get(this.indice); - this.tableChargementRegle = aApi.getTableChargementRegle(); - this.tableNorme = aApi.getTableNorme(); + this.container = aApi.getTabIdSource().get("container").get(currentIndice); this.tablePilTemp = aApi.getTablePilTemp(); this.currentPhase = aApi.getCurrentPhase(); - this.setTablePil(aApi.getTablePil()); + this.tablePil = aApi.getTablePil(); this.paramBatch = aApi.getParamBatch(); this.directoryIn = aApi.getDirectoryIn(); this.listeNorme = aApi.getListeNorme(); @@ -92,7 +91,7 @@ public void configThread(ScalableConnection connexion, int currentIndice, ApiCha // table de sortie des données dans l'application (hors du module) this.tableChargementOK = TableNaming.globalTableName(envExecution, this.currentPhase, - TraitementEtat.OK.toString()); + TraitementEtat.OK); // thread generic dao arcThreadGenericDao = new ThreadOperations(connexion, tablePil, tablePilTemp, tableChargementPilTemp, @@ -121,13 +120,13 @@ public void run() { finalisation(); } catch (ArcException processException) { - + processException.logFullException(); try { // En cas d'erreur on met le fichier en KO avec l'erreur obtenu. 
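configThread above now assigns the inherited fields directly and drops the stored loop index; each worker only needs the id_source and container at its own position. A reduced sketch of that pattern with stand-in names, not the real ARC classes:

import java.util.List;

class WorkerThreadSketch {

    private String envExecution;
    private String idSource;
    private String container;

    // Copy shared job parameters from the parent service and pick this worker's file.
    void configThread(ParentService api, int currentIndice) {
        this.envExecution = api.getEnvExecution();
        this.idSource = api.getIdSources().get(currentIndice);
        this.container = api.getContainers().get(currentIndice);
    }

    interface ParentService {
        String getEnvExecution();
        List<String> getIdSources();
        List<String> getContainers();
    }
}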
- this.repriseSurErreur(this.connexion.getExecutorConnection(), this.getCurrentPhase(), this.tablePil, - this.idSource, processException, "aucuneTableADroper"); + PilotageOperations.traitementSurErreur(this.connexion.getCoordinatorConnection(), this.getCurrentPhase(), this.tablePil, + this.idSource, processException); } catch (ArcException marquageException) { marquageException.logFullException(); } @@ -295,7 +294,7 @@ private void choixChargeur() throws ArcException { private Norme calculerTypeFichier(Norme norme) throws ArcException { ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append("SELECT type_fichier, delimiter, format ").append(" FROM " + this.getTableChargementRegle()) + requete.append("SELECT type_fichier, delimiter, format ").append(" FROM " + ViewEnum.CHARGEMENT_REGLE.getFullName(this.getEnvExecution())) .append(" WHERE id_norme =" + requete.quoteText(norme.getIdNorme()) + ";"); GenericBean g = new GenericBean( @@ -323,7 +322,7 @@ private String insertionFinale(String tableName, String idSource) throws ArcExce String tableIdSource = HashFileNameConversion.tableOfIdSource(tableName, idSource); // promote the application user account to full right - query.append(switchToFullRightRole()); + query.append(DatabaseConnexionConfiguration.switchToFullRightRole()); // Créer la table des données de la table des donénes chargées query.append(TableOperations.createTableInherit(getTableTempA(), tableIdSource)); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/ArbreFormat.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/ArbreFormat.java index 25a497563..c7fd516da 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/ArbreFormat.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/ArbreFormat.java @@ -12,6 +12,7 @@ import org.xml.sax.InputSource; import org.xml.sax.SAXException; +import fr.insee.arc.core.model.XMLConstant; import fr.insee.arc.core.service.p2chargement.bo.Norme; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; @@ -85,18 +86,15 @@ public ArrayList getPeres (String fils){ listePere.add(fils); String pere = this.arbreHierachieDuFichier.get(fils); - while (pere != null && !pere.equalsIgnoreCase("root")) { + while (pere != null && !pere.equalsIgnoreCase(XMLConstant.ROOT)) { listePere.add(pere); pere = arbreHierachieDuFichier.get(pere); } return listePere; } - - - /** * @return the arbreFormat */ diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java index 066358f27..de3c46bd3 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java @@ -14,6 +14,7 @@ import org.xml.sax.SAXParseException; import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.model.XMLConstant; import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.dao.DateConversion; import fr.insee.arc.core.service.p2chargement.bo.Norme; @@ -39,15 +40,13 @@ public XMLComplexeHandlerCharger() { private HashMap col = new HashMap<>(); private HashMap colData = new HashMap<>(); - - // @trees private HashMap tree = new 
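ArbreFormat.getPeres above now compares against the shared XMLConstant.ROOT label while walking up the parent map. A self-contained sketch of that walk; the map-based tree and the "root" sentinel mirror the diff, the rest is illustrative:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class ParentWalkSketch {

    public static final String ROOT = "root";

    // Collect the element itself and its ancestors, stopping at the XML root label.
    public static List<String> getPeres(Map<String, String> parentOf, String fils) {
        List<String> listePere = new ArrayList<>();
        listePere.add(fils);
        String pere = parentOf.get(fils);
        while (pere != null && !pere.equalsIgnoreCase(ROOT)) {
            listePere.add(pere);
            pere = parentOf.get(pere);
        }
        return listePere;
    }

    public static void main(String[] args) {
        Map<String, String> tree = new HashMap<>();
        tree.put("c", "b");
        tree.put("b", "a");
        tree.put("a", "root");
        System.out.println(getPeres(tree, "c")); // [c, b, a]
    }
}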
HashMap<>(); private HashMap treeNode = new HashMap<>(); private HashMap colDist = new HashMap<>(); private HashMap keepLast = new HashMap<>(); - public int start; + private int start = 0; private int idLigne = 0; private int distance = 0; @@ -213,13 +212,7 @@ public void endElement(String uri, String localName, String qName) throws SAXPar // mettre à jour la colonne si elle n'existe pas dans tree ou si elle ne pointe // pas vers le bon pere - if (tree.get(this.allCols.indexOf(closedTagHeader1)) == null - /* - * @trees || - * !tree.get(this.allCols.indexOf(closedTagHeader1)).equals(this.allCols.indexOf - * (fatherOfTheBlock)) - */ - ) { + if (tree.get(this.allCols.indexOf(closedTagHeader1)) == null) { // mettre à jour tree this.tree.put(this.allCols.indexOf(closedTagHeader1), this.allCols.indexOf(fatherOfTheBlock)); @@ -253,7 +246,7 @@ public void endElement(String uri, String localName, String qName) throws SAXPar orderTreeStackQName--; this.treeStack.remove(this.treeStack.size() - 1); - this.treeStackFatherLag = new ArrayList(this.treeStackFather); + this.treeStackFatherLag = new ArrayList<>(this.treeStackFather); this.father = this.treeStackFather.get(this.treeStackFather.size() - 1); this.treeStackFather.remove(this.treeStackFather.size() - 1); @@ -310,9 +303,6 @@ public void endElement(String uri, String localName, String qName) throws SAXPar */ @Override public void startDocument() { - // intialisation de la connexion - // creation de la table - try { this.colDist.put(-1, 0); } catch (Exception ex) { @@ -334,19 +324,9 @@ public void startElement(String uri, String localName, String qName, Attributes orderTreeStackQName++; this.currentTag = Format.toBdRaw(renameColumn(qName)); -// +(this.father.equals(root_father)?"": -// (father_separator+ -// ManipString.substringBeforeFirst( -// this.father -// ,father_separator) -// ) -// ) - ; this.currentData.setLength(0); this.hasData = false; -// this.firstData = true; - // distance++; // on ajoute les colonnes si besoin // on met à jour le numéro d'index @@ -355,15 +335,6 @@ public void startElement(String uri, String localName, String qName, Attributes // créer et enregistrer la colonne si elle n'existe pas if (o == null) { this.col.put(this.currentTag, 1); - // rootDistance.put(currentTag,distance); - // try { - // if (pst!=null){ - // requete=Format.executeBlock(st, requete); - // - // pst.executeBatch(); - // pst=null; - // } - this.allCols.add(this.currentTag); this.requete.append("alter table " + this.tempTableA + " add i" + this.allCols.indexOf(this.currentTag) @@ -385,7 +356,7 @@ public void startElement(String uri, String localName, String qName, Attributes } // enregistrement de la structure - structure.append(("," + (this.father.equals(rootFather) ? ApiService.ROOT : "i_" + this.father)) + " " + structure.append(("," + (this.father.equals(rootFather) ? XMLConstant.ROOT : "i_" + this.father)) + " " + (this.father.equals(rootFather) ? 
"1" : this.col.get(this.father)) + " " + "i_" + this.currentTag); if (this.tree.get(this.allCols.indexOf(this.currentTag)).equals(this.allCols.indexOf(this.father)) @@ -393,6 +364,7 @@ public void startElement(String uri, String localName, String qName, Attributes || (this.tree.get(this.allCols.indexOf(this.currentTag + HEADER)) != null && this.tree.get(this.allCols.indexOf(this.currentTag + HEADER)) .equals(this.allCols.indexOf(this.father)))) { + // nothing } else { throw new SAXParseException("Le tag " + this.currentTag + " a des pères differents", "", "", 0, 0); } @@ -460,7 +432,7 @@ public void startElement(String uri, String localName, String qName, Attributes * @throws SAXParseException */ private void insertQueryBuilder(StringBuilder aRequete, String tempTableI, String fileName, List lineCols, - List lineIds, List lineValues) throws SAXParseException { + List lineIds, List lineValues) { HashMap keep = new HashMap<>(); @@ -685,7 +657,7 @@ private void multiLeafUpdate() { * @return */ private String renameColumn(String qName) { - Map m = new TreeMap(); + Map m = new TreeMap<>(); for (Pair p : this.format) { if (treeStackQName.get(p.getSecond()) != null && treeStackQName.get(p.getFirst()) != null diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLHandlerCharger4.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLHandlerCharger4.java index 64ec3e740..d1e95724c 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLHandlerCharger4.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLHandlerCharger4.java @@ -29,10 +29,33 @@ public class XMLHandlerCharger4 extends org.xml.sax.helpers.DefaultHandler { private static final Logger LOGGER = LogManager.getLogger(XMLHandlerCharger4.class); - public XMLHandlerCharger4() { - super(); + + + public XMLHandlerCharger4(Connection connexion, String fileName, Norme normeCourante, String validite, + String tempTableA, FastList tempTableAColumnsLongName, FastList tempTableAColumnsShortName) { + super(); + this.connexion = connexion; + this.fileName = fileName; + this.normeCourante = normeCourante; + this.validite = validite; + this.tempTableA = tempTableA; + this.tempTableAColumnsLongName = tempTableAColumnsLongName; + this.tempTableAColumnsShortName = tempTableAColumnsShortName; } + // input + private Connection connexion; + private String fileName; + private Norme normeCourante; + private String validite; + private String tempTableA; + private FastList tempTableAColumnsLongName; + private FastList tempTableAColumnsShortName; + + // output + private String jointure = ""; + + private HashMap col = new HashMap<>(); private HashMap colData = new HashMap<>(); private HashMap tree = new HashMap<>(); @@ -42,13 +65,8 @@ public XMLHandlerCharger4() { private HashMap keepLast = new HashMap<>(); private int idLigne = 0; - private int distance = 0; - public Connection connexion; - - public String fileName; - public String jointure = ""; private String currentTag; private String closedTag; @@ -56,6 +74,7 @@ public XMLHandlerCharger4() { private String father = "*"; private StringBuilder currentData = new StringBuilder(); + /* * pour les rubriques recursives (au cas ou...) 
*/ @@ -77,13 +96,7 @@ public XMLHandlerCharger4() { private ParallelInsert pi; - public Norme normeCourante; - public String validite; - // column of the load table A - public String tempTableA; - public FastList tempTableAColumnsLongName; - public FastList tempTableAColumnsShortName; private static final String ALTER = "ALTER"; @@ -93,6 +106,8 @@ public XMLHandlerCharger4() { // initialize the integration date with current private final String integrationDate = DateConversion.queryDateConversion(new Date()); + + /** * Actions à réaliser sur les données */ @@ -564,4 +579,8 @@ private void renameColumns(StringBuilder aRequete) { } + public String getJointure() { + return jointure; + } + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/ApiNormageService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/ApiNormageService.java index 7e9c18899..6327af8cb 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/ApiNormageService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/ApiNormageService.java @@ -4,9 +4,7 @@ import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.core.dataobjects.ColumnEnum; -import fr.insee.arc.core.model.TraitementTableParametre; import fr.insee.arc.core.service.global.ApiService; -import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.global.thread.MultiThreading; import fr.insee.arc.core.service.p3normage.thread.ThreadNormageService; import fr.insee.arc.core.util.BDParameters; @@ -39,7 +37,6 @@ public ApiNormageService() { public ApiNormageService(String aCurrentPhase, String aEnvExecution, String aDirectoryRoot, Integer aNbEnr, String paramBatch) { super(aCurrentPhase, aEnvExecution, aDirectoryRoot, aNbEnr, paramBatch); - this.setTableNorme(TableNaming.dbEnv(this.getEnvExecution()) + TraitementTableParametre.NORME); } @Override @@ -52,7 +49,7 @@ public void executer() throws ArcException { this.maxParallelWorkers = bdParameters.getInt(this.connexion.getCoordinatorConnection(), "ApiNormageService.MAX_PARALLEL_WORKERS",4); // récupère le nombre de fichier à traiter - this.setTabIdSource(recuperationIdSource()); + this.tabIdSource = recuperationIdSource(); MultiThreading mt=new MultiThreading<>(this, new ThreadNormageService()); mt.execute(maxParallelWorkers, getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()), this.envExecution, properties.getDatabaseRestrictedUsername()); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/engine/NormageEngineRegleUnicite.java b/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/engine/NormageEngineRegleUnicite.java index 3d1eb381a..a612c98ce 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/engine/NormageEngineRegleUnicite.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/engine/NormageEngineRegleUnicite.java @@ -1,7 +1,6 @@ package fr.insee.arc.core.service.p3normage.engine; import java.util.ArrayList; -import java.util.Date; import java.util.HashMap; import org.apache.logging.log4j.LogManager; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/thread/ThreadNormageService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/thread/ThreadNormageService.java index 362420180..914aaf077 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/thread/ThreadNormageService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p3normage/thread/ThreadNormageService.java @@ -8,7 +8,9 @@ 
import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementEtat; +import fr.insee.arc.core.service.global.dao.DatabaseConnexionConfiguration; import fr.insee.arc.core.service.global.dao.HashFileNameConversion; import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.global.dao.RulesOperations; @@ -45,9 +47,7 @@ public class ThreadNormageService extends ApiNormageService implements Runnable, private static final Logger LOGGER = LogManager.getLogger(ThreadNormageService.class); private Thread t; - - private int indice ; - + private String tableNormageDataTemp; private String tableNormagePilTemp; @@ -63,9 +63,8 @@ public class ThreadNormageService extends ApiNormageService implements Runnable, @Override public void configThread(ScalableConnection connexion, int currentIndice, ApiNormageService theApi) { - - this.indice = currentIndice; - this.idSource = theApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(indice); + + this.idSource = theApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(currentIndice); this.connexion = connexion; // tables du thread @@ -76,25 +75,17 @@ public void configThread(ScalableConnection connexion, int currentIndice, ApiNor this.tableNormageOKTemp = FormatSQL.temporaryTableName("ok_Temp"); this.tableNormageKOTemp = FormatSQL.temporaryTableName("ko_Temp"); - this.tableNormageOK = TableNaming.globalTableName(theApi.getEnvExecution(), theApi.getCurrentPhase(), TraitementEtat.OK.toString()); - this.tableNormageKO = TableNaming.globalTableName(theApi.getEnvExecution(), theApi.getCurrentPhase(), TraitementEtat.KO.toString()); + this.tableNormageOK = TableNaming.globalTableName(theApi.getEnvExecution(), theApi.getCurrentPhase(), TraitementEtat.OK); + this.tableNormageKO = TableNaming.globalTableName(theApi.getEnvExecution(), theApi.getCurrentPhase(), TraitementEtat.KO); // tables héritées - this.setTableNormageRegle(theApi.getTableNormageRegle()); - this.setTableControleRegle(theApi.getTableControleRegle()); - this.setTableMappingRegle(theApi.getTableMappingRegle()); - - this.setTablePil(theApi.getTablePil()); - this.setTablePilTemp(theApi.getTablePilTemp()); - this.setPreviousPhase(theApi.getPreviousPhase()); - this.setCurrentPhase(theApi.getCurrentPhase()); - this.setNbEnr(theApi.getNbEnr()); - this.setTablePrevious(theApi.getTablePrevious()); - this.setTabIdSource(theApi.getTabIdSource()); - this.setTableNorme(theApi.getTableNorme()); - this.setTableNormageRegle(theApi.getTableNormageRegle()); - this.setEnvExecution(theApi.getEnvExecution()); - this.setParamBatch(theApi.getParamBatch()); + this.tablePil = theApi.getTablePil(); + this.tablePilTemp = theApi.getTablePilTemp(); + this.currentPhase = theApi.getCurrentPhase(); + this.tablePrevious = theApi.getTablePrevious(); + this.tabIdSource=theApi.getTabIdSource(); + this.envExecution=theApi.getEnvExecution(); + this.paramBatch=theApi.getParamBatch(); // arc thread dao arcThreadGenericDao=new ThreadOperations(connexion, tablePil, tablePilTemp, tableNormagePilTemp, tablePrevious, paramBatch, idSource); @@ -123,8 +114,7 @@ public void run() { } catch (ArcException e) { StaticLoggerDispatcher.error(LOGGER, e); try { - this.repriseSurErreur(this.connexion.getExecutorConnection(), this.getCurrentPhase(), this.tablePil, this.idSource, e, - "aucuneTableADroper"); + 
PilotageOperations.traitementSurErreur(this.connexion.getCoordinatorConnection(), this.getCurrentPhase(), this.tablePil, this.idSource, e); } catch (ArcException e2) { StaticLoggerDispatcher.error(LOGGER, e2); } @@ -185,7 +175,7 @@ private void jointureBlocXML() throws ArcException { HashMap> pil = RulesOperations.getBean(this.connexion.getExecutorConnection(),RulesOperations.getNormeAttributes(this.idSource, tableNormagePilTemp)); // récupéreration des règles relative au fichier pour la phase courante - HashMap> regle = RulesOperations.getBean(this.connexion.getExecutorConnection(),RulesOperations.getRegles(this.tableNormageRegle, this.tableNormagePilTemp)); + HashMap> regle = RulesOperations.getBean(this.connexion.getExecutorConnection(),RulesOperations.getRegles(ViewEnum.NORMAGE_REGLE.getFullName(envExecution), this.tableNormagePilTemp)); // récupéreration des rubriques utilisées dans règles relative au fichier pour l'ensemble des phases @@ -199,7 +189,7 @@ private void jointureBlocXML() throws ArcException { StringBuilder query=new StringBuilder(); query.append("\n DROP TABLE IF EXISTS "+tableTmpRubriqueDansregles+";"); query.append("\n CREATE TEMPORARY TABLE "+tableTmpRubriqueDansregles+" AS "); - query.append(RulesOperations.getAllRubriquesInRegles(this.tableNormagePilTemp, this.tableNormageRegle, this.tableControleRegle, this.tableMappingRegle)); + query.append(RulesOperations.getAllRubriquesInRegles(this.tableNormagePilTemp, ViewEnum.NORMAGE_REGLE.getFullName(envExecution), ViewEnum.CONTROLE_REGLE.getFullName(envExecution), ViewEnum.MAPPING_REGLE.getFullName(envExecution))); UtilitaireDao.get(0).executeImmediate( this.connexion.getExecutorConnection(), query @@ -242,7 +232,7 @@ private void insertionFinale() throws ArcException { query.append(PilotageOperations.queryUpdateNbEnr(this.tableNormagePilTemp, this.tableNormageOKTemp, this.structure)); // promote the application user account to full right - query.append(switchToFullRightRole()); + query.append(DatabaseConnexionConfiguration.switchToFullRightRole()); String tableIdSourceOK=HashFileNameConversion.tableOfIdSource(this.tableNormageOK ,this.idSource); query.append(TableOperations.createTableInherit(this.tableNormageOKTemp, tableIdSourceOK)); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/ApiControleService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/ApiControleService.java index 767b36bca..eefa0df64 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/ApiControleService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/ApiControleService.java @@ -63,7 +63,7 @@ public void executer() throws ArcException { this.maxParallelWorkers = bdParameters.getInt(this.connexion.getCoordinatorConnection(), "ApiControleService.MAX_PARALLEL_WORKERS", 3); - this.setTabIdSource(recuperationIdSource()); + this.tabIdSource = recuperationIdSource(); MultiThreading mt = new MultiThreading<>(this, new ThreadControleService()); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/engine/ServiceJeuDeRegle.java b/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/engine/ServiceJeuDeRegle.java index 88f78d58b..19b8e8908 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/engine/ServiceJeuDeRegle.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/engine/ServiceJeuDeRegle.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.springframework.stereotype.Component; +import 
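The run() methods above now route failures through PilotageOperations.traitementSurErreur on the coordinator connection instead of the old repriseSurErreur helper. A sketch of the control flow; markFileInError stands in for the real pilotage call, whose body is not shown in this diff:

import java.sql.Connection;

abstract class PhaseWorkerSketch {

    void run(Connection coordinatorConnection, String currentPhase, String tablePil, String idSource) {
        try {
            process(idSource);
        } catch (Exception processException) {
            // first failure: log it and flag the file as KO in the pilotage table
            processException.printStackTrace();
            try {
                markFileInError(coordinatorConnection, currentPhase, tablePil, idSource, processException);
            } catch (Exception marquageException) {
                // a failure of the marking itself is logged, never rethrown
                marquageException.printStackTrace();
            }
        }
    }

    abstract void process(String idSource) throws Exception;

    abstract void markFileInError(Connection connection, String phase, String tablePil,
            String idSource, Exception cause) throws Exception;
}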
fr.insee.arc.core.model.XMLConstant; import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.bo.JeuDeRegle; import fr.insee.arc.core.service.global.bo.RegleControleEntity; @@ -196,7 +197,7 @@ private void control(Connection connexion, JeuDeRegle jdr, String table, String case "CARDINALITE": if (this.listRubTable.contains(reg.getRubriquePere()) // rules to set tree root and father label are ignored - && !(reg.getRubriquePere().equalsIgnoreCase(ApiService.ROOT))) + && !(reg.getRubriquePere().equalsIgnoreCase(XMLConstant.ROOT))) { blocRequete.append(executeRegleCardinalite(jdr, reg)); blocRequete.append(System.lineSeparator()); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/thread/ThreadControleService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/thread/ThreadControleService.java index f36077639..8599a986e 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/thread/ThreadControleService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p4controle/thread/ThreadControleService.java @@ -5,9 +5,12 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.service.global.bo.JeuDeRegle; +import fr.insee.arc.core.service.global.dao.DatabaseConnexionConfiguration; import fr.insee.arc.core.service.global.dao.HashFileNameConversion; +import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.global.dao.TableOperations; import fr.insee.arc.core.service.global.dao.ThreadOperations; @@ -35,8 +38,6 @@ public class ThreadControleService extends ApiControleService implements Runnabl private Thread t = null; - private int indice; - private String tableControleDataTemp; private String tableControlePilTemp; private String tableOutOkTemp = "tableOutOkTemp"; @@ -56,30 +57,17 @@ public class ThreadControleService extends ApiControleService implements Runnabl @Override public void configThread(ScalableConnection connexion, int currentIndice, ApiControleService theApi) { - this.indice = currentIndice; - this.setEnvExecution(theApi.getEnvExecution()); - this.idSource = theApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(indice); + this.envExecution = theApi.getEnvExecution(); + this.idSource = theApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(currentIndice); this.connexion = connexion; - this.setTablePil(theApi.getTablePil()); + this.tablePil = theApi.getTablePil(); this.tablePilTemp = theApi.getTablePilTemp(); + this.currentPhase = theApi.getCurrentPhase(); + this.tablePrevious = theApi.getTablePrevious(); + this.tabIdSource=theApi.getTabIdSource(); + this.paramBatch=theApi.getParamBatch(); - this.setPreviousPhase(theApi.getPreviousPhase()); - this.setCurrentPhase(theApi.getCurrentPhase()); - - this.setNbEnr(theApi.getNbEnr()); - - this.setTablePrevious(theApi.getTablePrevious()); - this.setTabIdSource(theApi.getTabIdSource()); - - this.setTableNorme(theApi.getTableNorme()); - this.setTableNormageRegle(theApi.getTableNormageRegle()); - - this.setParamBatch(theApi.getParamBatch()); - - this.setTableJeuDeRegle(theApi.getTableJeuDeRegle()); - this.setTableControleRegle(theApi.getTableControleRegle()); - - this.sjdr = new ServiceJeuDeRegle(theApi.getTableControleRegle()); + this.sjdr = new 
ServiceJeuDeRegle(ViewEnum.CONTROLE_REGLE.getFullName(envExecution)); this.jdr = new JeuDeRegle(); // Nom des tables temporaires @@ -87,8 +75,8 @@ public void configThread(ScalableConnection connexion, int currentIndice, ApiCon this.tableControlePilTemp = FormatSQL.temporaryTableName("controle_pil_temp"); // tables finales - this.tableOutOk = TableNaming.dbEnv(this.getEnvExecution()) + this.getCurrentPhase() + "_" + TraitementEtat.OK; - this.tableOutKo = TableNaming.dbEnv(this.getEnvExecution()) + this.getCurrentPhase() + "_" + TraitementEtat.KO; + this.tableOutOk = TableNaming.globalTableName(theApi.getEnvExecution(), theApi.getCurrentPhase(), TraitementEtat.OK); + this.tableOutKo = TableNaming.globalTableName(theApi.getEnvExecution(), theApi.getCurrentPhase(), TraitementEtat.KO); // arc thread dao arcThreadGenericDao=new ThreadOperations(connexion, tablePil, tablePilTemp, tableControlePilTemp, tablePrevious, paramBatch, idSource); @@ -107,8 +95,8 @@ public void run() { } catch (ArcException e) { StaticLoggerDispatcher.error(LOGGER, "Error in control Thread"); try { - this.repriseSurErreur(this.connexion.getExecutorConnection(), this.getCurrentPhase(), this.tablePil, - this.idSource, e, "aucuneTableADroper"); + PilotageOperations.traitementSurErreur(this.connexion.getCoordinatorConnection(), this.getCurrentPhase(), this.tablePil, + this.idSource, e); } catch (ArcException e2) { StaticLoggerDispatcher.error(LOGGER, e2); } @@ -155,7 +143,7 @@ private void preparation() throws ArcException { UtilitaireDao.get(0).executeBlock(this.connexion.getExecutorConnection(), query.getQueryWithParameters()); // Récupération des Jeux de règles associés - this.sjdr.fillRegleControle(this.connexion.getExecutorConnection(), jdr, this.getTableControleRegle(), + this.sjdr.fillRegleControle(this.connexion.getExecutorConnection(), jdr, ViewEnum.CONTROLE_REGLE.getFullName(envExecution), this.tableControleDataTemp); this.structure = UtilitaireDao.get(0).getString(this.connexion.getExecutorConnection(), new ArcPreparedStatementBuilder("SELECT jointure FROM " + this.tableControlePilTemp)); @@ -243,7 +231,7 @@ private void insertionFinale() throws ArcException { query.append(calculSeuilControle()); // promote the application user account to full right - query.append(switchToFullRightRole()); + query.append(DatabaseConnexionConfiguration.switchToFullRightRole()); // Créer les tables héritées String tableIdSourceOK = HashFileNameConversion.tableOfIdSource(tableOutOk, this.idSource); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/ApiMappingService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/ApiMappingService.java index 60864b180..04463b72c 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/ApiMappingService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/ApiMappingService.java @@ -71,7 +71,7 @@ public void executer() throws ArcException { this.maxParallelWorkers = bdParameters.getInt(this.connexion.getCoordinatorConnection(), "MappingService.MAX_PARALLEL_WORKERS",4); // récupère le nombre de fichier à traiter - this.setTabIdSource(recuperationIdSource()); + this.tabIdSource = recuperationIdSource(); MultiThreading mt=new MultiThreading<>(this, new ThreadMappingService()); mt.execute(maxParallelWorkers, getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()), this.envExecution, properties.getDatabaseRestrictedUsername()); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/engine/ServiceMapping.java 
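TableNaming.globalTableName is now called with the TraitementEtat enum rather than its toString() value, so only declared states can reach the naming code. A sketch of the benefit with simplified names; the exact naming convention shown is an assumption:

public final class GlobalTableNameSketch {

    enum Etat { OK, KO }

    // Passing the enum makes invalid state labels unrepresentable at the call site.
    static String globalTableName(String envExecution, String phase, Etat etat) {
        return envExecution + "." + (phase + "_" + etat.name()).toLowerCase();
    }

    public static void main(String[] args) {
        System.out.println(globalTableName("arc_bas1", "CONTROLE", Etat.OK)); // arc_bas1.controle_ok
    }
}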
b/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/engine/ServiceMapping.java index 35e070570..e1aa56915 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/engine/ServiceMapping.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/engine/ServiceMapping.java @@ -6,6 +6,7 @@ import java.util.Set; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.service.global.bo.JeuDeRegle; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; @@ -57,10 +58,10 @@ private static Set calculerListeColonnes(Connection aConnexion, String a * @return Le bon id_famille * @throws ArcException */ - public String fetchIdFamille(Connection connexion, JeuDeRegle aJeuDeRegle, String tableNorme) throws ArcException { + public String fetchIdFamille(Connection connexion, JeuDeRegle aJeuDeRegle, String envExecution) throws ArcException { ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); requete - .append("SELECT id_famille FROM " + tableNorme) + .append("SELECT id_famille FROM " + ViewEnum.NORME.getFullName(envExecution)) .append("\n WHERE id_norme = " + requete.quoteText(aJeuDeRegle.getIdNorme())) .append("\n AND periodicite = " + requete.quoteText(aJeuDeRegle.getPeriodicite())); return UtilitaireDao.get(0).getString(connexion, requete); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/thread/ThreadMappingService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/thread/ThreadMappingService.java index aef6d2fe3..d54e38975 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/thread/ThreadMappingService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p5mapping/thread/ThreadMappingService.java @@ -8,9 +8,12 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.service.global.bo.JeuDeRegle; import fr.insee.arc.core.service.global.bo.JeuDeRegleDao; +import fr.insee.arc.core.service.global.dao.DatabaseConnexionConfiguration; +import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.global.dao.TableOperations; import fr.insee.arc.core.service.global.dao.ThreadOperations; import fr.insee.arc.core.service.global.scalability.ScalableConnection; @@ -46,26 +49,16 @@ public void configThread(ScalableConnection connexion, int currentIndice, ApiMap this.connexion = connexion; this.indice = currentIndice; this.idSource = anApi.getTabIdSource().get(ColumnEnum.ID_SOURCE.getColumnName()).get(indice); - this.setEnvExecution(anApi.getEnvExecution()); - - + this.envExecution = anApi.getEnvExecution(); this.tablePilTemp = anApi.getTablePilTemp(); - - this.setPreviousPhase(anApi.getPreviousPhase()); - this.setCurrentPhase(anApi.getCurrentPhase()); - - this.setTablePrevious(anApi.getTablePrevious()); - - this.setTabIdSource(anApi.getTabIdSource()); - - this.setParamBatch(anApi.getParamBatch()); + this.currentPhase = anApi.getCurrentPhase(); + this.tablePrevious = anApi.getTablePrevious(); + this.tabIdSource = anApi.getTabIdSource(); + this.paramBatch = anApi.getParamBatch(); this.tableTempControleOk = "tableTempControleOk".toLowerCase(); this.tableMappingPilTemp = "tableMappingPilTemp".toLowerCase(); - this.setTableJeuDeRegle(anApi.getTableJeuDeRegle()); - 
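ServiceMapping.fetchIdFamille above now takes the execution environment and derives the norm table through ViewEnum.NORME, binding the two business keys. A hedged JDBC sketch of that lookup; the real code uses UtilitaireDao with an ArcPreparedStatementBuilder, and the "norme" table name is an assumption taken from the enum constant:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class FetchIdFamilleSketch {

    public static String fetchIdFamille(Connection connection, String envExecution,
            String idNorme, String periodicite) throws SQLException {
        String sql = "SELECT id_famille FROM " + envExecution + ".norme"
                + " WHERE id_norme = ? AND periodicite = ?";
        try (PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.setString(1, idNorme);
            statement.setString(2, periodicite);
            try (ResultSet rs = statement.executeQuery()) {
                return rs.next() ? rs.getString(1) : null;
            }
        }
    }
}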
this.setTableNorme(anApi.getTableNorme()); - this.setTableOutKo(anApi.getTableOutKo()); this.tablePil = anApi.getTablePil(); // thread generic dao @@ -90,8 +83,7 @@ public void run() { StaticLoggerDispatcher.error(LOGGER, e); try { - this.repriseSurErreur(this.connexion.getExecutorConnection(), this.getCurrentPhase(), this.tablePil, this.idSource, e, - "aucuneTableADroper"); + PilotageOperations.traitementSurErreur(this.connexion.getCoordinatorConnection(), this.getCurrentPhase(), this.tablePil, this.idSource, e); } catch (ArcException e2) { StaticLoggerDispatcher.error(LOGGER, e); @@ -123,7 +115,7 @@ private void executionMapping() throws ArcException /* * Construire l'ensemble des jeux de règles */ - List listeJeuxDeRegles = JeuDeRegleDao.recupJeuDeRegle(this.connexion.getExecutorConnection(), this.tableTempControleOk, this.getTableJeuDeRegle()); + List listeJeuxDeRegles = JeuDeRegleDao.recupJeuDeRegle(this.connexion.getExecutorConnection(), this.tableTempControleOk, ViewEnum.JEUDEREGLE.getFullName(this.getEnvExecution())); /* * Construction de la factory pour les règles de mapping @@ -137,7 +129,7 @@ private void executionMapping() throws ArcException /* * Récupération de l'id_famille */ - String idFamille = serviceMapping.fetchIdFamille(this.connexion.getExecutorConnection(), listeJeuxDeRegles.get(i), this.getTableNorme()); + String idFamille = serviceMapping.fetchIdFamille(this.connexion.getExecutorConnection(), listeJeuxDeRegles.get(i), this.getEnvExecution()); /* * Instancier une requête de mapping générique pour ce jeu de règles. */ @@ -184,7 +176,7 @@ private void executionMapping() throws ArcException query.append(requeteMapping.requeteTransfertVersTablesMetierDefinitives()); // promote the application user account to full right - query.append(switchToFullRightRole()); + query.append(DatabaseConnexionConfiguration.switchToFullRightRole()); /* * Transfert de la table mapping_ko temporaire vers la table mapping_ko définitive diff --git a/arc-core/src/test/java/fr/insee/arc/core/businesstest/TestsFonctionnels.java b/arc-core/src/test/java/fr/insee/arc/core/businesstest/TestsFonctionnels.java index 965eade72..1271c2607 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/businesstest/TestsFonctionnels.java +++ b/arc-core/src/test/java/fr/insee/arc/core/businesstest/TestsFonctionnels.java @@ -16,8 +16,6 @@ import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.engine.initialisation.BddPatcherTest; -import fr.insee.arc.core.service.global.ApiService; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceDateTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceDateTest.java index 61ab90e20..85420ec83 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceDateTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceDateTest.java @@ -1,6 +1,6 @@ package fr.insee.arc.core.service.global.dao; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -10,7 +10,6 @@ import org.junit.Test; -import fr.insee.arc.core.service.global.dao.DateConversion; import fr.insee.arc.utils.dao.GenericPreparedStatementBuilder; 
import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceHashFileNameTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceHashFileNameTest.java index 249eca8c9..c42373e0e 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceHashFileNameTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceHashFileNameTest.java @@ -4,7 +4,6 @@ import org.junit.Test; -import fr.insee.arc.core.service.global.dao.HashFileNameConversion; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.utils.PrivateConstructorTest; diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceTableOperationTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceTableOperationTest.java index c74fce39e..0ecb06188 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceTableOperationTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServiceTableOperationTest.java @@ -2,8 +2,6 @@ import org.junit.Test; -import fr.insee.arc.core.service.global.dao.HashFileNameConversion; -import fr.insee.arc.core.service.global.dao.TableOperations; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/global/thread/MultiThreadingTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/global/thread/MultiThreadingTest.java index b11a720c7..02026204f 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/global/thread/MultiThreadingTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/global/thread/MultiThreadingTest.java @@ -2,7 +2,6 @@ import static org.junit.Assert.assertEquals; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -11,8 +10,6 @@ import org.junit.Test; -import fr.insee.arc.core.service.global.thread.MultiThreading; - public class MultiThreadingTest { @Test diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java index cd1bc4297..9e8952f68 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java @@ -8,7 +8,6 @@ import fr.insee.arc.core.service.engine.initialisation.BddPatcherTest; import fr.insee.arc.core.service.global.bo.Sandbox; -import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/ressourceUtils/LogConfigurator.java b/arc-utils/src/main/java/fr/insee/arc/utils/ressourceUtils/LogConfigurator.java index 56b426d14..406f1df0d 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/ressourceUtils/LogConfigurator.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/ressourceUtils/LogConfigurator.java @@ -1,19 +1,16 @@ package fr.insee.arc.utils.ressourceUtils; import java.io.File; -import java.io.Serializable; import 
java.net.URL; import java.util.Map; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.Filter; -import org.apache.logging.log4j.core.Layout; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.ConsoleAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; import org.apache.logging.log4j.core.appender.rolling.TimeBasedTriggeringPolicy; -import org.apache.logging.log4j.core.appender.rolling.TriggeringPolicy; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.LoggerConfig; diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java index ac59b8b2c..7a431a3b8 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java @@ -14,369 +14,339 @@ import fr.insee.arc.utils.textUtils.IConstanteCaractere; import fr.insee.arc.utils.textUtils.IConstanteNumerique; -public class FormatSQL implements IConstanteCaractere, IConstanteNumerique -{ - public static final String NULL = "null"; - public static final String NO_VACUUM = " (autovacuum_enabled = false, toast.autovacuum_enabled = false) "; - public static final String WITH_NO_VACUUM = " WITH" + NO_VACUUM; - - // temporary table generation token name - public static final String TMP = "$tmp$"; - public static final String REGEX_TMP = "\\$tmp\\$"; - - - public static final boolean DROP_FIRST_FALSE = false; - public static final boolean DROP_FIRST_TRUE = true; - - public static final int TAILLE_MAXIMAL_BLOC_SQL = 700000; - - public static final int TIMEOUT_MAINTENANCE = 600000; - - public static final String VACUUM_OPTION_NONE=""; - public static final String VACUUM_OPTION_FULL="full"; - - private static final Logger LOGGER = LogManager.getLogger(FormatSQL.class); - - /** - * query to drop a table in database - * @param tableName - * @return - */ - public static String dropTable(String... someTables) { - GenericPreparedStatementBuilder query = new GenericPreparedStatementBuilder(); - for (String tableName:someTables) - { - query.build(SQL.DROP, SQL.TABLE, SQL.IF_EXISTS, tableName, SQL.END_QUERY, SQL.BR); - } - return query.toString(); - } - - /** - * query to retrieve - * @param table - * @return - */ - public static GenericPreparedStatementBuilder tableExists(String table) { - String tableSchema = extractSchemaNameToken(table); - String tableName = extractTableNameToken(table); - - GenericPreparedStatementBuilder requete = new GenericPreparedStatementBuilder(); - requete.append("SELECT schemaname||'.'||tablename AS table_name FROM pg_tables "); - requete.append("\n WHERE tablename like " + requete.quoteText(tableName.toLowerCase()) + " "); - if (tableSchema!=null) { - requete.append("\n AND schemaname = " + requete.quoteText(tableSchema.toLowerCase()) + " "); +public class FormatSQL implements IConstanteCaractere, IConstanteNumerique { + private FormatSQL() { + throw new IllegalStateException("Utility class"); } - return requete; - } - - - public static String extractSchemaNameToken(String fullTableName) - { - return fullTableName.contains(SQL.DOT.getSqlCode()) ? 
ManipString.substringBeforeFirst(fullTableName, SQL.DOT.getSqlCode()) : null; - } - - public static String extractTableNameToken(String fullTableName) - { - return ManipString.substringAfterFirst(fullTableName, SQL.DOT.getSqlCode()); - } - - - - /** - * Pour récupérer la liste des colonnes d'une table rapidement - * - * @param table - * @return - */ - public static GenericPreparedStatementBuilder listeColonneByHeaders(String table) - { - return new GenericPreparedStatementBuilder("select * from " + table + " where false; "); - } - - /** - * Switch the database user - * @param roleName - * @return - * @throws ArcException - */ - public static String changeRole(String roleName) - { - return "SET role='"+roleName+"';COMMIT;"; + + public static final String NULL = "null"; + public static final String NO_VACUUM = " (autovacuum_enabled = false, toast.autovacuum_enabled = false) "; + public static final String WITH_NO_VACUUM = " WITH" + NO_VACUUM; + + // temporary table generation token name + public static final String TMP = "$tmp$"; + public static final String REGEX_TMP = "\\$tmp\\$"; + + public static final boolean DROP_FIRST_FALSE = false; + public static final boolean DROP_FIRST_TRUE = true; + + public static final int TAILLE_MAXIMAL_BLOC_SQL = 700000; + + public static final int TIMEOUT_MAINTENANCE = 600000; + + public static final String VACUUM_OPTION_NONE = ""; + public static final String VACUUM_OPTION_FULL = "full"; + + private static final Logger LOGGER = LogManager.getLogger(FormatSQL.class); + + /** + * query to drop a table in database + * + * @param tableName + * @return + */ + public static String dropTable(String... someTables) { + GenericPreparedStatementBuilder query = new GenericPreparedStatementBuilder(); + for (String tableName : someTables) { + query.build(SQL.DROP, SQL.TABLE, SQL.IF_EXISTS, tableName, SQL.END_QUERY, SQL.BR); + } + return query.toString(); + } + + /** + * query to retrieve + * + * @param table + * @return + */ + public static GenericPreparedStatementBuilder tableExists(String table) { + String tableSchema = extractSchemaNameToken(table); + String tableName = extractTableNameToken(table); + + GenericPreparedStatementBuilder requete = new GenericPreparedStatementBuilder(); + requete.append("SELECT schemaname||'.'||tablename AS table_name FROM pg_tables "); + requete.append("\n WHERE tablename like " + requete.quoteText(tableName.toLowerCase()) + " "); + if (tableSchema != null) { + requete.append("\n AND schemaname = " + requete.quoteText(tableSchema.toLowerCase()) + " "); + } + return requete; + } + + public static String extractSchemaNameToken(String fullTableName) { + return fullTableName.contains(SQL.DOT.getSqlCode()) + ? 
ManipString.substringBeforeFirst(fullTableName, SQL.DOT.getSqlCode()) + : null; + } + + public static String extractTableNameToken(String fullTableName) { + return ManipString.substringAfterFirst(fullTableName, SQL.DOT.getSqlCode()); } + /** + * Pour récupérer la liste des colonnes d'une table rapidement + * + * @param table + * @return + */ + public static GenericPreparedStatementBuilder listeColonneByHeaders(String table) { + return new GenericPreparedStatementBuilder("select * from " + table + " where false; "); + } + + /** + * Switch the database user + * + * @param roleName + * @return + * @throws ArcException + */ + public static String changeRole(String roleName) { + return "SET role='" + roleName + "';COMMIT;"; + } + + /** + * timeOut + */ + public static String setTimeOutMaintenance() { + return "BEGIN;SET statement_timeout=" + TIMEOUT_MAINTENANCE + ";COMMIT;"; + } + + public static String resetTimeOutMaintenance() { + return "BEGIN;RESET statement_timeout;COMMIT;"; + } + + /** + * essaie d'exectuer une requete et si elle n'échoue ne fait rien + */ + public static String tryQuery(String query) { + return "do $$ begin " + query + " exception when others then end; $$; "; + } + + /** + * Met entre cote ou renvoie null (comme pour un champ de base de donnée) + * + * @param t + * @return + */ + public static String cast(String t) { + if (t == null) { + return "null"; + } else { + return "'" + t + "'"; + } + } + + /** + * Lance un vacuum d'un certain type sur une table + * + * @param table + * @param type + * @return + */ + public static String vacuumSecured(String table, String type) { + return "VACUUM " + type + " " + table + "; COMMIT; \n"; + } + + /** + * Lance un vacuum d'un certain type sur une table + * + * @param table + * @param type + * @return + */ + public static String analyzeSecured(String table) { + return "ANALYZE " + table + "; COMMIT; \n"; + } - - /** - * timeOut - */ - public static String setTimeOutMaintenance() - { - return "BEGIN;SET statement_timeout="+TIMEOUT_MAINTENANCE+";COMMIT;"; - } - - public static String resetTimeOutMaintenance() - { - return "BEGIN;RESET statement_timeout;COMMIT;"; - } - - /** - * essaie d'exectuer une requete et si elle n'échoue ne fait rien - */ - public static String tryQuery(String query) - { - return "do $$ begin " + query + " exception when others then end; $$; "; - } - - /** - * Met entre cote ou renvoie null (comme pour un champ de base de donnée) - * - * @param t - * @return - */ - public static String cast(String t) - { - if (t == null) - { - return "null"; - } - else - { - return "'" + t + "'"; - } - } - - /** - * Lance un vacuum d'un certain type sur une table - * @param table - * @param type - * @return - */ - public static String vacuumSecured(String table, String type) - { - return "VACUUM "+ type +" " + table + "; COMMIT; \n"; - } - - /** - * Lance un vacuum d'un certain type sur une table - * @param table - * @param type - * @return - */ - public static String analyzeSecured(String table) - { - return "ANALYZE " + table + "; COMMIT; \n"; - } - - /** - * CREATE TABLE @tableOut as SELECT all_columns FROM @tableIn WHERE @where - * @param tableIn - * @param tableOut - * @param where - * @return - */ - public static String createTableAsSelectWhere(String tableIn, String tableOut, String where) - { - StringBuilder requete = new StringBuilder(); + /** + * CREATE TABLE @tableOut as SELECT all_columns FROM @tableIn WHERE @where + * + * @param tableIn + * @param tableOut + * @param where + * @return + */ + public static String 
createTableAsSelectWhere(String tableIn, String tableOut, String where) { + StringBuilder requete = new StringBuilder(); requete.append(FormatSQL.dropTable(tableOut)); - requete.append("\n CREATE "); - if (!tableOut.contains(".")) - { - requete.append("TEMPORARY "); - } - else - { - requete.append(" "); - } - requete.append("TABLE ").append(tableOut).append(" ").append(FormatSQL.WITH_NO_VACUUM) - .append(" AS SELECT * FROM ").append(tableIn).append(" a WHERE ").append(where); - requete.append("; "); - return requete.toString(); - } - - - /** - * Recopie une table à l'identique - * - * @param table - * @param where - * @param triggersAndIndexes - * @return - */ - public static StringBuilder rebuildTableAsSelectWhere(String table, String where) - { - String tableRebuild = temporaryTableName(table, "RB"); - - StringBuilder requete = new StringBuilder(); - requete.append("set enable_nestloop=off; "); - - requete.append(createTableAsSelectWhere(table, tableRebuild, where)); - + requete.append("\n CREATE "); + if (!tableOut.contains(".")) { + requete.append("TEMPORARY "); + } else { + requete.append(" "); + } + requete.append("TABLE ").append(tableOut).append(" ").append(FormatSQL.WITH_NO_VACUUM) + .append(" AS SELECT * FROM ").append(tableIn).append(" a WHERE ").append(where); + requete.append("; "); + return requete.toString(); + } + + /** + * Recopie une table à l'identique + * + * @param table + * @param where + * @param triggersAndIndexes + * @return + */ + public static StringBuilder rebuildTableAsSelectWhere(String table, String where) { + String tableRebuild = temporaryTableName(table, "RB"); + + StringBuilder requete = new StringBuilder(); + requete.append("set enable_nestloop=off; "); + + requete.append(createTableAsSelectWhere(table, tableRebuild, where)); + requete.append(FormatSQL.dropTable(table)); - - requete.append( - "\n ALTER TABLE " + tableRebuild + " RENAME TO " + ManipString.substringAfterFirst(table, ".") + " ;"); - requete.append("set enable_nestloop=on; "); - return requete; - } - - - /** - * this sql block test is the query to test is true to execute the other query - * @param queryToTest - * @param queryToExecute - * @return - */ - public static String executeIf(String queryToTest, String queryToExecute) - { - StringBuilder query=new StringBuilder(); - query - .append("do $$ declare b boolean; begin execute ") - .append(quoteText(queryToTest)) - .append(" into b; ") - .append("if (b) then execute ") - .append(quoteText(queryToExecute)) - .append("; end if; end; $$;"); - return query.toString(); - } - - public static String executeIf(StringBuilder queryToTest, StringBuilder queryToExecute) - { - return executeIf(queryToTest.toString(), queryToExecute.toString()); - } - - /** - * query that return true is the query as at least one record - * @param tableIn - * @return - */ - public static String hasRecord(String tableIn) - { - return "SELECT (count(*)>0) as has_record FROM (SELECT 1 FROM " + tableIn + " LIMIT 1) u"; - } - - /** - * check if table is temporary according to its name - * no SQL.DOT in temporary - * @return - */ - public static boolean isTemporary(String tablename) - { - return !tablename.contains(SQL.DOT.getSqlCode()); - } - - - /** - * Ajoute un suffixe de table temporaire au nom de table {@code aName} - * - * @param aName - * @return - */ - public static final String temporaryTableName(String aName) - { - String newName = aName.split(REGEX_TMP)[0]; - // on met la date du jour dans le nom de la table - String l = System.currentTimeMillis() + ""; - // on 
prend que les 10 derniers chiffres (durrée de vie : 6 mois) - l = l.substring(l.length() - 10); - // on inverse la chaine de caractere pour avoir les millisecondes en - // premier en cas de troncature - l = new StringBuffer(l).reverse().toString(); - return new StringBuilder(newName).append(TMP).append(l).append(dollar).append(randomNumber(4)).toString(); - } - - /** - * Ajoute un suffixe de table temporaire au nom de table {@code prefix} - * - * @param aName - * le nom de la table - * @param suffix - * un suffixe - * @return - */ - public static final String temporaryTableName(String aName, String suffix) - { - String newName = aName.split(REGEX_TMP)[0]; - return temporaryTableName(newName + underscore + suffix); - } - - /** - * - * @return Un nombre aléatoire d'une certaine précision - */ - public static final String randomNumber(int precision) - { - String rn = ((int) Math.floor((Math.random() * (Math.pow(10, precision))))) + ""; - return ManipString.padLeft(rn, "0", precision); - } - - /** - * converti une chaine de caractere pour etre mise en parametre d'un sql si - * c'est vide, ca devient "null" quote devient quote quote - * - * @param val - * @return - */ - public static String textToSql(String val) - { - return val == null ? "NULL" : "'" + val.replace("'", "''") + "'"; - } - - /** - * Ne garde que les séparateurs - * - * @param tokens - * @param separator - * @return - */ - public static String toNullRow(Collection tokens) - { - return (tokens == null || tokens.isEmpty()) ? "(" + empty + ")" - : "(" + StringUtils.repeat(",", tokens.size() - 1) + ")"; - } - - /** - * Renvoie les tables héritant de celle-ci - * Colonnes de résultat: - * @child (schema.table) - */ - public static GenericPreparedStatementBuilder getAllInheritedTables(String tableSchema, String tableName) { - GenericPreparedStatementBuilder requete = new GenericPreparedStatementBuilder(); - requete.append("\n SELECT cn.nspname||'.'||c.relname AS child "); - requete.append("\n FROM pg_inherits "); - requete.append("\n JOIN pg_class AS c ON (inhrelid=c.oid) "); - requete.append("\n JOIN pg_class as p ON (inhparent=p.oid) "); - requete.append("\n JOIN pg_namespace pn ON pn.oid = p.relnamespace "); - requete.append("\n JOIN pg_namespace cn ON cn.oid = c.relnamespace "); - requete.append("\n WHERE p.relname = "+requete.quoteText(tableName)+" and pn.nspname = "+requete.quoteText(tableSchema)+" "); - return requete; - } - - /** - * escape quote return value through function - * @param s - * @return - * @throws ArcException - */ - public static String quoteText(String s) - { - try { + + requete.append( + "\n ALTER TABLE " + tableRebuild + " RENAME TO " + ManipString.substringAfterFirst(table, ".") + " ;"); + requete.append("set enable_nestloop=on; "); + return requete; + } + + /** + * this sql block test is the query to test is true to execute the other query + * + * @param queryToTest + * @param queryToExecute + * @return + */ + public static String executeIf(String queryToTest, String queryToExecute) { + StringBuilder query = new StringBuilder(); + query.append("do $$ declare b boolean; begin execute ").append(quoteText(queryToTest)).append(" into b; ") + .append("if (b) then execute ").append(quoteText(queryToExecute)).append("; end if; end; $$;"); + return query.toString(); + } + + public static String executeIf(StringBuilder queryToTest, StringBuilder queryToExecute) { + return executeIf(queryToTest.toString(), queryToExecute.toString()); + } + + /** + * query that return true is the query as at least one record + * + * 
@param tableIn + * @return + */ + public static String hasRecord(String tableIn) { + return "SELECT (count(*)>0) as has_record FROM (SELECT 1 FROM " + tableIn + " LIMIT 1) u"; + } + + /** + * check if table is temporary according to its name no SQL.DOT in temporary + * + * @return + */ + public static boolean isTemporary(String tablename) { + return !tablename.contains(SQL.DOT.getSqlCode()); + } + + /** + * Ajoute un suffixe de table temporaire au nom de table {@code aName} + * + * @param aName + * @return + */ + public static final String temporaryTableName(String aName) { + String newName = aName.split(REGEX_TMP)[0]; + // on met la date du jour dans le nom de la table + String l = System.currentTimeMillis() + ""; + // on prend que les 10 derniers chiffres (durrée de vie : 6 mois) + l = l.substring(l.length() - 10); + // on inverse la chaine de caractere pour avoir les millisecondes en + // premier en cas de troncature + l = new StringBuffer(l).reverse().toString(); + return new StringBuilder(newName).append(TMP).append(l).append(dollar).append(randomNumber(4)).toString(); + } + + /** + * Ajoute un suffixe de table temporaire au nom de table {@code prefix} + * + * @param aName le nom de la table + * @param suffix un suffixe + * @return + */ + public static final String temporaryTableName(String aName, String suffix) { + String newName = aName.split(REGEX_TMP)[0]; + return temporaryTableName(newName + underscore + suffix); + } + + /** + * + * @return Un nombre aléatoire d'une certaine précision + */ + public static final String randomNumber(int precision) { + String rn = ((int) Math.floor((Math.random() * (Math.pow(10, precision))))) + ""; + return ManipString.padLeft(rn, "0", precision); + } + + /** + * converti une chaine de caractere pour etre mise en parametre d'un sql si + * c'est vide, ca devient "null" quote devient quote quote + * + * @param val + * @return + */ + public static String textToSql(String val) { + return val == null ? "NULL" : "'" + val.replace("'", "''") + "'"; + } + + /** + * Ne garde que les séparateurs + * + * @param tokens + * @param separator + * @return + */ + public static String toNullRow(Collection tokens) { + return (tokens == null || tokens.isEmpty()) ? 
"(" + empty + ")" + : "(" + StringUtils.repeat(",", tokens.size() - 1) + ")"; + } + + /** + * Renvoie les tables héritant de celle-ci Colonnes de résultat: + * + * @child (schema.table) + */ + public static GenericPreparedStatementBuilder getAllInheritedTables(String tableSchema, String tableName) { + GenericPreparedStatementBuilder requete = new GenericPreparedStatementBuilder(); + requete.append("\n SELECT cn.nspname||'.'||c.relname AS child "); + requete.append("\n FROM pg_inherits "); + requete.append("\n JOIN pg_class AS c ON (inhrelid=c.oid) "); + requete.append("\n JOIN pg_class as p ON (inhparent=p.oid) "); + requete.append("\n JOIN pg_namespace pn ON pn.oid = p.relnamespace "); + requete.append("\n JOIN pg_namespace cn ON cn.oid = c.relnamespace "); + requete.append("\n WHERE p.relname = " + requete.quoteText(tableName) + " and pn.nspname = " + + requete.quoteText(tableSchema) + " "); + return requete; + } + + /** + * escape quote return value through function + * + * @param s + * @return + * @throws ArcException + */ + public static String quoteText(String s) { + try { return "'" + Utils.escapeLiteral(null, s, true) + "'"; } catch (SQLException e) { LoggerHelper.errorAsComment(LOGGER, "This string cannot be escaped to postgres database format"); return null; } - } - - /** - * query expression to convert a date format - * @param dateTextIn - * @param formatIn - * @return - */ - public static String toDate(String dateTextIn, String formatIn) - { - return "to_date("+dateTextIn+"::text,"+formatIn+")"; - } - + } + + /** + * query expression to convert a date format + * + * @param dateTextIn + * @param formatIn + * @return + */ + public static String toDate(String dateTextIn, String formatIn) { + return "to_date(" + dateTextIn + "::text," + formatIn + ")"; + } + } diff --git a/arc-utils/src/test/java/fr/insee/arc/utils/files/FileUtilsArcTest.java b/arc-utils/src/test/java/fr/insee/arc/utils/files/FileUtilsArcTest.java index 5d99018cf..2086852a6 100644 --- a/arc-utils/src/test/java/fr/insee/arc/utils/files/FileUtilsArcTest.java +++ b/arc-utils/src/test/java/fr/insee/arc/utils/files/FileUtilsArcTest.java @@ -6,10 +6,7 @@ import java.io.File; import java.io.IOException; -import java.io.RandomAccessFile; import java.lang.reflect.InvocationTargetException; -import java.nio.file.Path; -import java.nio.file.Paths; import org.junit.Rule; import org.junit.Test; diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/all/util/WebLoggerDispatcher.java b/arc-web/src/main/java/fr/insee/arc/web/gui/all/util/WebLoggerDispatcher.java index 4452f2298..07a6e9ba5 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/all/util/WebLoggerDispatcher.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/all/util/WebLoggerDispatcher.java @@ -1,7 +1,6 @@ package fr.insee.arc.web.gui.all.util; import org.apache.logging.log4j.Logger; -import org.springframework.beans.factory.annotation.Autowired; import fr.insee.arc.core.util.LoggerDispatcher; diff --git a/arc-web/src/test/java/fr/insee/arc/web/gui/pilotage/dao/PilotageDaoTest.java b/arc-web/src/test/java/fr/insee/arc/web/gui/pilotage/dao/PilotageDaoTest.java index 396bf5961..f5f8d3b87 100644 --- a/arc-web/src/test/java/fr/insee/arc/web/gui/pilotage/dao/PilotageDaoTest.java +++ b/arc-web/src/test/java/fr/insee/arc/web/gui/pilotage/dao/PilotageDaoTest.java @@ -1,6 +1,5 @@ package fr.insee.arc.web.gui.pilotage.dao; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.List; diff --git 
a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java index bf273d466..3d21cb41e 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java @@ -8,7 +8,6 @@ import org.json.JSONArray; import org.json.JSONObject; -import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.exception.ArcException; diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java index 34de478a2..80a3749d3 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java @@ -14,7 +14,6 @@ import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteEngineController.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteEngineController.java index 75d245f48..1609978f8 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteEngineController.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteEngineController.java @@ -75,12 +75,11 @@ public ResponseEntity executeEngineClient( switch (TraitementPhase.getPhase(i)) { case CHARGEMENT: // register file - String tableRegleChargement = env + ".chargement_regle"; try (InputStream inputStream = new ByteArrayInputStream( bodyPojo.fileContent.getBytes(StandardCharsets.UTF_8));) { ChargeurXmlComplexe chargeur = new ChargeurXmlComplexe(connection, bodyPojo.fileName, inputStream, currentTemporaryTable(i), - bodyPojo.norme, bodyPojo.periodicite, bodyPojo.validite, tableRegleChargement); + bodyPojo.norme, bodyPojo.periodicite, bodyPojo.validite, env); chargeur.executeEngine(); structure = chargeur.jointure.replace("''", "'"); }
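The hunks above (ThreadMappingService, ExecuteEngineController) share one pattern: callers no longer receive a pre-built rule-table name such as env + ".chargement_regle" or the old tableJeuDeRegle field, they pass the execution environment and resolve the qualified name through ViewEnum when it is needed. A minimal sketch of that resolution follows; the schema value "arc_bas1" and the physical name "jeuderegle" are illustrative assumptions, only the ViewEnum.JEUDEREGLE.getFullName(envExecution) call itself is taken from the diff.

    // Sketch only -- schema and resulting table name are illustrative, not taken from the codebase.
    String envExecution = "arc_bas1";            // sandbox schema of the current execution
    // Before the refactor: the name travelled as a field, e.g. this.getTableJeuDeRegle().
    // After the refactor: it is rebuilt on demand from the enum and the execution environment.
    String jeuDeRegle = ViewEnum.JEUDEREGLE.getFullName(envExecution);
    // expected to yield something like "arc_bas1.jeuderegle"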
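The FormatSQL.java hunk is essentially a re-indentation plus a private constructor marking the class as a utility class; helper behaviour is unchanged. For readers skimming the hunk, a small usage sketch of two of the helpers it touches, with invented table names:

    // Illustrative only; table names are invented for the example.
    // temporaryTableName strips any previous $tmp$ suffix, then appends
    // $tmp$ + a reversed millisecond timestamp + $ + a 4-digit random number.
    String work = FormatSQL.temporaryTableName("arc_bas1.chargement_ok", "RB");
    // roughly "arc_bas1.chargement_ok_RB$tmp$<reversed-millis>$<nnnn>"

    // dropTable emits one statement per argument.
    String cleanup = FormatSQL.dropTable(work, "arc_bas1.chargement_ko");
    // roughly "DROP TABLE IF EXISTS <work>; DROP TABLE IF EXISTS arc_bas1.chargement_ko; "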
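Likewise, executeIf and hasRecord, both visible in the hunk, combine into a guard that runs a statement only when a table has at least one row. A sketch of what they produce, assuming a placeholder action query (its table and column are not taken from the codebase):

    // Illustrative only; the action query and its table/column are placeholders.
    String test = FormatSQL.hasRecord("arc_bas1.chargement_ko");
    // "SELECT (count(*)>0) as has_record FROM (SELECT 1 FROM arc_bas1.chargement_ko LIMIT 1) u"
    String action = "UPDATE arc_bas1.pilotage_fichier SET etape = 2";
    String guarded = FormatSQL.executeIf(test, action);
    // roughly: do $$ declare b boolean; begin execute '<test>' into b;
    //          if (b) then execute '<action>'; end if; end; $$;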