From 70e7ac60f640bca2e2fab6ca3850fb7ffe4deb02 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 25 Sep 2023 15:29:00 +0200 Subject: [PATCH] refactor ApiInitialisation --- .../java/fr/insee/arc/batch/BatchARC.java | 32 ++++---- .../arc/core/service/global/ApiService.java | 8 ++ .../global/dao/DatabaseMaintenance.java | 73 ++++++++----------- .../scalability/ServiceScalability.java | 18 ++++- .../ApiInitialisationService.java | 67 +---------------- .../ResetEnvironmentService.java | 42 +++++++---- .../pilotage/SynchronizeDataByPilotage.java | 5 +- .../dao/SynchronizeDataByPilotageDao.java | 71 +++++++++--------- .../ResetEnvironmentOperation.java | 47 +++++++++++- .../p1reception/ApiReceptionService.java | 2 + .../fr/insee/arc/utils/dao/UtilitaireDao.java | 4 +- .../fr/insee/arc/utils/utils/FormatSQL.java | 2 +- ...ortStep1InitializeClientTablesService.java | 8 +- .../ImportStep2GetTableNameService.java | 8 +- .../ImportStep3GetTableDataService.java | 8 +- .../ws/services/importServlet/ServletArc.java | 6 +- .../actions/InitiateRequest.java | 4 +- .../importServlet}/actions/SendResponse.java | 2 +- .../services/importServlet/bo}/JsonKeys.java | 2 +- .../importServlet}/dao/ClientDao.java | 4 +- .../importServlet}/dao/ClientDaoImpl.java | 8 +- 21 files changed, 210 insertions(+), 211 deletions(-) rename arc-ws/src/main/java/fr/insee/arc/ws/{ => services/importServlet}/actions/InitiateRequest.java (93%) rename arc-ws/src/main/java/fr/insee/arc/ws/{ => services/importServlet}/actions/SendResponse.java (93%) rename {arc-utils/src/main/java/fr/insee/arc/utils/utils => arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo}/JsonKeys.java (94%) rename arc-ws/src/main/java/fr/insee/arc/ws/{ => services/importServlet}/dao/ClientDao.java (98%) rename arc-ws/src/main/java/fr/insee/arc/ws/{ => services/importServlet}/dao/ClientDaoImpl.java (98%) diff --git a/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java b/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java index caad21864..2c10c642e 100644 --- a/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java +++ b/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java @@ -80,9 +80,6 @@ class BatchARC implements IReturnCode { // reste à faire private Integer numberOfIterationBewteenCheckTodo; - // nombre de pods utilisés par ARC - private Integer numberOfPods; - // true = the batch will resume the process from a formerly interrupted batch // false = the batch will proceed to a new load // Maintenance initialization process can only occur in this case @@ -142,9 +139,6 @@ private void initParameters() { numberOfIterationBewteenCheckTodo = bdParameters.getInt(null, "LanceurARC.DATABASE_CHECKTODO_ROUTINE_INTERVAL", 10); - // the number of executor nods declared for scalability - numberOfPods = ArcDatabase.numberOfExecutorNods(); - // either we take env and envExecution from database or properties // default is from properties if (Boolean.parseBoolean(bdParameters.getString(null, "LanceurARC.envFromDatabase", "false"))) { @@ -240,13 +234,10 @@ private void maintenanceTablePilotageBatch() throws ArcException { requete.append( "\n insert into arc.pilotage_batch select '1900-01-01:00','O' where not exists (select 1 from arc.pilotage_batch); "); UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(null, requete); - - for (int poolIndex = 0; poolIndex <= numberOfPods; poolIndex++) { - // Maintenance full du catalog - DatabaseMaintenance.maintenancePgCatalog(poolIndex, null, FormatSQL.VACUUM_OPTION_FULL); - // maintenance 
des tables métier de la base de données - DatabaseMaintenance.maintenanceDatabaseClassic(poolIndex, null, envExecution); - } + + DatabaseMaintenance.maintenancePgCatalogAllNods(null, FormatSQL.VACUUM_OPTION_FULL); + DatabaseMaintenance.maintenancePilotage(null, envExecution, FormatSQL.VACUUM_OPTION_NONE); + } /** @@ -590,9 +581,9 @@ private void startPhaseThread() for (TraitementPhase phase : phases) { // if no thread in phase, start one if (pool.get(phase).isEmpty()) { - PhaseThreadFactory a = new PhaseThreadFactory(mapParam, phase); - a.start(); - pool.get(phase).add(a); + PhaseThreadFactory thread = new PhaseThreadFactory(mapParam, phase); + thread.start(); + pool.get(phase).add(thread); } // delay between phases not to overload Sleep.sleep(delay); @@ -606,10 +597,13 @@ private void startMaintenanceThread() { maintenance = new Thread() { @Override public void run() { - for (int poolIndex = 0; poolIndex <= numberOfPods; poolIndex++) { - DatabaseMaintenance.maintenanceDatabaseClassic(poolIndex, null, - envExecution); + try { + DatabaseMaintenance.maintenancePgCatalogAllNods(null, FormatSQL.VACUUM_OPTION_NONE); + DatabaseMaintenance.maintenancePilotage(null, envExecution, FormatSQL.VACUUM_OPTION_NONE); + } catch (ArcException e) { + e.logMessageException(); } + } }; maintenance.start(); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java index 06928947f..1a181a0f1 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java @@ -696,6 +696,14 @@ public void setListeNorme(List listeNorme) { this.listeNorme = listeNorme; } + public Sandbox getCoordinatorSandbox() { + return coordinatorSandbox; + } + + public void setCoordinatorSandbox(Sandbox coordinatorSandbox) { + this.coordinatorSandbox = coordinatorSandbox; + } + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java index 759e880af..f45d6aa2a 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DatabaseMaintenance.java @@ -1,13 +1,16 @@ package fr.insee.arc.core.service.global.dao; import java.sql.Connection; +import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.dataobjects.ViewEnum; +import fr.insee.arc.core.service.global.scalability.ServiceScalability; import fr.insee.arc.core.util.StaticLoggerDispatcher; +import fr.insee.arc.utils.consumer.ThrowingConsumer; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.utils.FormatSQL; @@ -20,6 +23,20 @@ private DatabaseMaintenance() { protected static final Logger LOGGER = LogManager.getLogger(DatabaseMaintenance.class); + + /** + * dispatch on every nods the mainteance of catalog + * @param optionalProvidedIdSourceToDrop + * @throws ArcException + */ + public static void maintenancePgCatalogAllNods(Connection coordinatorConnection, String type) throws ArcException { + + ThrowingConsumer function = executorConnection -> UtilitaireDao.get(0).maintenancePgCatalog(executorConnection, type); + + ServiceScalability.dispatchOnNods(coordinatorConnection, function, 
function); + + } + /** * Maintenance sur la table de pilotage * @@ -27,62 +44,32 @@ private DatabaseMaintenance() { * @param envExecution * @param type */ - private static void maintenancePilotage(Integer poolIndex, Connection connexion, String envExecution, String type) { - String tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution); + public static void maintenancePilotage(Connection coordinatorConnection, String envExecution, String type) { StaticLoggerDispatcher.info(LOGGER, "** Maintenance Pilotage **"); + String tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution); + try { - UtilitaireDao.get(poolIndex).executeImmediate(connexion, FormatSQL.analyzeSecured(tablePil)); - UtilitaireDao.get(poolIndex).executeImmediate(connexion, FormatSQL.vacuumSecured(tablePil, type)); + UtilitaireDao.get(0).executeImmediate(coordinatorConnection, FormatSQL.vacuumSecured(tablePil, type)); + UtilitaireDao.get(0).executeImmediate(coordinatorConnection, FormatSQL.analyzeSecured(tablePil)); } catch (Exception e) { StaticLoggerDispatcher.error(LOGGER, "Error in ApiService.maintenancePilotage"); } } /** - * - * @param connexion - * @param type - */ - public static void maintenancePgCatalog(Integer poolIndex, Connection connexion, String type) { - // postgres libere mal l'espace sur ces tables qaund on fait trop d'opération - // sur les colonnes - // vaccum full sinon ca fait quasiment rien ... - StaticLoggerDispatcher.info(LOGGER, "** Maintenance Catalogue **"); - UtilitaireDao.get(poolIndex).maintenancePgCatalog(connexion, type); - } - - /** - * classic database maintenance routine 2 vacuum are sent successively to - * analyze and remove dead tuple completely from + * analyze and vacuum the postgres catalog tables analyze + * vacuum the pilotage table located in the sandbox schema * * @param connexion the jdbc connexion * @param envExecution the sandbox schema + * @throws ArcException */ - public static void maintenanceDatabaseClassic(Connection connexion, String envExecution) { - maintenanceDatabaseClassic(0, connexion, envExecution); - } - - public static void maintenanceDatabaseClassic(Integer poolIndex, Connection connexion, String envExecution) { - maintenanceDatabase(poolIndex, connexion, envExecution, FormatSQL.VACUUM_OPTION_NONE); - } - - /** - * analyze and vacuum on postgres catalog tables analyze and vacuum on the - * pilotage table located in the sandbox schema - * - * @param connexion the jdbc connexion - * @param envExecution the sandbox schema - * @param typeMaintenance FormatSQL.VACUUM_OPTION_FULL or - * FormatSQL.VACUUM_OPTION_NONE - */ - private static void maintenanceDatabase(Integer poolIndex, Connection connexion, String envExecution, - String typeMaintenance) { - maintenancePgCatalog(poolIndex, connexion, typeMaintenance); - - maintenancePilotage(poolIndex, connexion, envExecution, typeMaintenance); - - StaticLoggerDispatcher.info(LOGGER, "** Fin de maintenance **"); + public static void maintenanceDatabaseClassic(Connection coordinatorConnection, String envExecution) throws ArcException { + + maintenancePgCatalogAllNods(coordinatorConnection, FormatSQL.VACUUM_OPTION_NONE); + + maintenancePilotage(coordinatorConnection, envExecution, FormatSQL.VACUUM_OPTION_NONE); } } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java index 334d0e2ed..70a0fe90a 100644 --- 
a/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java @@ -27,7 +27,23 @@ private ServiceScalability() { */ public static int dispatchOnNods(Connection coordinatorConnexion, ThrowingConsumer actionOnCoordinator, ThrowingConsumer actionOnExecutor) throws ArcException { - actionOnCoordinator.accept(coordinatorConnexion); + if (coordinatorConnexion==null) + { + try (Connection newCoordinatorConnexion = UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getDriverConnexion()) + { + actionOnCoordinator.accept(newCoordinatorConnexion); + + } catch (SQLException | ArcException e) { + ArcException customException = new ArcException(e, ArcExceptionMessage.DATABASE_INITIALISATION_SCRIPT_FAILED); + customException.logFullException(); + throw customException; + } + } + else + { + actionOnCoordinator.accept(coordinatorConnexion); + } + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java index cfe23a990..0288a0425 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java @@ -1,33 +1,14 @@ package fr.insee.arc.core.service.p0initialisation; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.springframework.stereotype.Component; -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.core.model.TraitementEtat; -import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.ApiService; -import fr.insee.arc.core.service.global.dao.DatabaseMaintenance; -import fr.insee.arc.core.service.global.dao.FileSystemManagement; -import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.p0initialisation.filesystem.RestoreFileSystem; import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata; import fr.insee.arc.core.service.p0initialisation.pilotage.CleanPilotage; import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; import fr.insee.arc.core.service.p0initialisation.useroperation.ReplayOrDeleteFiles; -import fr.insee.arc.core.service.p0initialisation.useroperation.ResetEnvironmentOperation; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; -import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.utils.utils.FormatSQL; -import fr.insee.arc.utils.utils.LoggerHelper; /** * ApiNormageService @@ -49,8 +30,6 @@ public ApiInitialisationService() { super(); } - private static final Logger LOGGER = LogManager.getLogger(ApiInitialisationService.class); - public ApiInitialisationService(String aCurrentPhase, String aEnvExecution, String aDirectoryRoot, Integer aNbEnr, String paramBatch) { super(aCurrentPhase, aEnvExecution, aDirectoryRoot, aNbEnr, paramBatch); @@ -70,7 +49,8 @@ public void executer() throws ArcException { new 
SynchronizeUserRulesAndMetadata(this.coordinatorSandbox).synchroniserSchemaExecutionAllNods(); // marque les fichiers ou les archives à rejouer - // efface des fichiers de la table de pilotage marqués par l'utilisateur comme étant à effacer + // efface des fichiers de la table de pilotage marqués par l'utilisateur comme + // étant à effacer new ReplayOrDeleteFiles(this.coordinatorSandbox).processMarkedFiles(); // Met en cohérence les table de données avec la table de pilotage de @@ -83,47 +63,4 @@ public void executer() throws ArcException { } - public void retourPhasePrecedente(TraitementPhase phase, ArcPreparedStatementBuilder querySelection, - List listEtat) throws ArcException { - new ResetEnvironmentOperation(this.coordinatorSandbox).retourPhasePrecedente(phase, querySelection, listEtat); - } - - - - public void resetEnvironnement() { - try { - new SynchronizeDataByPilotage(this.coordinatorSandbox).synchronizeDataByPilotage(); - DatabaseMaintenance.maintenanceDatabaseClassic(connexion.getCoordinatorConnection(), envExecution); - } catch (Exception e) { - LoggerHelper.error(LOGGER, e); - } - } - - public static void clearPilotageAndDirectories(String repertoire, String env) throws ArcException { - UtilitaireDao.get(0).executeBlock(null, "truncate " + TableNaming.dbEnv(env) + "pilotage_fichier; "); - UtilitaireDao.get(0).executeBlock(null, "truncate " + TableNaming.dbEnv(env) + "pilotage_archive; "); - - if (Boolean.TRUE.equals(UtilitaireDao.get(0).hasResults(null, FormatSQL.tableExists("arc.ihm_entrepot")))) { - ArrayList entrepotList = new GenericBean(UtilitaireDao.get(0).executeRequest(null, - new ArcPreparedStatementBuilder("select id_entrepot from arc.ihm_entrepot"))).mapContent() - .get("id_entrepot"); - if (entrepotList != null) { - for (String s : entrepotList) { - FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEntrepot(repertoire, env, s)).toFile()); - FileUtilsArc.deleteAndRecreateDirectory(Paths - .get(ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, env, s)).toFile()); - } - } - } - FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEtatEnCours(repertoire, env)).toFile()); - FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEtatOK(repertoire, env)).toFile()); - FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEtatKO(repertoire, env)).toFile()); - FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(FileSystemManagement.directoryEnvExport(repertoire, env)).toFile()); - } - } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java index 17b99a521..d29eb992e 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java @@ -1,5 +1,6 @@ package fr.insee.arc.core.service.p0initialisation; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; @@ -9,8 +10,20 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; +import fr.insee.arc.core.service.global.bo.Sandbox; +import fr.insee.arc.core.service.global.dao.DatabaseMaintenance; +import 
fr.insee.arc.core.service.global.dao.FileSystemManagement; +import fr.insee.arc.core.service.global.dao.TableNaming; +import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; +import fr.insee.arc.core.service.p0initialisation.useroperation.ResetEnvironmentOperation; +import fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.core.util.StaticLoggerDispatcher; +import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.files.FileUtilsArc; +import fr.insee.arc.utils.structure.GenericBean; +import fr.insee.arc.utils.utils.FormatSQL; +import fr.insee.arc.utils.utils.LoggerHelper; public class ResetEnvironmentService { @@ -27,19 +40,17 @@ private ResetEnvironmentService() { * @param env * @param rootDirectory * @param undoFilesSelection - * @throws ArcException + * @throws ArcException */ public static void backToTargetPhase(TraitementPhase phaseAExecuter, String env, String rootDirectory, ArcPreparedStatementBuilder undoFilesSelection) throws ArcException { if (phaseAExecuter.getOrdre() == TraitementPhase.INITIALISATION.getOrdre()) { resetBAS(env, rootDirectory); } else { - ApiInitialisationService serv = new ApiInitialisationService(TraitementPhase.INITIALISATION.toString(), - env, rootDirectory, TraitementPhase.INITIALISATION.getNbLigneATraiter(), - null); + ApiInitialisationService serv = new ApiInitialisationService(TraitementPhase.INITIALISATION.toString(), env, + rootDirectory, TraitementPhase.INITIALISATION.getNbLigneATraiter(), null); try { - serv.retourPhasePrecedente(phaseAExecuter, undoFilesSelection, - new ArrayList<>(Arrays.asList(TraitementEtat.OK, TraitementEtat.KO))); + new ResetEnvironmentOperation(serv.getCoordinatorSandbox()).retourPhasePrecedente(phaseAExecuter, undoFilesSelection); } finally { serv.finaliser(); } @@ -54,15 +65,18 @@ public static void backToTargetPhase(TraitementPhase phaseAExecuter, String env, * @param rootDirectory */ public static void resetBAS(String env, String rootDirectory) { + + ApiInitialisationService service = new ApiInitialisationService(TraitementPhase.INITIALISATION.toString(), env, + rootDirectory, TraitementPhase.INITIALISATION.getNbLigneATraiter(), null); try { - ApiInitialisationService.clearPilotageAndDirectories(rootDirectory, env); - } catch (Exception e) { - StaticLoggerDispatcher.info(LOGGER, e); - } - ApiInitialisationService service = new ApiInitialisationService(TraitementPhase.INITIALISATION.toString(), - env, rootDirectory, TraitementPhase.INITIALISATION.getNbLigneATraiter(), null); - try { - service.resetEnvironnement(); + // delete files and pilotage tables + new ResetEnvironmentOperation(service.getCoordinatorSandbox()).clearPilotageAndDirectories(rootDirectory); + + // synchronize + new SynchronizeDataByPilotage(service.getCoordinatorSandbox()).synchronizeDataByPilotage(); + + } catch (ArcException e) { + e.logFullException(); } finally { service.finaliser(); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java index cd93dd49e..277ccd723 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java @@ -71,8 +71,9 @@ public void synchronizeDataByPilotage() throws 
ArcException { // correctement sans mettre en oeuvre // une stratégie de vacuum hyper agressive et donc ajouter une spécificité pour // les DBAs - DatabaseMaintenance.maintenanceDatabaseClassic(this.sandbox.getConnection(), this.sandbox.getSchema()); - + + DatabaseMaintenance.maintenancePgCatalogAllNods(this.sandbox.getConnection(), FormatSQL.VACUUM_OPTION_NONE); + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/SynchronizeDataByPilotageDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/SynchronizeDataByPilotageDao.java index 167f5e56a..e3982be35 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/SynchronizeDataByPilotageDao.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/SynchronizeDataByPilotageDao.java @@ -7,7 +7,6 @@ import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.utils.dao.CopyObjectsToDatabase; @@ -21,23 +20,26 @@ public class SynchronizeDataByPilotageDao { - + private SynchronizeDataByPilotageDao() { + throw new IllegalStateException("Utility class"); + } + /** * remove temporary states from pilotage table + * * @param connection * @param envExecution * @throws ArcException */ - public static void resetEtapePilotageDao(Connection connection, String envExecution) throws ArcException - { + public static void resetEtapePilotageDao(Connection connection, String envExecution) throws ArcException { String tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution); - + StringBuilder requete = new StringBuilder(); - + requete.append("DELETE FROM " + tablePil + " WHERE etat_traitement='{ENCOURS}';"); - + requete.append(PilotageOperations.resetPreviousPhaseMark(tablePil, null, null)); - + requete.append("WITH tmp_1 as (select " + ColumnEnum.ID_SOURCE.getColumnName() + ", max("); new StringBuilder(); requete.append("case "); @@ -58,47 +60,45 @@ public static void resetEtapePilotageDao(Connection connection, String envExecut requete.append("when p=" + p.ordinal() + " then '" + p.toString() + "' "); } requete.append("end ; "); - + UtilitaireDao.get(0).executeBlock(connection, requete); } - /** * rebuild to defragment pilotage table + * * @param connexion * @param envExecution * @throws ArcException */ public static void rebuildPilotageDao(Connection connexion, String envExecution) throws ArcException { - + String tablePilotage = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution); - + StringBuilder query = FormatSQL.rebuildTableAsSelectWhere(tablePilotage, "true"); - - query.append("create index idx1_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (" + ColumnEnum.ID_SOURCE.getColumnName() + ");"); - - query.append("create index idx2_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (phase_traitement, etape);"); - - query.append("create index idx4_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (rapport) where rapport is not null;"); - - query.append("create index idx5_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (o_container,v_container);"); - - query.append("create index idx6_" + 
ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (to_delete);"); - - query.append("create index idx7_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (date_entree, phase_traitement, etat_traitement);"); - + + query.append("create index idx1_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " + tablePilotage + + " (" + ColumnEnum.ID_SOURCE.getColumnName() + ");"); + + query.append("create index idx2_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " + tablePilotage + + " (phase_traitement, etape);"); + + query.append("create index idx4_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " + tablePilotage + + " (rapport) where rapport is not null;"); + + query.append("create index idx5_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " + tablePilotage + + " (o_container,v_container);"); + + query.append("create index idx6_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " + tablePilotage + + " (to_delete);"); + + query.append("create index idx7_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " + tablePilotage + + " (date_entree, phase_traitement, etat_traitement);"); + query.append("analyze " + tablePilotage + ";"); - + UtilitaireDao.get(0).executeBlock(connexion, "analyze " + tablePilotage + ";"); } - - /** * Récupere toutes les tables temporaires d'un environnement @@ -120,8 +120,7 @@ public static ArcPreparedStatementBuilder requeteListAllTemporaryTablesInEnv(Str } return requete; } - - + /** * Delete data records from a target table according to a given list of source * to delete diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java index 71c1cb859..e811fe537 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java @@ -1,6 +1,8 @@ package fr.insee.arc.core.service.p0initialisation.useroperation; +import java.nio.file.Paths; import java.sql.Connection; +import java.util.ArrayList; import java.util.List; import org.apache.logging.log4j.LogManager; @@ -13,10 +15,15 @@ import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.global.dao.DatabaseMaintenance; +import fr.insee.arc.core.service.global.dao.FileSystemManagement; import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; +import fr.insee.arc.core.service.p1reception.ApiReceptionService; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.files.FileUtilsArc; +import fr.insee.arc.utils.structure.GenericBean; +import fr.insee.arc.utils.utils.FormatSQL; public class ResetEnvironmentOperation { @@ -40,12 +47,12 @@ public ResetEnvironmentOperation(Sandbox sandbox) { * @param listEtat * @throws ArcException */ - public void retourPhasePrecedente(TraitementPhase phase, ArcPreparedStatementBuilder querySelection, - List listEtat) throws ArcException { + public void retourPhasePrecedente(TraitementPhase phase, ArcPreparedStatementBuilder querySelection) throws ArcException { LOGGER.info("Retour 
arrière pour la phase : {}", phase); Connection connection = sandbox.getConnection(); String envExecution = sandbox.getSchema(); + String tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution); ArcPreparedStatementBuilder requete; @@ -117,4 +124,40 @@ public void retourPhasePrecedente(TraitementPhase phase, ArcPreparedStatementBui } + /** + * Delete file and pilotage table to reset a sandbox + * @param repertoire + * @throws ArcException + */ + public void clearPilotageAndDirectories(String repertoire) throws ArcException { + + Connection connection = sandbox.getConnection(); + String envExecution = sandbox.getSchema(); + + UtilitaireDao.get(0).executeBlock(connection, "truncate " + ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution)+ ";"); + UtilitaireDao.get(0).executeBlock(connection, "truncate " + ViewEnum.PILOTAGE_ARCHIVE.getFullName(envExecution) + ";"); + + if (Boolean.TRUE.equals(UtilitaireDao.get(0).hasResults(null, FormatSQL.tableExists("arc.ihm_entrepot")))) { + ArrayList entrepotList = new GenericBean(UtilitaireDao.get(0).executeRequest(null, + new ArcPreparedStatementBuilder("select id_entrepot from arc.ihm_entrepot"))).mapContent() + .get("id_entrepot"); + if (entrepotList != null) { + for (String s : entrepotList) { + FileUtilsArc.deleteAndRecreateDirectory( + Paths.get(ApiReceptionService.directoryReceptionEntrepot(repertoire, envExecution, s)).toFile()); + FileUtilsArc.deleteAndRecreateDirectory(Paths + .get(ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, envExecution, s)).toFile()); + } + } + } + FileUtilsArc.deleteAndRecreateDirectory( + Paths.get(ApiReceptionService.directoryReceptionEtatEnCours(repertoire, envExecution)).toFile()); + FileUtilsArc.deleteAndRecreateDirectory( + Paths.get(ApiReceptionService.directoryReceptionEtatOK(repertoire, envExecution)).toFile()); + FileUtilsArc.deleteAndRecreateDirectory( + Paths.get(ApiReceptionService.directoryReceptionEtatKO(repertoire, envExecution)).toFile()); + FileUtilsArc.deleteAndRecreateDirectory( + Paths.get(FileSystemManagement.directoryEnvExport(repertoire, envExecution)).toFile()); + } + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java index 9d1cf4263..01b62e812 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java @@ -108,8 +108,10 @@ public void executer() { } else { maxNumberOfFiles = bdParameters.getInt(null, "ApiReceptionService.ihm.maxNumberOfFiles", 5000); } + // Enregistrement des fichiers GenericBean archiveContent = moveAndCheckClientFiles(this.nbEnr, maxNumberOfFiles); + if (archiveContent != null) { registerAndDispatchFiles(this.connexion.getCoordinatorConnection(), archiveContent); } diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java index 16c99f427..750748447 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java @@ -776,11 +776,9 @@ public void maintenancePgCatalog(Connection connexion, String type) { GenericBean gb = new GenericBean(executeRequest(connexion, new GenericPreparedStatementBuilder( "select tablename from pg_tables where schemaname='pg_catalog'"))); StringBuilder requete = new StringBuilder(); - for 
(String t : gb.mapContent().get("tablename")) { - requete.append(FormatSQL.analyzeSecured(t)); - } for (String t : gb.mapContent().get("tablename")) { requete.append(FormatSQL.vacuumSecured(t, type)); + requete.append(FormatSQL.analyzeSecured(t)); } executeImmediate(connexion, requete.toString()); } catch (Exception ex) { diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java index cc01def6f..ac59b8b2c 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java @@ -152,7 +152,7 @@ public static String cast(String t) * @return */ public static String vacuumSecured(String table, String type) - { + { return "VACUUM "+ type +" " + table + "; COMMIT; \n"; } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java index 600f198ad..bf273d466 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java @@ -12,10 +12,10 @@ import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.utils.JsonKeys; -import fr.insee.arc.ws.actions.SendResponse; -import fr.insee.arc.ws.dao.ClientDao; -import fr.insee.arc.ws.dao.ClientDaoImpl; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; +import fr.insee.arc.ws.services.importServlet.dao.ClientDao; +import fr.insee.arc.ws.services.importServlet.dao.ClientDaoImpl; public class ImportStep1InitializeClientTablesService { diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java index 10c7c7e03..ed80d1042 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java @@ -10,10 +10,10 @@ import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.utils.JsonKeys; -import fr.insee.arc.ws.actions.SendResponse; -import fr.insee.arc.ws.dao.ClientDao; -import fr.insee.arc.ws.dao.ClientDaoImpl; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; +import fr.insee.arc.ws.services.importServlet.dao.ClientDao; +import fr.insee.arc.ws.services.importServlet.dao.ClientDaoImpl; public class ImportStep2GetTableNameService { diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java index 32c50828f..7c662a971 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java @@ -7,10 +7,10 @@ import 
fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.utils.JsonKeys; -import fr.insee.arc.ws.actions.SendResponse; -import fr.insee.arc.ws.dao.ClientDao; -import fr.insee.arc.ws.dao.ClientDaoImpl; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; +import fr.insee.arc.ws.services.importServlet.dao.ClientDao; +import fr.insee.arc.ws.services.importServlet.dao.ClientDaoImpl; public class ImportStep3GetTableDataService { diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java index 7c125885a..874ecbdc7 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java @@ -27,12 +27,12 @@ import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.utils.utils.JsonKeys; import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.ManipString; import fr.insee.arc.utils.webutils.WebUtils; -import fr.insee.arc.ws.actions.InitiateRequest; -import fr.insee.arc.ws.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.actions.InitiateRequest; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; public class ServletArc extends HttpServlet { diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/actions/InitiateRequest.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/InitiateRequest.java similarity index 93% rename from arc-ws/src/main/java/fr/insee/arc/ws/actions/InitiateRequest.java rename to arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/InitiateRequest.java index 7556d402b..a12301408 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/actions/InitiateRequest.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/InitiateRequest.java @@ -1,4 +1,4 @@ -package fr.insee.arc.ws.actions; +package fr.insee.arc.ws.services.importServlet.actions; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -6,12 +6,12 @@ import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; -import fr.insee.arc.utils.utils.JsonKeys; import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.Services; import fr.insee.arc.ws.services.importServlet.ImportStep1InitializeClientTablesService; import fr.insee.arc.ws.services.importServlet.ImportStep2GetTableNameService; import fr.insee.arc.ws.services.importServlet.ImportStep3GetTableDataService; +import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; /** * Cette classe permet d'initier le requêtage auprès de la base de données. 
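Usage note for the maintenance refactor earlier in this patch (illustrative sketch, not a hunk to apply): the per-pod loops that BatchARC used to run are replaced by two static entry points on DatabaseMaintenance. maintenancePgCatalogAllNods fans the pg_catalog vacuum/analyze out to the coordinator and every executor node through ServiceScalability.dispatchOnNods (which opens and closes its own coordinator connection when passed null), while maintenancePilotage only touches the pilotage table of the sandbox schema. A minimal caller sketch follows; the class name MaintenanceSketch, the main entry point and the "arc_bas1" schema literal are placeholders, whereas the imports, method signatures, constants and exception handling mirror the hunks above.

import fr.insee.arc.core.service.global.dao.DatabaseMaintenance;
import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.utils.utils.FormatSQL;

public class MaintenanceSketch {

    public static void main(String[] args) {
        // placeholder sandbox schema; in BatchARC this comes from envExecution
        String envExecution = "arc_bas1";

        try {
            // null connection: dispatchOnNods opens and closes its own
            // coordinator connection, as the batch maintenance thread does
            DatabaseMaintenance.maintenancePgCatalogAllNods(null, FormatSQL.VACUUM_OPTION_FULL);

            // the pilotage table only exists in the coordinator sandbox schema,
            // so it keeps a single-connection entry point
            DatabaseMaintenance.maintenancePilotage(null, envExecution, FormatSQL.VACUUM_OPTION_NONE);
        } catch (ArcException e) {
            e.logMessageException();
        }
    }
}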
diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java similarity index 93% rename from arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java rename to arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java index d5e76e23b..2f7bf238c 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java @@ -1,4 +1,4 @@ -package fr.insee.arc.ws.actions; +package fr.insee.arc.ws.services.importServlet.actions; import java.io.IOException; diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/utils/JsonKeys.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java similarity index 94% rename from arc-utils/src/main/java/fr/insee/arc/utils/utils/JsonKeys.java rename to arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java index 7f4b7b8cb..8cfad3a47 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/utils/JsonKeys.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java @@ -1,4 +1,4 @@ -package fr.insee.arc.utils.utils; +package fr.insee.arc.ws.services.importServlet.bo; public enum JsonKeys { //Requête diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java similarity index 98% rename from arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDao.java rename to arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 6b556d8f8..eb209c556 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -1,11 +1,11 @@ -package fr.insee.arc.ws.dao; +package fr.insee.arc.ws.services.importServlet.dao; import java.util.ArrayList; import org.json.JSONObject; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.ws.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; public interface ClientDao { diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java similarity index 98% rename from arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java rename to arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java index 83ef9b879..34de478a2 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoImpl.java @@ -1,4 +1,4 @@ -package fr.insee.arc.ws.dao; +package fr.insee.arc.ws.services.importServlet.dao; import java.sql.Connection; import java.sql.SQLException; @@ -21,10 +21,10 @@ import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.utils.format.Format; import fr.insee.arc.utils.utils.FormatSQL; -import fr.insee.arc.utils.utils.JsonKeys; import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.ManipString; -import fr.insee.arc.ws.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; public class ClientDaoImpl implements ClientDao { @@ -293,7 +293,7 @@ public void addImage(long timestamp, String client, String environnement, ArrayL /** * * - * @see 
fr.insee.arc.ws.dao.ClientDao#getResponse(long, java.lang.String, java.lang.String, fr.insee.arc.ws.actions.SendResponse) + * @see fr.insee.arc.ws.services.importServlet.dao.ClientDao#getResponse(long, java.lang.String, java.lang.String, fr.insee.arc.ws.services.importServlet.actions.SendResponse) */ @Override public void getResponse(long timestamp, String client, String tableMetierName, String environnement, SendResponse resp) throws ArcException {