From 72a691feea07829b5699b06411beeb9da90ae2ad Mon Sep 17 00:00:00 2001
From: Manuel Soulier
Date: Tue, 3 Oct 2023 15:01:34 +0200
Subject: [PATCH] fix: SynchronizeRulesAndMetadataDao

---
 .../ApiInitialisationService.java             |   4 +-
 .../filesystem/BuildFileSystem.java           |  21 +-
 .../SynchronizeRulesAndMetadataOperation.java | 208 ++++++++++++++++
 .../SynchronizeRulesAndMetadataDao.java}      | 231 +++---------------
 ...ntBrutalTable.java => ChargementBrut.java} |   4 +-
 .../thread/ThreadChargementService.java       |   4 +-
 .../SynchronizeUserRulesAndMetadataTest.java  |   2 +-
 .../service/ServiceViewPilotageBAS.java       |   4 +-
 .../service/ServiceViewPilotageProd.java      |   4 +-
 .../execute/ExecuteServiceController.java     |   4 +-
 10 files changed, 264 insertions(+), 222 deletions(-)
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java
 rename arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/{SynchronizeUserRulesAndMetadata.java => dao/SynchronizeRulesAndMetadataDao.java} (65%)
 rename arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/{ChargementBrutalTable.java => ChargementBrut.java} (96%)

diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java
index 152f1587a..557c32cbb 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java
@@ -4,7 +4,7 @@
 
 import fr.insee.arc.core.service.global.ApiService;
 import fr.insee.arc.core.service.p0initialisation.filesystem.RestoreFileSystem;
-import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata;
+import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeRulesAndMetadataOperation;
 import fr.insee.arc.core.service.p0initialisation.pilotage.CleanPilotageOperation;
 import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotageOperation;
 import fr.insee.arc.core.service.p0initialisation.useroperation.ReplayOrDeleteFilesOperation;
@@ -46,7 +46,7 @@ public void executer() throws ArcException {
 		// Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans
 		// l'environnement d'excécution courant
 		// mettre à jour les tables métier avec les paramêtres de la famille de norme
-		new SynchronizeUserRulesAndMetadata(this.coordinatorSandbox).synchroniserSchemaExecutionAllNods();
+		new SynchronizeRulesAndMetadataOperation(this.coordinatorSandbox).synchroniserSchemaExecutionAllNods();
 
 		// marque les fichiers ou les archives à rejouer
 		// efface des fichiers de la table de pilotage marqués par l'utilisateur comme
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java
index c71943477..f884fa05f 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java
@@ -11,9 +11,11 @@
 import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
 
 public class BuildFileSystem {
-	
+
 	/**
-	 * Build the file system required for arc to proceed for a list of given sandboxes @param envExecutions
+	 * Build the file system required for arc to proceed for a list of given
+	 * sandboxes @param envExecutions
+	 *
 	 * @param connexion
 	 * @param envExecutions
 	 */
@@ -24,12 +26,9 @@ public BuildFileSystem(Connection connexion, String[] envExecutions) {
 	}
 
 	private Connection connexion;
-	
+
 	private String[] envExecutions;
-	
-	
-	
-	
+
 	/**
 	 * Build directories for the sandbox
 	 *
@@ -53,10 +52,10 @@ public void execute() {
 				FileUtilsArc.createDirIfNotexist(DirectoryPath
 						.directoryReceptionEtatEnCours(properties.getBatchParametersDirectory(), envExecution));
 
-				FileUtilsArc.createDirIfNotexist(DirectoryPath
-						.directoryReceptionEtatOK(properties.getBatchParametersDirectory(), envExecution));
-				FileUtilsArc.createDirIfNotexist(DirectoryPath
-						.directoryReceptionEtatKO(properties.getBatchParametersDirectory(), envExecution));
+				FileUtilsArc.createDirIfNotexist(
+						DirectoryPath.directoryReceptionEtatOK(properties.getBatchParametersDirectory(), envExecution));
+				FileUtilsArc.createDirIfNotexist(
+						DirectoryPath.directoryReceptionEtatKO(properties.getBatchParametersDirectory(), envExecution));
 
 			}
 		} catch (ArcException ex) {
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java
new file mode 100644
index 000000000..0f9232d1a
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java
@@ -0,0 +1,208 @@
+package fr.insee.arc.core.service.p0initialisation.metadata;
+
+import java.sql.Connection;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Optional;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import fr.insee.arc.core.service.global.bo.JeuDeRegle;
+import fr.insee.arc.core.service.global.bo.JeuDeRegleDao;
+import fr.insee.arc.core.service.global.bo.Sandbox;
+import fr.insee.arc.core.service.global.scalability.ServiceScalability;
+import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
+import fr.insee.arc.core.service.p0initialisation.metadata.dao.SynchronizeRulesAndMetadataDao;
+import fr.insee.arc.core.service.p5mapping.engine.ExpressionService;
+import fr.insee.arc.utils.consumer.ThrowingConsumer;
+import fr.insee.arc.utils.dao.CopyObjectsToDatabase;
+import fr.insee.arc.utils.dao.UtilitaireDao;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
+import fr.insee.arc.utils.structure.GenericBean;
+import fr.insee.arc.utils.utils.LoggerHelper;
+
+public class SynchronizeRulesAndMetadataOperation {
+
+	private static final Logger LOGGER = LogManager.getLogger(SynchronizeRulesAndMetadataOperation.class);
+
+	public SynchronizeRulesAndMetadataOperation(Sandbox sandbox) {
+		super();
+		this.sandbox = sandbox;
+		this.dao = new SynchronizeRulesAndMetadataDao(sandbox);
+	}
+
+	private Sandbox sandbox;
+
+	private SynchronizeRulesAndMetadataDao dao;
+
+	/**
+	 * Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans
+	 * Met à jour le schéma des tables métiers correspondant aux règles définies
+	 * dans les familles
+	 *
+	 * @param connexion
+	 * @param envParameters
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	public void synchroniserSchemaExecutionAllNods() throws ArcException {
+
+		copyMetadataAllNods();
+
+		mettreAJourSchemaTableMetierOnNods();
+	}
+
+	/**
+	 * Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans
+	 * l'environnement d'excécution courant sur tous les noeuds postgres
+	 * (coordinator et executors)
+	 *
+	 * @param connexion
+	 * @param envParameters
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	public void copyMetadataAllNods() throws ArcException {
+
+		// on coordinator nod - copy the metadata user rules from metadata schema to sandbox schema
+		copyMetadataToSandbox();
+
+		// copy the rules in sandbox schema of the coordinator nod to the sandbox schema of the executor nods
+		copyMetadataToExecutorsAllNods();
+	}
+
+	/**
+	 * Recopier les tables de l'environnement de parametres (IHM) vers
+	 * l'environnement d'execution (batch, bas, ...)
+	 *
+	 * @param connexion
+	 * @param anParametersEnvironment
+	 * @param anExecutionEnvironment
+	 * @throws ArcException
+	 */
+	private void copyMetadataToSandbox() throws ArcException {
+		dao.copyRulesTablesToExecution();
+		applyExpressions();
+	}
+
+	/**
+	 * Instanciate the metadata required into all executors pod
+	 *
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	protected int copyMetadataToExecutorsAllNods() throws ArcException {
+
+		Connection coordinatorConnexion = sandbox.getConnection();
+		String envExecution = sandbox.getSchema();
+
+		ThrowingConsumer<Connection, ArcException> onCoordinator = c -> {
+		};
+
+		ThrowingConsumer<Connection, ArcException> onExecutor = executorConnection -> {
+			copyMetaDataToExecutors(coordinatorConnexion, executorConnection, envExecution);
+		};
+
+		return ServiceScalability.dispatchOnNods(coordinatorConnexion, onCoordinator, onExecutor);
+
+	}
+
+	/**
+	 * Instanciate the metadata required into the given executor pod
+	 *
+	 * @param coordinatorConnexion
+	 * @param executorConnection
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	private static void copyMetaDataToExecutors(Connection coordinatorConnexion, Connection executorConnection,
+			String envExecution) throws ArcException {
+		PropertiesHandler properties = PropertiesHandler.getInstance();
+
+		// add utility functions
+		BddPatcher.executeBddScript(executorConnection, "BdD/script_function_utility.sql",
+				properties.getDatabaseRestrictedUsername(), null, null);
+
+		// add tables for phases if required
+		BddPatcher.bddScriptEnvironmentExecutor(executorConnection, properties.getDatabaseRestrictedUsername(),
+				new String[] { envExecution });
+
+		// copy tables
+
+		ArrayList<String> tablesToCopyIntoExecutor = BddPatcher.retrieveRulesTablesFromSchema(coordinatorConnexion,
+				envExecution);
+		tablesToCopyIntoExecutor
+				.addAll(BddPatcher.retrieveExternalTablesUsedInRules(coordinatorConnexion, envExecution));
+		tablesToCopyIntoExecutor.addAll(BddPatcher.retrieveModelTablesFromSchema(coordinatorConnexion, envExecution));
+
+		for (String table : new HashSet<String>(tablesToCopyIntoExecutor)) {
+
+			GenericBean gb = SynchronizeRulesAndMetadataDao.execQuerySelectDataFrom(coordinatorConnexion, table);
+
+			CopyObjectsToDatabase.execCopyFromGenericBean(executorConnection, table, gb);
+		}
+	}
+
+	/**
+	 * replace an expression in rules
+	 *
+	 * @param connexion
+	 * @param anExecutionEnvironment
+	 * @throws ArcException
+	 */
+	private void applyExpressions() throws ArcException {
+
+		Connection connexion = sandbox.getConnection();
+		String anExecutionEnvironment = sandbox.getSchema();
+
+		// Checks expression validity
+		ExpressionService expressionService = new ExpressionService();
+		ArrayList<JeuDeRegle> allRuleSets = JeuDeRegleDao.recupJeuDeRegle(connexion,
+				anExecutionEnvironment + ".jeuderegle");
+		for (JeuDeRegle ruleSet : allRuleSets) {
+			// Check
+			GenericBean expressions = expressionService.fetchExpressions(connexion, anExecutionEnvironment, ruleSet);
+			if (expressions.isEmpty()) {
+				continue;
+			}
+
+			Optional<String> loopInExpressionSet = expressionService.loopInExpressionSet(expressions);
+			if (loopInExpressionSet.isPresent()) {
+				LoggerHelper.info(LOGGER, "A loop is present in the expression set : " + loopInExpressionSet.get());
+				LoggerHelper.info(LOGGER, "The expression set is not applied");
+				continue;
+			}
+
+			// Apply
+			expressions = expressionService.fetchOrderedExpressions(connexion, anExecutionEnvironment, ruleSet);
+			if (expressionService.isExpressionSyntaxPresentInControl(connexion, anExecutionEnvironment, ruleSet)) {
+				UtilitaireDao.get(0).executeRequest(connexion,
+						expressionService.applyExpressionsToControl(ruleSet, expressions, anExecutionEnvironment));
+			}
+			if (expressionService.isExpressionSyntaxPresentInMapping(connexion, anExecutionEnvironment, ruleSet)) {
+				UtilitaireDao.get(0).executeRequest(connexion,
+						expressionService.applyExpressionsToMapping(ruleSet, expressions, anExecutionEnvironment));
+			}
+		}
+
+	}
+
+	private void mettreAJourSchemaTableMetierOnNods() throws ArcException {
+
+		Connection coordinatorConnexion = sandbox.getConnection();
+		String envExecution = sandbox.getSchema();
+
+		ThrowingConsumer<Connection, ArcException> function = executorConnection -> {
+			SynchronizeRulesAndMetadataDao.mettreAJourSchemaTableMetier(executorConnection, envExecution);
+		};
+
+		ServiceScalability.dispatchOnNods(coordinatorConnexion, function, function);
+
+	}
+
+}
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadata.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/dao/SynchronizeRulesAndMetadataDao.java
similarity index 65%
rename from arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadata.java
rename to arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/dao/SynchronizeRulesAndMetadataDao.java
index c0702489b..d0e0d2823 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadata.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/dao/SynchronizeRulesAndMetadataDao.java
@@ -1,11 +1,9 @@
-package fr.insee.arc.core.service.p0initialisation.metadata;
+package fr.insee.arc.core.service.p0initialisation.metadata.dao;
 
 import java.sql.Connection;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Optional;
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -13,20 +11,13 @@
 import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
 import fr.insee.arc.core.dataobjects.ViewEnum;
 import fr.insee.arc.core.model.TraitementTableParametre;
-import fr.insee.arc.core.service.global.bo.JeuDeRegle;
-import fr.insee.arc.core.service.global.bo.JeuDeRegleDao;
 import fr.insee.arc.core.service.global.bo.Sandbox;
 import fr.insee.arc.core.service.global.dao.TableNaming;
-import fr.insee.arc.core.service.global.scalability.ServiceScalability;
-import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
-import fr.insee.arc.core.service.p5mapping.engine.ExpressionService;
-import fr.insee.arc.utils.consumer.ThrowingConsumer;
-import fr.insee.arc.utils.dao.CopyObjectsToDatabase;
+import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeRulesAndMetadataOperation;
 import fr.insee.arc.utils.dao.UtilitaireDao;
 import fr.insee.arc.utils.dataobjects.TypeEnum;
 import fr.insee.arc.utils.exception.ArcException;
 import fr.insee.arc.utils.format.Format;
-import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
 import fr.insee.arc.utils.structure.AttributeValue;
 import fr.insee.arc.utils.structure.GenericBean;
 import fr.insee.arc.utils.structure.tree.HierarchicalView;
@@ -34,172 +25,16 @@
 import fr.insee.arc.utils.utils.LoggerHelper;
 import fr.insee.arc.utils.utils.ManipString;
 
-public class SynchronizeUserRulesAndMetadata {
+public class SynchronizeRulesAndMetadataDao {
 
-
-	private static final Logger LOGGER = LogManager.getLogger(SynchronizeUserRulesAndMetadata.class);
-
-	public SynchronizeUserRulesAndMetadata(Sandbox sandbox) {
+	private static final Logger LOGGER = LogManager.getLogger(SynchronizeRulesAndMetadataDao.class);
+
+	public SynchronizeRulesAndMetadataDao(Sandbox sandbox) {
 		super();
 		this.sandbox = sandbox;
 	}
-
-	private Sandbox sandbox;
-
-
-	/**
-	 * Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans
-	 * Met à jour le schéma des tables métiers correspondant aux règles définies
-	 * dans les familles
-	 *
-	 * @param connexion
-	 * @param envParameters
-	 * @param envExecution
-	 * @throws ArcException
-	 */
-	public void synchroniserSchemaExecutionAllNods() throws ArcException {
-
-		copyMetadataAllNods();
-
-		mettreAJourSchemaTableMetierOnNods();
-	}
-
-
-	/**
-	 * Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans
-	 * l'environnement d'excécution courant sur tous les noeuds postgres
-	 * (coordinator et executors)
-	 *
-	 * @param connexion
-	 * @param envParameters
-	 * @param envExecution
-	 * @throws ArcException
-	 */
-	public void copyMetadataAllNods()
-			throws ArcException {
-		copyMetadataToSandbox();
-
-		copyMetadataToExecutorsAllNods();
-	}
-
-	/**
-	 * Recopier les tables de l'environnement de parametres (IHM) vers
-	 * l'environnement d'execution (batch, bas, ...)
-	 *
-	 * @param connexion
-	 * @param anParametersEnvironment
-	 * @param anExecutionEnvironment
-	 * @throws ArcException
-	 */
-	private void copyMetadataToSandbox() throws ArcException {
-		copyRulesTablesToExecution();
-		applyExpressions();
-	}
-
-	/**
-	 * Instanciate the metadata required into all executors pod
-	 *
-	 * @param envExecution
-	 * @throws ArcException
-	 */
-	protected int copyMetadataToExecutorsAllNods()
-			throws ArcException {
-
-		Connection coordinatorConnexion = sandbox.getConnection();
-		String envExecution = sandbox.getSchema();
-
-		ThrowingConsumer<Connection, ArcException> onCoordinator = c -> {
-		};
-
-		ThrowingConsumer<Connection, ArcException> onExecutor = executorConnection -> {
-			copyMetaDataToExecutors(coordinatorConnexion, executorConnection, envExecution);
-		};
-
-		return ServiceScalability.dispatchOnNods(coordinatorConnexion, onCoordinator, onExecutor);
-
-	}
-
-	/**
-	 * Instanciate the metadata required into the given executor pod
-	 *
-	 * @param coordinatorConnexion
-	 * @param executorConnection
-	 * @param envExecution
-	 * @throws ArcException
-	 */
-	private static void copyMetaDataToExecutors(Connection coordinatorConnexion, Connection executorConnection,
-			String envExecution) throws ArcException {
-		PropertiesHandler properties = PropertiesHandler.getInstance();
-
-		// add utility functions
-		BddPatcher.executeBddScript(executorConnection, "BdD/script_function_utility.sql",
-				properties.getDatabaseRestrictedUsername(), null, null);
-
-		// add tables for phases if required
-		BddPatcher.bddScriptEnvironmentExecutor(executorConnection, properties.getDatabaseRestrictedUsername(),
-				new String[] { envExecution });
-
-		// copy tables
-
-		ArrayList<String> tablesToCopyIntoExecutor = BddPatcher.retrieveRulesTablesFromSchema(coordinatorConnexion,
-				envExecution);
-		tablesToCopyIntoExecutor
-				.addAll(BddPatcher.retrieveExternalTablesUsedInRules(coordinatorConnexion, envExecution));
-		tablesToCopyIntoExecutor.addAll(BddPatcher.retrieveModelTablesFromSchema(coordinatorConnexion, envExecution));
-
-		for (String table : new HashSet<String>(tablesToCopyIntoExecutor)) {
-			GenericBean gb = new GenericBean(UtilitaireDao.get(0).executeRequest(coordinatorConnexion,
-					new ArcPreparedStatementBuilder("SELECT * FROM " + table)));
-
-			CopyObjectsToDatabase.execCopyFromGenericBean(executorConnection, table, gb);
-
-		}
-	}
-
-	/**
-	 * replace an expression in rules
-	 *
-	 * @param connexion
-	 * @param anExecutionEnvironment
-	 * @throws ArcException
-	 */
-	private void applyExpressions() throws ArcException {
-
-		Connection connexion = sandbox.getConnection();
-		String anExecutionEnvironment = sandbox.getSchema();
-
-		// Checks expression validity
-		ExpressionService expressionService = new ExpressionService();
-		ArrayList<JeuDeRegle> allRuleSets = JeuDeRegleDao.recupJeuDeRegle(connexion,
-				anExecutionEnvironment + ".jeuderegle");
-		for (JeuDeRegle ruleSet : allRuleSets) {
-			// Check
-			GenericBean expressions = expressionService.fetchExpressions(connexion, anExecutionEnvironment, ruleSet);
-			if (expressions.isEmpty()) {
-				continue;
-			}
-
-			Optional<String> loopInExpressionSet = expressionService.loopInExpressionSet(expressions);
-			if (loopInExpressionSet.isPresent()) {
-				LoggerHelper.info(LOGGER, "A loop is present in the expression set : " + loopInExpressionSet.get());
-				LoggerHelper.info(LOGGER, "The expression set is not applied");
-				continue;
-			}
-
-			// Apply
-			expressions = expressionService.fetchOrderedExpressions(connexion, anExecutionEnvironment, ruleSet);
-			if (expressionService.isExpressionSyntaxPresentInControl(connexion, anExecutionEnvironment, ruleSet)) {
-				UtilitaireDao.get(0).executeRequest(connexion,
-						expressionService.applyExpressionsToControl(ruleSet, expressions, anExecutionEnvironment));
-			}
-			if (expressionService.isExpressionSyntaxPresentInMapping(connexion, anExecutionEnvironment, ruleSet)) {
-				UtilitaireDao.get(0).executeRequest(connexion,
-						expressionService.applyExpressionsToMapping(ruleSet, expressions, anExecutionEnvironment));
-			}
-		}
-
-	}
+	private Sandbox sandbox;
 
 	/**
 	 * Copy the table containing user rules to the sandbox so they will be used by
@@ -210,12 +45,12 @@ private void applyExpressions() throws ArcException {
 	 * @param anExecutionEnvironment
 	 * @throws ArcException
 	 */
-	private void copyRulesTablesToExecution() throws ArcException {
+	public void copyRulesTablesToExecution() throws ArcException {
 		LoggerHelper.info(LOGGER, "copyTablesToExecution");
-		
+
 		Connection coordinatorConnexion = sandbox.getConnection();
 		String anExecutionEnvironment = sandbox.getSchema();
-		
+
 		try {
 			anExecutionEnvironment = anExecutionEnvironment.replace(".", "_");
 
@@ -228,7 +63,8 @@ private void copyRulesTablesToExecution() throws ArcException {
 				// on créé une table image de la table venant de l'ihm
 				// (environnement de parametre)
 				TraitementTableParametre parameterTable = r[i];
-				String tableImage = FormatSQL.temporaryTableName(parameterTable.getTablenameInSandbox().getFullName(anExecutionEnvironment));
+				String tableImage = FormatSQL
+						.temporaryTableName(parameterTable.getTablenameInSandbox().getFullName(anExecutionEnvironment));
 
 				// recopie partielle (en fonction de l'environnement
 				// d'exécution)
@@ -260,9 +96,11 @@ private void copyRulesTablesToExecution() throws ArcException {
 				requete.append("CREATE TABLE " + tableImage + " " + FormatSQL.WITH_NO_VACUUM + " AS SELECT a.* FROM "
 						+ r[i].getTablenameInMetadata().getFullName() + " AS a " + condition + ";\n");
 
-				requete.append(FormatSQL.dropTable(parameterTable.getTablenameInSandbox().getFullName(anExecutionEnvironment)));
+				requete.append(FormatSQL
+						.dropTable(parameterTable.getTablenameInSandbox().getFullName(anExecutionEnvironment)));
 				requete.append("ALTER TABLE " + tableImage + " rename to "
-						+ ManipString.substringAfterLast(parameterTable.getTablenameInSandbox().getTableName(), ".") + "; \n");
+						+ ManipString.substringAfterLast(parameterTable.getTablenameInSandbox().getTableName(), ".")
+						+ "; \n");
 			}
 
 			UtilitaireDao.get(0).executeBlock(coordinatorConnexion, requete);
@@ -315,28 +153,16 @@ private void copyRulesTablesToExecution() throws ArcException {
 		}
 	}
 
-	private void mettreAJourSchemaTableMetierOnNods() throws ArcException {
-
-		Connection coordinatorConnexion = sandbox.getConnection();
-		String envExecution = sandbox.getSchema();
-
-		ThrowingConsumer<Connection, ArcException> function = executorConnection -> {
-			mettreAJourSchemaTableMetier(executorConnection, envExecution);
-		};
-
-		ServiceScalability.dispatchOnNods(coordinatorConnexion, function, function);
-
-	}
-	
+
+
 	/**
 	 * Créer ou detruire les colonnes ou les tables métiers en comparant ce qu'il y
 	 * a en base à ce qu'il y a de déclaré dans la table des familles de norme
 	 *
-	 * @param connexion
+	 * @param coordinatorOrExecutorConnexion
 	 * @throws ArcException
 	 */
-	private static void mettreAJourSchemaTableMetier(Connection connexion, String envExecution)
-			throws ArcException {
+	public static void mettreAJourSchemaTableMetier(Connection coordinatorOrExecutorConnexion, String envExecution) throws ArcException {
 		LoggerHelper.info(LOGGER, "mettreAJourSchemaTableMetier");
 		/*
 		 * Récupérer la table qui mappe : famille / table métier / variable métier et
 		 *
@@ -344,10 +170,11 @@ private static void mettreAJourSchemaTableMetier(Connection connexion, String en
 		 */
 		ArcPreparedStatementBuilder requeteRef = new ArcPreparedStatementBuilder();
 		requeteRef.append("SELECT lower(id_famille), lower('" + TableNaming.dbEnv(envExecution)
-				+ "'||nom_table_metier), lower(nom_variable_metier), lower(type_variable_metier) FROM " + ViewEnum.IHM_MOD_VARIABLE_METIER.getFullName());
+				+ "'||nom_table_metier), lower(nom_variable_metier), lower(type_variable_metier) FROM "
+				+ ViewEnum.IHM_MOD_VARIABLE_METIER.getFullName());
 
 		List<List<String>> relationalViewRef = Format
-				.patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(connexion, requeteRef));
+				.patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(coordinatorOrExecutorConnexion, requeteRef));
 		HierarchicalView familleToTableToVariableToTypeRef = HierarchicalView.asRelationalToHierarchical(
 				"(Réf) Famille -> Table -> Variable -> Type",
 				Arrays.asList("id_famille", "nom_table_metier", "variable_metier", "type_variable_metier"),
@@ -378,7 +205,7 @@ private static void mettreAJourSchemaTableMetier(Connection connexion, String en
 						+ "mapping\\_'||lower(id_famille)||'\\_%';");
 
 		List<List<String>> relationalView = Format
-				.patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(connexion, requete));
+				.patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(coordinatorOrExecutorConnexion, requete));
 
 		HierarchicalView familleToTableToVariableToType = HierarchicalView.asRelationalToHierarchical(
 				"(Phy) Famille -> Table -> Variable -> Type",
@@ -490,7 +317,15 @@ private static void mettreAJourSchemaTableMetier(Connection connexion, String en
 				}
 			}
 		}
-		UtilitaireDao.get(0).executeBlock(connexion, requeteMAJSchema);
+		UtilitaireDao.get(0).executeBlock(coordinatorOrExecutorConnexion, requeteMAJSchema);
 	}
+
+
+	public static GenericBean execQuerySelectDataFrom(Connection coordinatorConnexion, String table) throws ArcException {
+		return new GenericBean(UtilitaireDao.get(0).executeRequest(coordinatorConnexion,
+				new ArcPreparedStatementBuilder("SELECT * FROM " + table)));
+	}
+
+
 }
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargementBrutalTable.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargementBrut.java
similarity index 96%
rename from arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargementBrutalTable.java
rename to arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargementBrut.java
index a6fc574fd..af0ae48c0 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargementBrutalTable.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargementBrut.java
@@ -24,7 +24,7 @@
  * @author S4LWO8
  *
  */
-public class ChargementBrutalTable {
+public class ChargementBrut {
 
 	/** Combien de boucle au maximum */
@@ -32,7 +32,7 @@ public class ChargementBrut {
 	/** Combien de ligne on charge pour chacune des boucles */
 	private static final int LIMIT_CHARGEMENT_BRUTAL = 50;
 
-	private static final Logger LOGGER = LogManager.getLogger(ChargementBrutalTable.class);
+	private static final Logger LOGGER = LogManager.getLogger(ChargementBrut.class);
 
 	private Connection connexion;
 	private List<Norme> listeNorme;
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java
index 38df9551f..670bcaca0 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/thread/ThreadChargementService.java
@@ -25,7 +25,7 @@
 import fr.insee.arc.core.service.p2chargement.archiveloader.IArchiveFileLoader;
 import fr.insee.arc.core.service.p2chargement.bo.Norme;
 import fr.insee.arc.core.service.p2chargement.bo.RegleChargement;
-import fr.insee.arc.core.service.p2chargement.engine.ChargementBrutalTable;
+import fr.insee.arc.core.service.p2chargement.engine.ChargementBrut;
 import fr.insee.arc.core.service.p2chargement.engine.IChargeur;
 import fr.insee.arc.core.service.p2chargement.factory.ChargeurFactory;
 import fr.insee.arc.core.service.p2chargement.factory.TypeChargement;
@@ -249,7 +249,7 @@ private void chargementFichierAvecContainer() throws ArcException {
 	private void choixChargeur() throws ArcException {
 		StaticLoggerDispatcher.info(LOGGER, "** choixChargeur : " + this.idSource + " **");
 		// Si on a pas 1 seule norme alors le fichier est en erreur
-		ChargementBrutalTable chgrBrtl = new ChargementBrutalTable();
+		ChargementBrut chgrBrtl = new ChargementBrut();
 		chgrBrtl.setConnexion(getConnexion().getExecutorConnection());
 		chgrBrtl.setListeNorme(listeNorme);
 
diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java
index 9e8952f68..06710b06f 100644
--- a/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java
+++ b/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java
@@ -13,7 +13,7 @@
 
 public class SynchronizeUserRulesAndMetadataTest extends InitializeQueryTest {
 
-	private SynchronizeUserRulesAndMetadata synchronizationInstance= new SynchronizeUserRulesAndMetadata(new Sandbox(c, BddPatcherTest.testSandbox3));
+	private SynchronizeRulesAndMetadataOperation synchronizationInstance= new SynchronizeRulesAndMetadataOperation(new Sandbox(c, BddPatcherTest.testSandbox3));
 
 
 	@Test
diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java
index cd7c4dad3..04babdf26 100644
--- a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java
+++ b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java
@@ -18,7 +18,7 @@
 import fr.insee.arc.core.service.global.bo.Sandbox;
 import fr.insee.arc.core.service.p0initialisation.ResetEnvironmentService;
 import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
-import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata;
+import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeRulesAndMetadataOperation;
 import fr.insee.arc.core.util.BDParameters;
 import fr.insee.arc.utils.dao.UtilitaireDao;
 import fr.insee.arc.utils.exception.ArcException;
@@ -173,7 +173,7 @@ public String executerBatch(Model model, TraitementPhase phaseAExecuter) {
 		// no need to do that if selected phase is INITIALISATION as INITIALISATION will synchronize the sandbox
 		if (!phaseAExecuter.equals(TraitementPhase.INITIALISATION)) {
 			try{
-				new SynchronizeUserRulesAndMetadata(new Sandbox(null, getBacASable())).synchroniserSchemaExecutionAllNods();
+				new SynchronizeRulesAndMetadataOperation(new Sandbox(null, getBacASable())).synchroniserSchemaExecutionAllNods();
 			}
 			catch (ArcException e) {
diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java
index c129ff31e..1bb10fbd4 100644
--- a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java
+++ b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java
@@ -9,7 +9,7 @@
 
 import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
 import fr.insee.arc.core.service.global.bo.Sandbox;
-import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata;
+import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeRulesAndMetadataOperation;
 import fr.insee.arc.utils.dao.UtilitaireDao;
 import fr.insee.arc.utils.exception.ArcException;
 
@@ -122,7 +122,7 @@ public String toggleOffPROD(Model model) {
 	 */
 	public String applyRulesProd(Model model) {
 		try {
-			new SynchronizeUserRulesAndMetadata(new Sandbox(null, getBacASable())).copyMetadataAllNods();
+			new SynchronizeRulesAndMetadataOperation(new Sandbox(null, getBacASable())).copyMetadataAllNods();
 		}
 		catch (ArcException e) {
diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java
index 7b35a234a..e41ac17a7 100644
--- a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java
+++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java
@@ -26,7 +26,7 @@
 import fr.insee.arc.core.service.p0initialisation.ResetEnvironmentService;
 import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
 import fr.insee.arc.core.service.p0initialisation.filesystem.BuildFileSystem;
-import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata;
+import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeRulesAndMetadataOperation;
 import fr.insee.arc.core.service.p1reception.provider.DirectoryPath;
 import fr.insee.arc.core.util.LoggerDispatcher;
 import fr.insee.arc.utils.dao.UtilitaireDao;
@@ -193,7 +193,7 @@ public ResponseEntity<ReturnView> synchronize(
 	{
 		ReturnView returnView=new ReturnView();
 		try {
-			new SynchronizeUserRulesAndMetadata(new Sandbox(null, env)).synchroniserSchemaExecutionAllNods();
+			new SynchronizeRulesAndMetadataOperation(new Sandbox(null, env)).synchroniserSchemaExecutionAllNods();
 			return ResponseEntity.status(HttpStatus.OK).body(returnView);
 		} catch (ArcException e) {
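
Usage note (editor's addition, not part of the patch): the refactor splits the old SynchronizeUserRulesAndMetadata into an orchestration class, SynchronizeRulesAndMetadataOperation (node dispatch via ServiceScalability, expression application), and a SQL-only SynchronizeRulesAndMetadataDao, so callers only ever touch the Operation entry points — exactly what ApiInitialisationService, ServiceViewPilotageBAS and ExecuteServiceController do in the hunks above. A minimal, hypothetical caller is sketched below under assumptions not in the patch: the JDBC URL, credentials handling and the sandbox schema name "arc_bas1" are placeholders, since ARC normally supplies the Connection and Sandbox itself.

// Hypothetical example only — illustrates the renamed API, not part of this patch.
import java.sql.Connection;
import java.sql.DriverManager;

import fr.insee.arc.core.service.global.bo.Sandbox;
import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeRulesAndMetadataOperation;

public class SynchronizeSandboxExample {

	public static void main(String[] args) throws Exception {
		// Placeholder coordinator connection; adjust the URL to your environment.
		Connection coordinatorConnexion = DriverManager.getConnection("jdbc:postgresql://localhost:5432/arc");

		// Placeholder sandbox (execution) schema name.
		String envExecution = "arc_bas1";

		// Copies the ihm_ rule tables into the sandbox schema, propagates them to
		// the executor nodes, then creates/drops model tables and columns to match
		// the declared norm families — the chain that
		// synchroniserSchemaExecutionAllNods() runs in the new Operation class.
		new SynchronizeRulesAndMetadataOperation(new Sandbox(coordinatorConnexion, envExecution))
				.synchroniserSchemaExecutionAllNods();
	}
}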