From d0453cc59387b576ed5f232032e5da840e700fcb Mon Sep 17 00:00:00 2001
From: Manuel Soulier
Date: Thu, 21 Sep 2023 18:32:37 +0200
Subject: [PATCH] initialization refactor

---
 .../ArcPreparedStatementBuilder.java          |   14 +
 .../arc/core/dataobjects/ColumnEnum.java      |    7 +
 .../core/dataobjects/DataObjectService.java   |   12 +-
 .../insee/arc/core/dataobjects/ViewEnum.java  |   29 +-
 .../core/model/TraitementTableParametre.java  |   31 +-
 .../arc/core/service/global/ApiService.java   |    5 +
 .../arc/core/service/global/bo/Sandbox.java   |   31 +
 .../core/service/global/dao/DataStorage.java  |   80 ++
 .../global/dao/FileSystemManagement.java      |   27 +
 .../service/global/dao/PhaseOperations.java   |   11 +-
 .../global/dao/PilotageOperations.java        |   13 +-
 .../service/global/dao/ThreadOperations.java  |    6 +-
 .../ApiInitialisationService.java             | 1216 +----------------
 .../{engine => dbmaintenance}/BddPatcher.java |    2 +-
 .../filesystem/BuildFileSystem.java           |   68 +
 .../filesystem/RestoreFileSystem.java         |  162 +++
 .../pilotage/CleanPilotage.java               |  215 +++
 .../pilotage/SynchronizeDataByPilotage.java   |  350 +++++
 .../bo}/ListIdSourceInPilotage.java           |    7 +-
 .../pilotage/dao/PilotageDao.java             |  149 ++
 .../SynchronizeUserRulesAndMetadata.java      |  485 +++++++
 .../p1reception/ApiReceptionService.java      |    8 +-
 ... SynchronizeUserRulesAndMetadataTest.java} |   11 +-
 .../engine/initialisation/BddPatcherTest.java |    2 +-
 .../dao/ServicePilotageOperationTest.java     |    8 +-
 .../arc/utils/dao/CopyObjectsToDatabase.java  |   60 +
 .../dao/GenericPreparedStatementBuilder.java  |   80 +-
 .../main/java/fr/insee/arc/utils/dao/SQL.java |   10 +-
 .../fr/insee/arc/utils/dao/UtilitaireDao.java |    4 +-
 .../arc/utils/dataobjects/PgViewEnum.java     |    3 +
 .../fr/insee/arc/utils/format/Format.java     |    1 +
 .../arc/utils/structure/GenericBean.java      |   13 +
 .../utils/textUtils/IConstanteCaractere.java  |   10 -
 .../fr/insee/arc/utils/utils/FormatSQL.java   |   31 +-
 .../fr/insee/arc/utils/utils/ManipString.java |   13 +
 .../GenericPreparedStatementBuilderTest.java  |    4 +-
 .../insee/arc/utils/utils/FormatSQLTest.java  |    5 +-
 .../gui/all/service/ArcWebGenericService.java |    2 +-
 .../service/ServiceViewVariableMetier.java    |    3 +-
 .../controller/ControllerFichierBAS.java      |   10 -
 .../service/ServiceViewFichierBAS.java        |   32 -
 .../service/ServiceViewPilotageBAS.java       |    5 +-
 .../service/ServiceViewPilotageProd.java      |    3 +-
 .../fr/insee/arc/ws/actions/SendResponse.java |   23 +-
 .../fr/insee/arc/ws/dao/ClientDaoImpl.java    |    2 +-
 .../execute/ExecuteServiceController.java     |    8 +-
 .../setRules/SetRulesController.java          |  201 ---
 .../setRules/pojo/SetRulesActionEnum.java     |   30 -
 .../setRules/pojo/SetRulesPojo.java           |   61 -
 49 files changed, 1863 insertions(+), 1700 deletions(-)
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/global/bo/Sandbox.java
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java
 rename arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/{engine => dbmaintenance}/BddPatcher.java (99%)
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java
 rename arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/{model => pilotage/bo}/ListIdSourceInPilotage.java (71%)
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/PilotageDao.java
 create mode 100644 arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/userdata/SynchronizeUserRulesAndMetadata.java
 rename arc-core/src/test/java/fr/insee/arc/core/service/api/{ApiInitialisationServiceTest.java => SynchronizeUserRulesAndMetadataTest.java} (70%)
 create mode 100644 arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java
 delete mode 100644 arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/setRules/SetRulesController.java
 delete mode 100644 arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/setRules/pojo/SetRulesActionEnum.java
 delete mode 100644 arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/setRules/pojo/SetRulesPojo.java

diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java
index 879da3f9e..6877db832 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ArcPreparedStatementBuilder.java
@@ -5,6 +5,7 @@
 import java.util.Map;
 
 import fr.insee.arc.utils.dao.GenericPreparedStatementBuilder;
+import fr.insee.arc.utils.dao.SQL;
 
 public class ArcPreparedStatementBuilder extends GenericPreparedStatementBuilder {
 
@@ -63,6 +64,10 @@ public StringBuilder sqlListeOfColumnsFromModel(ViewEnum tableEnum) {
 		return sqlListeOfColumnsFromModel(tableEnum.getColumns());
 	}
 
+	public ArcPreparedStatementBuilder append(ViewEnum view) {
+		return (ArcPreparedStatementBuilder) this.append(view.getTableName());
+	}
+
 	public ArcPreparedStatementBuilder append(ColumnEnum column) {
 		return (ArcPreparedStatementBuilder) this.append(column.getColumnName());
 	}
@@ -70,4 +75,13 @@ public ArcPreparedStatementBuilder append(ColumnEnum column) {
 	public String quoteText(ColumnEnum column) {
 		return quoteText(column.getColumnName());
 	}
+
+	@Override
+	public ArcPreparedStatementBuilder build(Object... queryElements) {
+		for (Object queryElement : queryElements) {
+			getQuery().append(queryElement);
+		}
+		return this;
+	}
+
 }
diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ColumnEnum.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ColumnEnum.java
index 482e75196..dfe8618bb 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ColumnEnum.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ColumnEnum.java
@@ -4,6 +4,7 @@
 import java.util.List;
 import java.util.stream.Collectors;
 
+import fr.insee.arc.utils.dao.SQL;
 import fr.insee.arc.utils.dataobjects.PgColumnEnum;
 import fr.insee.arc.utils.dataobjects.PgSchemaEnum;
 import fr.insee.arc.utils.dataobjects.TypeEnum;
@@ -123,6 +124,8 @@ public enum ColumnEnum {
 	, JOINTURE("jointure", TypeEnum.TEXT, "data structure query of the processing file")
 	, GENERATION_COMPOSITE("generation_composite", TypeEnum.TEXT, "timestamp of the end of the process of the file in pilotage")
 
+	, ENTREPOT("entrepot", TypeEnum.TEXT, "name of the datastore that receives the archive file")
+	, NOM_ARCHIVE("nom_archive", TypeEnum.TEXT, "name of the archive file")
 
 	, TEST1(PgColumnEnum.TEST1), TEST2(PgColumnEnum.TEST2)
 
@@ -187,5 +190,9 @@ public static SchemaEnum convert(PgSchemaEnum pgSchema)
 		return SchemaEnum.valueOf(pgSchema.toString());
 	}
 
+	public String alias(ViewEnum v)
+	{
+		return v.getTableName()+SQL.DOT.getSqlCode()+this.columnName;
+	}
 
 }
diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java
index f0cdf94ae..dc6d90cc2 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/DataObjectService.java
@@ -1,12 +1,8 @@
 package fr.insee.arc.core.dataobjects;
 
-public class DataObjectService {
+import fr.insee.arc.utils.dao.SQL;
 
-	/**
-	 * database characters convention
-	 */
-	public static final String SCHEMA_SEPARATOR = ".";
-	public static final String QUERY_SEPARATOR = ";";
+public class DataObjectService {
 
 	/**
 	 * ARC special parameter
@@ -40,7 +36,7 @@ public String getView(ViewEnum e) {
 		}
 
 		if ((e.getTableLocation().equals(SchemaEnum.SANDBOX) || e.getTableLocation().equals(SchemaEnum.SANDBOX_GENERATED)) && this.sandboxSchema != null) {
-			return this.sandboxSchema + SCHEMA_SEPARATOR + e.getTableName();
+			return this.sandboxSchema + SQL.DOT.getSqlCode() + e.getTableName();
 		}
 
 		return getFullTableNameInSchema(e.getTableLocation(), e.getTableName());
@@ -48,7 +44,7 @@ public String getView(ViewEnum e) {
 	}
 
 	public static String getFullTableNameInSchema(SchemaEnum schema, String tablename) {
-		return schema.getSchemaName().equals("") ? tablename : schema.getSchemaName() + SCHEMA_SEPARATOR + tablename;
+		return schema.getSchemaName().equals("") ? tablename : schema.getSchemaName() + SQL.DOT.getSqlCode() + tablename;
 	}
 
 	public String getSandboxSchema() {
diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java
index 9c89e08b9..3fcc4281b 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java
@@ -3,6 +3,7 @@
 import java.util.LinkedHashMap;
 import java.util.Map;
 
+import fr.insee.arc.utils.dao.SQL;
 import fr.insee.arc.utils.dataobjects.PgColumnEnum;
 import fr.insee.arc.utils.dataobjects.PgViewEnum;
 
@@ -104,8 +105,8 @@ public enum ViewEnum {
 			ColumnEnum.DATE_ENTREE, ColumnEnum.CONTAINER, ColumnEnum.V_CONTAINER, ColumnEnum.O_CONTAINER,
 			ColumnEnum.TO_DELETE, ColumnEnum.CLIENT, ColumnEnum.DATE_CLIENT, ColumnEnum.JOINTURE,
 			ColumnEnum.GENERATION_COMPOSITE) //
-
-	, PILOTAGE_ARCHIVE("pilotage_archive", SchemaEnum.SANDBOX)
+
+	, PILOTAGE_ARCHIVE("pilotage_archive", SchemaEnum.SANDBOX, ColumnEnum.ENTREPOT , ColumnEnum.NOM_ARCHIVE)
 
 	// family model table in sandbox
 	, MOD_TABLE_METIER("mod_table_metier", SchemaEnum.SANDBOX_GENERATED, ColumnEnum.ID_FAMILLE,
@@ -133,7 +134,11 @@ public enum ViewEnum {
 	TABLE_TEST_OUT_TEMPORARY(PgViewEnum.TABLE_TEST_OUT_TEMPORARY)
 
 	// view for table aliases or temporary table in query
-	, T1(PgViewEnum.T1), T2(PgViewEnum.T1), T3(PgViewEnum.T1)
+	, T1(PgViewEnum.T1), T2(PgViewEnum.T2), T3(PgViewEnum.T3)
+
+	, ALIAS_A(PgViewEnum.ALIAS_A), ALIAS_B(PgViewEnum.ALIAS_B), ALIAS_C(PgViewEnum.ALIAS_C)
+
+	, TMP_FILES("tmp_files", SchemaEnum.TEMPORARY, ColumnEnum.FILE_NAME)
 
 	, PG_TABLES(PgViewEnum.PG_TABLES)
 
@@ -183,4 +188,22 @@ public Map getColumns() {
 		return columns;
 	}
 
+	public String getFullName() {
+		return DataObjectService.getFullTableNameInSchema(this.tableLocation, this.tableName);
+	}
+
+	public String getFullName(String schema) {
+		return schema + SQL.DOT.getSqlCode() + this.tableName;
+	}
+
+	public ColumnEnum col(ColumnEnum e) {
+		return this.getColumns().get(e);
+	}
+
+	@Override
+	public String toString()
+	{
+		return this.getTableName();
+	}
+
 }
diff --git a/arc-core/src/main/java/fr/insee/arc/core/model/TraitementTableParametre.java b/arc-core/src/main/java/fr/insee/arc/core/model/TraitementTableParametre.java
index f39c854fc..acbd23e74 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/model/TraitementTableParametre.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/model/TraitementTableParametre.java
@@ -1,21 +1,24 @@
 package fr.insee.arc.core.model;
 
+import fr.insee.arc.core.dataobjects.ViewEnum;
+
 public enum TraitementTableParametre {
-	CALENDRIER("CALENDRIER"), //
-	NORME("NORME"), //
-	JEUDEREGLE("JEUDEREGLE"), //
-	CHARGEMENT_REGLE("CHARGEMENT_REGLE"), //
-	NORMAGE_REGLE("NORMAGE_REGLE"), //
-	CONTROLE_REGLE("CONTROLE_REGLE"), //
-	MAPPING_REGLE("MAPPING_REGLE"), //
-	EXPRESSION("EXPRESSION"), //
-	MOD_TABLE_METIER("MOD_TABLE_METIER"), //
-	MOD_VARIABLE_METIER("MOD_VARIABLE_METIER");
-	private TraitementTableParametre(String anExpression) {
-		this.expression = anExpression;
+	CALENDRIER(ViewEnum.IHM_CALENDRIER), //
+	NORME(ViewEnum.IHM_NORME), //
+	JEUDEREGLE(ViewEnum.IHM_JEUDEREGLE), //
+	CHARGEMENT_REGLE(ViewEnum.IHM_CHARGEMENT_REGLE), //
+	NORMAGE_REGLE(ViewEnum.IHM_NORMAGE_REGLE), //
+	CONTROLE_REGLE(ViewEnum.IHM_CONTROLE_REGLE), //
+	MAPPING_REGLE(ViewEnum.IHM_MAPPING_REGLE), //
+	EXPRESSION(ViewEnum.IHM_EXPRESSION), //
+	MOD_TABLE_METIER(ViewEnum.MOD_TABLE_METIER), //
+	MOD_VARIABLE_METIER(ViewEnum.MOD_VARIABLE_METIER);
+
+	private TraitementTableParametre(ViewEnum tablename) {
+		this.tablename = tablename;
 	}
 
-	private String expression;
+	private ViewEnum tablename;
 
 	public boolean isPartOfRuleset() {
 		return this == TraitementTableParametre.CHARGEMENT_REGLE
@@ -27,6 +30,6 @@ public boolean isPartOfRuleset() {
 
 	@Override
 	public String toString() {
-		return this.expression;
+		return this.tablename.getFullName();
 	}
 }
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java
index 6812fa8be..b356a5b01 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/ApiService.java
@@ -17,6 +17,7 @@
 import fr.insee.arc.core.model.TraitementPhase;
 import fr.insee.arc.core.model.TraitementTableExecution;
 import fr.insee.arc.core.model.TraitementTableParametre;
+import fr.insee.arc.core.service.global.bo.Sandbox;
 import fr.insee.arc.core.service.global.dao.DatabaseConnexionConfiguration;
 import fr.insee.arc.core.service.global.dao.PilotageOperations;
 import fr.insee.arc.core.service.global.dao.TableNaming;
@@ -69,6 +70,8 @@ public abstract class ApiService implements IConstanteNumerique {
 	protected String currentIdSource;
 	protected String directoryIn;
 	protected List<Norme> listeNorme;
+
+	protected Sandbox coordinatorSandbox;
 
 	// made to report the number of object processed by the phase
 	private int reportNumberOfObject = 0;
@@ -98,6 +101,8 @@ protected ApiService(String aCurrentPhase, String aParametersEnvironment, String
 			LoggerHelper.error(LOGGER_APISERVICE, ApiService.class, "Error in initializing connexion");
 		}
 
+		this.coordinatorSandbox = new Sandbox(this.connexion.getCoordinatorConnection(), this.envExecution);
+
 		this.setParamBatch(paramBatch);
 
 		// Initialisation de la phase
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/bo/Sandbox.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/bo/Sandbox.java
new file mode 100644
index 000000000..60813f610
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/bo/Sandbox.java
@@ -0,0 +1,31 @@
+package fr.insee.arc.core.service.global.bo;
+
+import java.sql.Connection;
+
+public class Sandbox {
+
+	private Connection connection;
+
+	private String schema;
+
+
+	/**
+	 * instantiate a sandbox reference for execution
+	 * @param connection
+	 * @param schema
+	 */
+	public Sandbox(Connection connection, String schema) {
+		super();
+		this.connection = connection;
+		this.schema = schema;
+	}
+
+	public Connection getConnection() {
+		return connection;
+	}
+
+	public String getSchema() {
+		return schema;
+	}
+
+}
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java
new file mode 100644
index 000000000..ffe831fef
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/DataStorage.java
@@ -0,0 +1,80 @@
+package fr.insee.arc.core.service.global.dao;
+
+import java.io.File;
+import java.sql.Connection;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collector;
+import java.util.stream.Collectors;
+
+import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
+import fr.insee.arc.core.dataobjects.ColumnEnum;
+import fr.insee.arc.core.dataobjects.DataObjectService;
+import fr.insee.arc.core.dataobjects.ViewEnum;
+import fr.insee.arc.utils.dao.CopyObjectsToDatabase;
+import fr.insee.arc.utils.dao.SQL;
+import fr.insee.arc.utils.dao.UtilitaireDao;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.structure.GenericBean;
+import fr.insee.arc.utils.utils.FormatSQL;
+
+public class DataStorage {
+
+	private DataStorage() {
+		throw new IllegalStateException("dao class");
+	}
+
+	/**
+	 * retrieve the data storage identifiers registered in database
+	 *
+	 * @return
+	 * @throws ArcException
+	 */
+	public static List<String> execQuerySelectDatastorage(Connection connection) throws ArcException {
+		ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
+		query.build(SQL.SELECT, ColumnEnum.ID_ENTREPOT, SQL.FROM, ViewEnum.IHM_ENTREPOT.getFullName());
+		return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query))
+				.getColumnValues(ColumnEnum.ID_ENTREPOT.getColumnName());
+	}
+
+	/**
+	 * register a list of files in a table
+	 *
+	 * @param files
+	 * @return
+	 * @throws ArcException
+	 */
+	public static void execQueryRegisterFilesInDatabase(Connection connection, List<File> files) throws ArcException {
+		List<String> filenames = files.stream().filter(e -> !e.isDirectory()).map(e -> e.getName())
+				.collect(Collectors.toList());
+
+		CopyObjectsToDatabase.execCopyFromGenericBean(connection, ViewEnum.TMP_FILES.getTableName(), new GenericBean(
+				ColumnEnum.FILE_NAME.getColumnName(), ColumnEnum.FILE_NAME.getColumnType().getTypeName(), filenames));
+
+	}
+
+	/**
+	 * given a list of files, return the list of files with no trace in the
+	 * pilotage_archive table
+	 *
+	 * @param connection
+	 * @param sandbox
+	 * @return
+	 * @throws ArcException
+	 */
+	public static List<String> execQuerySelectFilesNotInRegisteredArchives(Connection connection, String sandbox)
+			throws ArcException {
+		ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
+		query.build(SQL.SELECT, ColumnEnum.FILE_NAME, SQL.FROM, ViewEnum.TMP_FILES, SQL.AS, ViewEnum.ALIAS_A);
+		query.build(SQL.WHERE, SQL.NOT, SQL.EXISTS);
+		query.build("(");
+		query.build(SQL.SELECT, SQL.FROM, ViewEnum.PILOTAGE_ARCHIVE.getFullName(sandbox), SQL.AS, ViewEnum.ALIAS_B);
+		query.build(SQL.WHERE, ColumnEnum.NOM_ARCHIVE.alias(ViewEnum.ALIAS_B), "=",
+				ColumnEnum.FILE_NAME.alias(ViewEnum.ALIAS_A));
+		query.build(")");
+
+		return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query))
+				.getColumnValues(ColumnEnum.FILE_NAME.getColumnName());
+	}
+
+}
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java
index 8b016cbc8..73e57ba93 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/FileSystemManagement.java
@@ -1,9 +1,19 @@
 package fr.insee.arc.core.service.global.dao;
 
 import java.io.File;
+import java.io.IOException;
+import java.util.List;
 
+import org.apache.commons.io.FileUtils;
+
+import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
 import fr.insee.arc.core.model.TraitementEtat;
 import fr.insee.arc.core.model.TraitementPhase;
+import fr.insee.arc.utils.dao.UtilitaireDao;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.exception.ArcExceptionMessage;
+import fr.insee.arc.utils.files.FileUtilsArc;
+import fr.insee.arc.utils.structure.GenericBean;
 
 public class FileSystemManagement {
 
@@ -65,4 +75,21 @@ public static String
directoryPhaseEtatEnCours(String rootDirectory, String env, return directoryPhaseEtat(rootDirectory, env, t, TraitementEtat.ENCOURS); } + /** + * delete a file if it is the same as another reference file + * @param fileToDeleteIfSame + * @param fileToCompare + * @throws ArcException + */ + public static void deleteFileIfSameAs(File fileToDeleteIfSame, File fileToCompare) throws ArcException + { + try { + if (fileToCompare.exists() && FileUtils.contentEquals(fileToCompare, fileToDeleteIfSame)) { + FileUtilsArc.delete(fileToDeleteIfSame); + } + } catch (IOException exception) { + throw new ArcException(exception, ArcExceptionMessage.FILE_DELETE_FAILED, fileToDeleteIfSame); + } + } + } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java index 2e158a16c..ec5d23da2 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PhaseOperations.java @@ -76,8 +76,8 @@ private static ArcPreparedStatementBuilder selectPhaseDataTablesFoundInEnv(Strin * @throws ArcException */ public static List selectPhaseDataTablesFoundInEnv(Connection connection, String env) throws ArcException { - return ObjectUtils.firstNonNull(new GenericBean(UtilitaireDao.get(0).executeRequest(connection, - PhaseOperations.selectPhaseDataTablesFoundInEnv(env))).mapContent().get(ColumnEnum.TABLE_NAME.getColumnName()), new ArrayList()); + return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, + PhaseOperations.selectPhaseDataTablesFoundInEnv(env))).getColumnValues(ColumnEnum.TABLE_NAME.getColumnName()); } /** @@ -88,8 +88,8 @@ public static List selectPhaseDataTablesFoundInEnv(Connection connection * @throws ArcException */ public static List selectAllChildrenPhaseDataTables(Connection connection, String phaseTemplateTable) throws ArcException { - return ObjectUtils.firstNonNull(new GenericBean(UtilitaireDao.get(0).executeRequest(connection, - FormatSQL.tableExists(phaseTemplateTable + "\\_" + HashFileNameConversion.CHILD_TABLE_TOKEN + "\\_%"))).mapContent().get(ColumnEnum.TABLE_NAME.getColumnName()), new ArrayList()); + return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, + FormatSQL.tableExists(phaseTemplateTable + "\\_" + HashFileNameConversion.CHILD_TABLE_TOKEN + "\\_%"))).getColumnValues(ColumnEnum.TABLE_NAME.getColumnName()); } /** @@ -119,8 +119,7 @@ public static List selectIdSourceOfDataTable(Connection connection, Stri ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); query.build(SQL.SELECT, SQL.DISTINCT, ColumnEnum.ID_SOURCE.getColumnName(), SQL.FROM, dataTable); - return ObjectUtils.firstNonNull(new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)).mapContent().get(ColumnEnum.ID_SOURCE.getColumnName()) - , new ArrayList()); + return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)).getColumnValues(ColumnEnum.ID_SOURCE.getColumnName()); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java index e582976bc..30f37028d 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/PilotageOperations.java @@ -9,6 +9,7 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; 
import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.ApiService; @@ -17,6 +18,7 @@ import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.utils.FormatSQL; +import fr.insee.arc.utils.utils.ManipString; public class PilotageOperations { @@ -41,10 +43,9 @@ public static String queryUpdateNbEnr(String tablePilTemp, String tableTravailTe query.append("\n SET nb_enr=(select count(*) from " + tableTravailTemp + ") "); if (jointure.length > 0) { - query.append(", jointure= " + FormatSQL.textToSql(jointure[0]) + ""); + query.append(", jointure= " + FormatSQL.textToSql(ManipString.nullIfEmptyTrim(jointure[0]))); } query.append(";"); - return query.toString(); } @@ -139,21 +140,21 @@ public static StringBuilder queryUpdatePilotageError(String phase, String tableP * @param etat * @return */ - public static ArcPreparedStatementBuilder querySelectIdSourceFromPilotage(String tablePilotage, TraitementPhase phase, TraitementEtat etat) + public static ArcPreparedStatementBuilder querySelectIdSourceFromPilotage(String envExecution, TraitementPhase phase, TraitementEtat etat) { ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.build(SQL.SELECT, ColumnEnum.ID_SOURCE.getColumnName(), SQL.FROM, tablePilotage); + query.build(SQL.SELECT, ColumnEnum.ID_SOURCE.getColumnName(), SQL.FROM, ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution)); query.build(SQL.WHERE, ColumnEnum.PHASE_TRAITEMENT, "=" , query.quoteText(phase.toString())); query.build(SQL.AND, ColumnEnum.ETAT_TRAITEMENT, "=", query.quoteText(etat.getSqlArrayExpression()), SQL.CAST_OPERATOR, "text[]"); return query; } - public static String accessSelectEtapeForIdSource(Connection connection, String tablePilotage, TraitementPhase phase, TraitementEtat etat, String idSource) throws ArcException + public static String accessSelectEtapeForIdSource(Connection connection, String envExecution, TraitementPhase phase, TraitementEtat etat, String idSource) throws ArcException { ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.build(SQL.SELECT, ColumnEnum.ETAPE, SQL.FROM, tablePilotage); + query.build(SQL.SELECT, ColumnEnum.ETAPE, SQL.FROM, ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution)); query.build(SQL.WHERE, ColumnEnum.PHASE_TRAITEMENT, "=", query.quoteText(phase.toString())); query.build(SQL.AND, ColumnEnum.ETAT_TRAITEMENT, "=", query.quoteText(etat.getSqlArrayExpression()), SQL.CAST_OPERATOR, ColumnEnum.ETAT_TRAITEMENT.getColumnType().getTypeName() ); query.build(SQL.AND, ColumnEnum.ID_SOURCE.getColumnName(), "=", query.quoteText(idSource)); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java index 64bad314f..003f83865 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/ThreadOperations.java @@ -86,8 +86,8 @@ public ArcPreparedStatementBuilder preparationDefaultDao() throws ArcException { GenericBean gb = new GenericBean( UtilitaireDao.get(0).executeRequest(connexion.getCoordinatorConnection(), new ArcPreparedStatementBuilder("SELECT * FROM " + tablePilotageThread))); - // copy them in the table tablePilotageThread lcoated on the 
executor nod
-		query.append(query.copyFromGenericBean(tablePilotageThread, gb, true));
+		// copy them in the table tablePilotageThread located on the executor node
+		query.append(query.copyFromGenericBean(tablePilotageThread, gb));
 		}
 
 		return query;
 
@@ -119,7 +119,7 @@ public void marquageFinalDefaultDao(ArcPreparedStatementBuilder query) throws Ar
 		GenericBean gb = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion.getExecutorConnection(),
 				new ArcPreparedStatementBuilder("SELECT * FROM " + tablePilotageThread)));
 		// copy them on the nod
-		query.append(query.copyFromGenericBean(tablePilotageThread, gb, true));
+		query.append(query.copyFromGenericBean(tablePilotageThread, gb));
 
 		query.append(marquageFinal(tablePilotageGlobale, tablePilotageThread, idSource));
 
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java
index 8fca5d2e0..852384d29 100644
--- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ApiInitialisationService.java
@@ -1,57 +1,31 @@
 package fr.insee.arc.core.service.p0initialisation;
 
-import java.io.File;
-import java.io.IOException;
 import java.nio.file.Paths;
 import java.sql.Connection;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.stream.Collectors;
 
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.springframework.stereotype.Component;
 
-import fr.insee.arc.core.dataobjects.ArcDatabase;
 import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
 import fr.insee.arc.core.dataobjects.ColumnEnum;
-import fr.insee.arc.core.dataobjects.ViewEnum;
 import fr.insee.arc.core.model.TraitementEtat;
 import fr.insee.arc.core.model.TraitementPhase;
-import fr.insee.arc.core.model.TraitementTableParametre;
 import fr.insee.arc.core.service.global.ApiService;
-import fr.insee.arc.core.service.global.bo.JeuDeRegle;
-import fr.insee.arc.core.service.global.bo.JeuDeRegleDao;
 import fr.insee.arc.core.service.global.dao.DatabaseMaintenance;
 import fr.insee.arc.core.service.global.dao.FileSystemManagement;
-import fr.insee.arc.core.service.global.dao.HashFileNameConversion;
-import fr.insee.arc.core.service.global.dao.PhaseOperations;
-import fr.insee.arc.core.service.global.dao.PilotageOperations;
 import fr.insee.arc.core.service.global.dao.TableNaming;
-import fr.insee.arc.core.service.global.scalability.ServiceScalability;
-import fr.insee.arc.core.service.p0initialisation.engine.BddPatcher;
-import fr.insee.arc.core.service.p0initialisation.model.ListIdSourceInPilotage;
+import fr.insee.arc.core.service.p0initialisation.filesystem.RestoreFileSystem;
+import fr.insee.arc.core.service.p0initialisation.pilotage.CleanPilotage;
+import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage;
+import fr.insee.arc.core.service.p0initialisation.userdata.SynchronizeUserRulesAndMetadata;
 import fr.insee.arc.core.service.p1reception.ApiReceptionService;
-import fr.insee.arc.core.service.p5mapping.engine.ExpressionService;
-import fr.insee.arc.core.util.BDParameters;
-import fr.insee.arc.utils.consumer.ThrowingConsumer;
-import
fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; -import fr.insee.arc.utils.dataobjects.TypeEnum; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.format.Format; import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; -import fr.insee.arc.utils.structure.AttributeValue; import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.utils.structure.tree.HierarchicalView; import fr.insee.arc.utils.utils.FormatSQL; import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.ManipString; @@ -85,16 +59,16 @@ public ApiInitialisationService(String aCurrentPhase, String anParametersEnviron @Override public void executer() throws ArcException { - + // Supprime les lignes devenues inutiles récupérées par le webservice de la // table pilotage_fichier // Déplace les archives dans OLD - nettoyerTablePilotage(this.connexion.getCoordinatorConnection(), this.envExecution); + new CleanPilotage(this.coordinatorSandbox).execute(); // Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans // l'environnement d'excécution courant // mettre à jour les tables métier avec les paramêtres de la famille de norme - synchroniserSchemaExecutionAllNods(connexion.getCoordinatorConnection(), envParameters, envExecution); + SynchronizeUserRulesAndMetadata.synchroniserSchemaExecutionAllNods(connexion.getCoordinatorConnection(), envExecution); // marque les fichiers ou les archives à rejouer reinstate(this.connexion.getCoordinatorConnection()); @@ -105,248 +79,17 @@ public void executer() throws ArcException { // Met en cohérence les table de données avec la table de pilotage de // l'environnement // La table de pilotage fait foi - synchroniserEnvironmentByPilotage(this.connexion.getCoordinatorConnection(), this.envExecution); + new SynchronizeDataByPilotage(this.coordinatorSandbox).execute(); // remettre les archives ou elle doivent etre en cas de restauration de la base - rebuildFileSystem(); + new RestoreFileSystem(this.connexion.getCoordinatorConnection(), envExecution).execute(); } - /** - * remet le filesystem en etat en cas de restauration de la base - * - * @throws ArcException - */ - private void rebuildFileSystem() throws ArcException { - LoggerHelper.info(LOGGER, "rebuildFileSystem"); - - // parcourir toutes les archives dans le répertoire d'archive - PropertiesHandler properties = PropertiesHandler.getInstance(); - String repertoire = properties.getBatchParametersDirectory(); - - String nomTableArchive = TableNaming.dbEnv(envExecution) + "pilotage_archive"; - - // pour chaque entrepot de données, - // Comparer les archives du répertoire aux archives enregistrées dans la table - // d'archive : - // comme la table d'archive serait dans l'ancien état de données - // on peut remettre dans le repertoire de reception les archives qu'on ne - // retrouvent pas dans la table - - if (UtilitaireDao.get(0).hasResults(null, FormatSQL.tableExists("arc.ihm_entrepot"))) { - ArrayList entrepotList = new GenericBean(UtilitaireDao.get(0).executeRequest(null, - new ArcPreparedStatementBuilder("select id_entrepot from arc.ihm_entrepot"))).mapContent() - .get("id_entrepot"); - - if (entrepotList != null) { - for (String entrepot : entrepotList) { - - String fullEnvDir = FileSystemManagement.directoryEnvRoot(repertoire, this.envExecution); - File envDirFile = new File(fullEnvDir); - FileUtilsArc.createDirIfNotexist(envDirFile); - - 
String dirIn = ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, this.envExecution, - entrepot); - String dirOut = ApiReceptionService.directoryReceptionEntrepot(repertoire, this.envExecution, - entrepot); - - // on itère sur les fichiers trouvé dans le répertoire d'archive - File f = new File(dirIn); - FileUtilsArc.createDirIfNotexist(f); - - File[] fichiers = f.listFiles(); - - // on les insere dans une table temporaires t_files - StringBuilder requete = new StringBuilder(); - requete.append("DROP TABLE IF EXISTS t_files; CREATE TEMPORARY TABLE t_files (fname text); "); - - boolean first = true; - - for (File fichier : fichiers) { - if (!fichier.isDirectory()) { - if (first || requete.length() > FormatSQL.TAILLE_MAXIMAL_BLOC_SQL) { - UtilitaireDao.get(0).executeImmediate(this.connexion.getCoordinatorConnection(), - requete + ";"); - requete = new StringBuilder(); - requete.append( - "INSERT INTO t_files values ('" + fichier.getName().replace("'", "''") + "')"); - first = false; - } else { - requete.append(",('" + fichier.getName().replace("'", "''") + "')"); - } - } - } - UtilitaireDao.get(0).executeImmediate(this.connexion.getCoordinatorConnection(), requete + ";"); - // On cherche les fichiers du répertoire d'archive qui ne sont pas dans la table - // archive - // Si on en trouve ce n'est pas cohérent et on doit remettre ces fichiers dans - // le répertoire de reception - // pour être rechargé - ArcPreparedStatementBuilder requete2 = new ArcPreparedStatementBuilder(); - requete2.append(" SELECT fname FROM t_files a "); - requete2.append( - " WHERE NOT EXISTS (SELECT * FROM " + nomTableArchive + " b WHERE b.nom_archive=a.fname) "); - ArrayList fileToBeMoved = new GenericBean( - UtilitaireDao.get(0).executeRequest(this.connexion.getCoordinatorConnection(), requete2)) - .mapContent().get("fname"); - if (fileToBeMoved != null) { - for (String fname : fileToBeMoved) { - ApiReceptionService.deplacerFichier(dirIn, dirOut, fname, fname); - } - } - - // Traitement des # dans le repertoire de reception - // on efface les # dont le fichier existe déjà avec un autre nom sans # ou un - // numéro # inférieur - f = new File(dirOut); - FileUtilsArc.createDirIfNotexist(f); - - fichiers = f.listFiles(); - - for (File fichier : fichiers) { - String filenameWithoutExtension = ManipString.substringBeforeFirst(fichier.getName(), "."); - String ext = "." 
+ ManipString.substringAfterFirst(fichier.getName(), "."); - - if (filenameWithoutExtension.contains("#")) { - Integer number = ManipString - .parseInteger(ManipString.substringAfterLast(filenameWithoutExtension, "#")); - - // c'est un fichier valide - if (number != null) { - - String originalIdSource = ManipString.substringBeforeLast(filenameWithoutExtension, - "#"); - - // tester ce qu'on doit en faire - - // comparer au fichier sans index - File autreFichier = new File(dirOut + File.separator + originalIdSource + ext); - try { - if (autreFichier.exists() && FileUtils.contentEquals(autreFichier, fichier)) { - FileUtilsArc.delete(fichier); - } - } catch (IOException exception) { - throw new ArcException(exception, ArcExceptionMessage.FILE_DELETE_FAILED, fichier); - } - - // comparer aux fichier avec un index précédent - for (int i = 2; i < number; i++) { - autreFichier = new File(dirOut + File.separator + originalIdSource + "#" + i + ext); - - try { - if (autreFichier.exists() && FileUtils.contentEquals(autreFichier, fichier)) { - FileUtilsArc.delete(fichier); - } - } catch (IOException exception) { - throw new ArcException(exception, ArcExceptionMessage.FILE_DELETE_FAILED, - fichier); - } - - } - - } - } - - } - - } - } - - } - } - /** - * Build directories for the sandbox - * - * @param envExecutions - */ - public static void buildFileSystem(Connection connexion, String[] envExecutions) { - PropertiesHandler properties = PropertiesHandler.getInstance(); - - HashMap> entrepotList = new HashMap<>(); - - try { - entrepotList = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, - new ArcPreparedStatementBuilder("select id_entrepot from arc.ihm_entrepot"))).mapContent(); - - for (String envExecution : Arrays.asList(envExecutions)) { - - if (!entrepotList.isEmpty()) { - for (String d : entrepotList.get("id_entrepot")) { - FileUtilsArc.createDirIfNotexist(ApiReceptionService - .directoryReceptionEntrepot(properties.getBatchParametersDirectory(), envExecution, d)); - FileUtilsArc.createDirIfNotexist(ApiReceptionService.directoryReceptionEntrepotArchive( - properties.getBatchParametersDirectory(), envExecution, d)); - } - } - - FileUtilsArc.createDirIfNotexist(ApiReceptionService - .directoryReceptionEtatEnCours(properties.getBatchParametersDirectory(), envExecution)); - FileUtilsArc.createDirIfNotexist(ApiReceptionService - .directoryReceptionEtatOK(properties.getBatchParametersDirectory(), envExecution)); - FileUtilsArc.createDirIfNotexist(ApiReceptionService - .directoryReceptionEtatKO(properties.getBatchParametersDirectory(), envExecution)); - } - - } catch (ArcException ex) { - ex.logFullException(); - } - - } - - /** - * Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans - * Met à jour le schéma des tables métiers correspondant aux règles définies dans les familles - * @param connexion - * @param envParameters - * @param envExecution - * @throws ArcException - */ - public static void synchroniserSchemaExecutionAllNods(Connection connexion, String envParameters, String envExecution) throws ArcException { - - copyMetadataAllNods(connexion, envParameters, envExecution); - - mettreAJourSchemaTableMetierOnNods(connexion, envParameters, envExecution); - } - - /** - * drop the unused temporary table on coordinator and on executors if there is - * any - * - * @param coordinatorConnexion - * @return the number of executor nods in order to know if method worked on - * executors too - * @throws ArcException - */ - private int 
dropUnusedTemporaryTablesAllNods(Connection coordinatorConnexion) throws ArcException { - - ThrowingConsumer function = c -> { - dropUnusedTemporaryTablesOnConnection(c); - }; - - return ServiceScalability.dispatchOnNods(coordinatorConnexion, function, function); - - } - - /** - * Recopie/remplace les règles définie par l'utilisateur (table de ihm_) dans l'environnement d'excécution courant - * sur tous les noeuds postgres (coordinator et executors) - * @param connexion - * @param envParameters - * @param envExecution - * @throws ArcException - */ - public static void copyMetadataAllNods(Connection connexion, String envParameters, String envExecution) throws ArcException - { - copyMetadataToSandbox(connexion, envParameters, envExecution); - - copyMetadataToExecutorsAllNods(connexion, envExecution); - } - - /** * Méthode pour rejouer des fichiers * @@ -390,188 +133,8 @@ private void reinstate(Connection connexion) throws ArcException { + " b where a.container=b.container and b.to_delete='RA')"); } - - private static void mettreAJourSchemaTableMetierOnNods(Connection connexion, String envParameters, - String envExecution) throws ArcException { - - ThrowingConsumer function = executorConnection -> { - mettreAJourSchemaTableMetier(executorConnection, envParameters,envExecution); - }; - - ServiceScalability.dispatchOnNods(connexion, function, function); - - } - - - /** - * Créer ou detruire les colonnes ou les tables métiers en comparant ce qu'il y - * a en base à ce qu'il y a de déclaré dans la table des familles de norme - * - * @param connexion - * @throws ArcException - */ - private static void mettreAJourSchemaTableMetier(Connection connexion, String envParameters, - String envExecution) throws ArcException { - LoggerHelper.info(LOGGER, "mettreAJourSchemaTableMetier"); - /* - * Récupérer la table qui mappe : famille / table métier / variable métier et - * type de la variable - */ - ArcPreparedStatementBuilder requeteRef = new ArcPreparedStatementBuilder(); - requeteRef.append("SELECT lower(id_famille), lower('" + TableNaming.dbEnv(envExecution) - + "'||nom_table_metier), lower(nom_variable_metier), lower(type_variable_metier) FROM " + envParameters - + "_mod_variable_metier"); - - - - - List> relationalViewRef = Format - .patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(connexion, requeteRef)); - HierarchicalView familleToTableToVariableToTypeRef = HierarchicalView.asRelationalToHierarchical( - "(Réf) Famille -> Table -> Variable -> Type", - Arrays.asList("id_famille", "nom_table_metier", "variable_metier", "type_variable_metier"), - relationalViewRef); - /* - * Récupérer dans le méta-modèle de la base les tables métiers correspondant à - * la famille chargée - */ - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append( - "SELECT lower(id_famille), lower(table_schema||'.'||table_name) nom_table_metier, lower(column_name) nom_variable_metier"); - - // les types dans postgres sont horribles :( - // udt_name : float8 = float, int8=bigint, int4=int - // data_type : double precision = float, integer=int - requete.append( - ", case when lower(data_type)='array' then replace(replace(replace(ltrim(udt_name,'_'),'int4','int'),'int8','bigint'),'float8','float')||'[]' "); - requete.append( - " else replace(replace(lower(data_type),'double precision','float'),'integer','int') end type_variable_metier "); - requete.append("\n FROM information_schema.columns, " + envParameters + "_famille "); - requete.append("\n WHERE table_schema='" - + 
ManipString.substringBeforeFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase() + "' "); - requete.append("\n and table_name LIKE '" - + ManipString.substringAfterFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase() - + "mapping\\_%' "); - requete.append("\n and table_name LIKE '" - + ManipString.substringAfterFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase() - + "mapping\\_'||lower(id_famille)||'\\_%';"); - - List> relationalView = Format - .patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(connexion, requete)); - - HierarchicalView familleToTableToVariableToType = HierarchicalView.asRelationalToHierarchical( - "(Phy) Famille -> Table -> Variable -> Type", - Arrays.asList("id_famille", "nom_table_metier", "variable_metier", "type_variable_metier"), - relationalView); - StringBuilder requeteMAJSchema = new StringBuilder(); - - /* - * AJOUT/MODIFICATION DES COLONNES DE REFERENCE - */ - for (HierarchicalView famille : familleToTableToVariableToTypeRef.children()) { - /** - * Pour chaque table de référence - */ - for (HierarchicalView table : famille.children()) { - /** - * Est-ce que la table existe physiquement ? - */ - if (familleToTableToVariableToType.hasPath(famille, table)) { - /** - * Pour chaque variable de référence - */ - for (HierarchicalView variable : table.children()) { - /* - * Si la variable*type n'existe pas - */ - if (!familleToTableToVariableToType.hasPath(famille, table, variable, - variable.getUniqueChild())) { - // BUG POSTGRES : pb drop et add column : recréer la table sinon ca peut excéder - // la limite postgres de 1500 - requeteMAJSchema.append("DROP TABLE IF EXISTS " + table.getLocalRoot() + "_IMG ;"); - requeteMAJSchema.append("CREATE TABLE " + table.getLocalRoot() + "_IMG " - + FormatSQL.WITH_NO_VACUUM + " AS SELECT * FROM " + table.getLocalRoot() + ";"); - requeteMAJSchema.append("DROP TABLE IF EXISTS " + table.getLocalRoot() + " ;"); - requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + "_IMG RENAME TO " - + ManipString.substringAfterFirst(table.getLocalRoot(), ".") + ";"); - - /* - * Si la variable existe - */ - if (familleToTableToVariableToType.hasPath(famille, table, variable)) { - /* - * Drop de la variable - */ - requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + " DROP COLUMN " - + variable.getLocalRoot() + ";"); - } - /* - * Ajout de la variable - */ - requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + " ADD COLUMN " - + variable.getLocalRoot() + " " + variable.getUniqueChild().getLocalRoot() + " "); - if (variable.getUniqueChild().getLocalRoot().equals(TypeEnum.TEXT.getTypeName())) { - requeteMAJSchema.append(" collate \"C\" "); - } - requeteMAJSchema.append(";"); - - } - } - } else { - AttributeValue[] attr = new AttributeValue[table.children().size()]; - int i = 0; - for (HierarchicalView variable : table.children()) { - attr[i++] = new AttributeValue(variable.getLocalRoot(), - variable.getUniqueChild().getLocalRoot()); - } - requeteMAJSchema.append("CREATE TABLE " + table.getLocalRoot() + " ("); - for (int j = 0; j < attr.length; j++) { - if (j > 0) { - requeteMAJSchema.append(", "); - } - requeteMAJSchema.append(attr[j].getFirst() + " " + attr[j].getSecond()); - if (attr[j].getSecond().equals(TypeEnum.TEXT.getTypeName())) { - requeteMAJSchema.append(" collate \"C\" "); - } - } - requeteMAJSchema.append(") " + FormatSQL.WITH_NO_VACUUM + ";\n"); - } - - } - } - /* - * SUPPRESSION DES COLONNES QUI NE SONT PAS CENSEES EXISTER - */ - for (HierarchicalView famille : 
familleToTableToVariableToType.children()) { - /** - * Pour chaque table physique - */ - for (HierarchicalView table : familleToTableToVariableToType.get(famille).children()) { - /** - * Est-ce que la table devrait exister ? - */ - if (!familleToTableToVariableToTypeRef.hasPath(famille, table)) { - requeteMAJSchema.append("DROP TABLE IF EXISTS " + table.getLocalRoot() + ";\n"); - } else { - /** - * Pour chaque variable de cette table - */ - for (HierarchicalView variable : table.children()) { - /** - * Est-ce que la variable devrait exister ? - */ - if (!familleToTableToVariableToTypeRef.hasPath(famille, table, variable)) { - requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + " DROP COLUMN " - + variable.getLocalRoot() + ";\n"); - } - } - } - } - } - UtilitaireDao.get(0).executeBlock(connexion, requeteMAJSchema); - } /** * Suppression dans la table de pilotage des fichiers qui ont été marqué par la @@ -591,345 +154,7 @@ private void cleanToDelete(Connection connexion, String tablePil) throws ArcExce UtilitaireDao.get(0).executeBlock(connexion, requete); } - /** - * Suppression dans la table de pilotage des fichiers consommés 1- une copie des - * données du fichier doit avoir été récupérée par tous les clients décalrés 2- - * pour un fichier donné, l'ancienneté de son dernier transfert doit dépasser - * Nb_Jour_A_Conserver jours RG2. - * - * @param connexion - * @param tablePil - * @param tablePil - * @throws ArcException - */ - private void nettoyerTablePilotage(Connection connexion, String envExecution) throws ArcException { - LoggerHelper.info(LOGGER, "nettoyerTablePilotage"); - - BDParameters bdParameters = new BDParameters(ArcDatabase.COORDINATOR); - - // indique combien de jour doivent etre conservé les fichiers apres avoir été - int numberOfDaysToKeepFiles = bdParameters.getInt(this.connexion.getCoordinatorConnection(), - "ApiInitialisationService.Nb_Jour_A_Conserver", 365); - - // nombre de fichier à traiter lors à chaque itération d'archivage - int numberOfFilesToProceed = bdParameters.getInt(this.connexion.getCoordinatorConnection(), - "ApiInitialisationService.NB_FICHIER_PER_ARCHIVE", 10000); - - String nomTablePilotage = TableNaming.dbEnv(envExecution) + "pilotage_fichier"; - String nomTableArchive = TableNaming.dbEnv(envExecution) + "pilotage_archive"; - - ArcPreparedStatementBuilder requete; - - requete = new ArcPreparedStatementBuilder(); - - requete.append("DROP TABLE IF EXISTS fichier_to_delete; "); - requete.append("CREATE TEMPORARY TABLE fichier_to_delete AS "); - requete.append("WITH ") - - // 1. on récupère sous forme de tableau les clients de chaque famille - .append("clientsParFamille AS ( ").append("SELECT array_agg(id_application) as client, id_famille ") - .append("FROM arc.ihm_client ").append("GROUP BY id_famille ").append(") ") - - // 2. 
on fait une première selection des fichiers candidats au Delete - .append(",isFichierToDelete AS ( ") - .append("SELECT " + ColumnEnum.ID_SOURCE.getColumnName() + ", container, date_client ").append("FROM ") - .append(nomTablePilotage).append(" a ").append(", arc.ihm_norme b ").append(", clientsParFamille c ") - .append("WHERE a.phase_traitement='" + TraitementPhase.MAPPING + "' ") - .append("AND a.etat_traitement='{" + TraitementEtat.OK + "}' ").append("AND a.client is not null ") - .append("AND a.id_norme=b.id_norme ").append("AND a.periodicite=b.periodicite ") - .append("AND b.id_famille=c.id_famille ") - // on filtre selon RG1 - .append("AND (a.client <@ c.client AND c.client <@ a.client) ") - // test d'égalité des 2 tableaux (a.client,c.client) - .append(") ") - // par double inclusion (A dans B & B dans A) - - // 3. on selectionne les fichiers éligibles - .append("SELECT " + ColumnEnum.ID_SOURCE.getColumnName() - + ", container FROM (SELECT unnest(date_client) as t, " + ColumnEnum.ID_SOURCE.getColumnName() - + ", container FROM isFichierToDelete) ww ") - .append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName() + ", container ") - // on filtre selon RG2 - .append("HAVING (current_date - max(t) ::date ) >=" + numberOfDaysToKeepFiles + " ").append("; "); - - UtilitaireDao.get(0).executeRequest(connexion, requete); - - // requete sur laquelle on va itérer : on selectionne un certain nombre de - // fichier et on itere - requete = new ArcPreparedStatementBuilder(); - - // 3b. on selectionne les fichiers éligibles et on limite le nombre de retour - // pour que l'update ne soit pas trop massif (perf) - requete.append("WITH fichier_to_delete_limit AS ( ") - .append(" SELECT * FROM fichier_to_delete LIMIT " + numberOfFilesToProceed + " ").append(") ") - - // 4. suppression des archive de la table d'archive (bien retirer le nom de - // l'entrepot du début du container) - .append(",delete_archive AS (").append("DELETE FROM ").append(nomTableArchive).append(" a ") - .append("USING fichier_to_delete_limit b ") - .append("WHERE a.nom_archive=substring(b.container,strpos(b.container,'_')+1) ").append("returning *) ") - - // 5. suppression des fichier de la table de pilotage - .append(",delete_idsource AS (").append("DELETE FROM ").append(nomTablePilotage).append(" a ") - .append("USING fichier_to_delete_limit b ") - .append("WHERE a." + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() - + " ") - .append(") ") - - // 5b. suppression de la tgable des fichiers eligibles - .append(",delete_source as (DELETE FROM fichier_to_delete a using fichier_to_delete_limit b where row(a." - + ColumnEnum.ID_SOURCE.getColumnName() + ",a.container)::text=row(b." - + ColumnEnum.ID_SOURCE.getColumnName() + ",b.container)::text) ") - // 6. 
récuperer la liste des archives - .append("SELECT entrepot, nom_archive FROM delete_archive "); - // initialisation de la liste contenant les archives à déplacer - HashMap> m = new HashMap<>(); - m.put("entrepot", new ArrayList()); - m.put("nom_archive", new ArrayList()); - - HashMap> n = new HashMap<>(); - - // on continue jusqu'a ce qu'on ne trouve plus rien à effacer - do { - // récupérer le résultat de la requete - LoggerHelper.info(LOGGER, "Archivage de " + numberOfFilesToProceed + " fichiers - Début"); - n = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, requete)).mapContent(); - - // ajouter à la liste m les enregistrements qu'ils n'existent pas déjà dans m - - // on parcours n - if (!n.isEmpty()) { - for (int k = 0; k < n.get("entrepot").size(); k++) { - boolean toInsert = true; - - // vérifier en parcourant m si on doit réaliser l'insertion - for (int l = 0; l < m.get("entrepot").size(); l++) { - if (n.get("entrepot").get(k).equals(m.get("entrepot").get(l)) - && n.get("nom_archive").get(k).equals(m.get("nom_archive").get(l))) { - toInsert = false; - break; - } - } - - // si aprés avoir parcouru tout m, l'enreigstrement de n n'est pas trouvé on - // l'insere - if (toInsert) { - m.get("entrepot").add(n.get("entrepot").get(k)); - m.get("nom_archive").add(n.get("nom_archive").get(k)); - } - - } - } - LoggerHelper.info(LOGGER, "Archivage Fin"); - - } while (UtilitaireDao.get(0).hasResults(connexion, - new ArcPreparedStatementBuilder("select 1 from fichier_to_delete limit 1"))); - - // y'a-til des choses à faire ? - if (m.get("entrepot").size() > 0) { - - // 7. Déplacer les archives effacées dans le répertoire de sauvegarde "OLD" - PropertiesHandler properties = PropertiesHandler.getInstance(); - String repertoire = properties.getBatchParametersDirectory(); - - String entrepotSav = ""; - for (int i = 0; i < m.get("entrepot").size(); i++) { - String entrepot = m.get("entrepot").get(i); - String archive = m.get("nom_archive").get(i); - String dirIn = ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, this.envExecution, - entrepot); - String dirOut = ApiReceptionService.directoryReceptionEntrepotArchiveOldYearStamped(repertoire, this.envExecution, - entrepot); - - // création du répertoire "OLD" s'il n'existe pas - if (!entrepotSav.equals(entrepot)) { - File f = new File(dirOut); - FileUtilsArc.createDirIfNotexist(f); - entrepotSav = entrepot; - } - - // déplacement de l'archive de dirIn vers dirOut - ApiReceptionService.deplacerFichier(dirIn, dirOut, archive, archive); - - } - - StringBuilder requeteMaintenance = new StringBuilder(); - requete.append("vacuum analyze " + nomTablePilotage + "; "); - requete.append("vacuum analyze " + nomTableArchive + "; "); - UtilitaireDao.get(0).executeImmediate(connexion, requeteMaintenance); - } - - } - - /** - * Recopier les tables de l'environnement de parametres (IHM) vers - * l'environnement d'execution (batch, bas, ...) 
- * - * @param connexion - * @param anParametersEnvironment - * @param anExecutionEnvironment - * @throws ArcException - */ - private static void copyMetadataToSandbox(Connection connexion, String anParametersEnvironment, - String anExecutionEnvironment) throws ArcException { - copyRulesTablesToExecution(connexion, anParametersEnvironment, anExecutionEnvironment); - applyExpressions(connexion, anExecutionEnvironment); - } - - /** - * Copy the table containing user rules to the sandbox so they will be used by - * the sandbox process - * - * @param coordinatorConnexion - * @param anParametersEnvironment - * @param anExecutionEnvironment - * @throws ArcException - */ - private static void copyRulesTablesToExecution(Connection coordinatorConnexion, String anParametersEnvironment, - String anExecutionEnvironment) throws ArcException { - LoggerHelper.info(LOGGER, "copyTablesToExecution"); - try { - - anExecutionEnvironment = anExecutionEnvironment.replace(".", "_"); - - StringBuilder requete = new StringBuilder(); - TraitementTableParametre[] r = TraitementTableParametre.values(); - StringBuilder condition = new StringBuilder(); - String modaliteEtat = anExecutionEnvironment.replace("_", "."); - String tableImage; - String tableCurrent; - for (int i = 0; i < r.length; i++) { - // on créé une table image de la table venant de l'ihm - // (environnement de parametre) - TraitementTableParametre parameterTable = r[i]; - tableCurrent = TableNaming.dbEnv(anExecutionEnvironment) + parameterTable; - tableImage = FormatSQL - .temporaryTableName(TableNaming.dbEnv(anExecutionEnvironment) + parameterTable); - - // recopie partielle (en fonction de l'environnement - // d'exécution) - // pour les tables JEUDEREGLE, CONTROLE_REGLE et MAPPING_REGLE - condition.setLength(0); - if (parameterTable == TraitementTableParametre.NORME) { - condition.append(" WHERE etat='1'"); - } else if (parameterTable == TraitementTableParametre.CALENDRIER) { - condition.append(" WHERE etat='1' "); - condition.append(" and exists (select 1 from " + anParametersEnvironment - + "_norme b where a.id_norme=b.id_norme and b.etat='1')"); - } else if (parameterTable == TraitementTableParametre.JEUDEREGLE) { - condition.append(" WHERE etat=lower('" + modaliteEtat + "')"); - condition.append(" and exists (select 1 from " + anParametersEnvironment - + "_norme b where a.id_norme=b.id_norme and b.etat='1')"); - condition.append(" and exists (select 1 from " + anParametersEnvironment - + "_calendrier b where a.id_norme=b.id_norme and a.periodicite=b.periodicite and a.validite_inf=b.validite_inf and a.validite_sup=b.validite_sup and b.etat='1')"); - } else if (parameterTable.isPartOfRuleset()) { - condition.append(" WHERE exists (select 1 from " + anParametersEnvironment - + "_norme b where a.id_norme=b.id_norme and b.etat='1')"); - condition.append(" and exists (select 1 from " + anParametersEnvironment - + "_calendrier b where a.id_norme=b.id_norme and a.periodicite=b.periodicite and a.validite_inf=b.validite_inf and a.validite_sup=b.validite_sup and b.etat='1')"); - condition.append(" and exists (select 1 from " + anParametersEnvironment - + "_jeuderegle b where a.id_norme=b.id_norme and a.periodicite=b.periodicite and a.validite_inf=b.validite_inf and a.validite_sup=b.validite_sup AND a.version=b.version and b.etat=lower('" - + modaliteEtat + "'))"); - } - requete.append(FormatSQL.dropTable(tableImage)); - - requete.append("CREATE TABLE " + tableImage + " " + FormatSQL.WITH_NO_VACUUM + " AS SELECT a.* FROM " - + anParametersEnvironment + "_" + 
r[i] + " AS a " + condition + ";\n"); - - requete.append(FormatSQL.dropTable(tableCurrent)); - requete.append("ALTER TABLE " + tableImage + " rename to " - + ManipString.substringAfterLast(tableCurrent, ".") + "; \n"); - } - UtilitaireDao.get(0).executeBlock(coordinatorConnexion, requete); - - // Dernière étape : recopie des tables de nomenclature et des tables prefixées - // par ext_ du schéma arc vers schéma courant - - requete.setLength(0); - - // 1.Préparation des requêtes de suppression des tables nmcl_ et ext_ du schéma - // courant - - ArcPreparedStatementBuilder requeteSelectDrop = new ArcPreparedStatementBuilder(); - requeteSelectDrop - .append(" SELECT 'DROP TABLE IF EXISTS '||schemaname||'.'||tablename||';' AS requete_drop"); - requeteSelectDrop.append(" FROM pg_tables where schemaname = " - + requeteSelectDrop.quoteText(anExecutionEnvironment.toLowerCase()) + " "); - requeteSelectDrop.append(" AND tablename SIMILAR TO '%nmcl%|%ext%'"); - - ArrayList requetesDeSuppressionTablesNmcl = new GenericBean( - UtilitaireDao.get(0).executeRequest(coordinatorConnexion, requeteSelectDrop)).mapContent().get("requete_drop"); - - if (requetesDeSuppressionTablesNmcl != null) { - for (String requeteDeSuppression : requetesDeSuppressionTablesNmcl) { - requete.append("\n ").append(requeteDeSuppression); - } - } - - // 2.Préparation des requêtes de création des tables - ArrayList requetesDeCreationTablesNmcl = new GenericBean(UtilitaireDao.get(0) - .executeRequest(coordinatorConnexion, new ArcPreparedStatementBuilder( - "select tablename from pg_tables where (tablename like 'nmcl\\_%' OR tablename like 'ext\\_%') and schemaname='arc'"))) - .mapContent().get("tablename"); - - if (requetesDeCreationTablesNmcl != null) { - for (String tableName : requetesDeCreationTablesNmcl) { - requete.append("\n CREATE TABLE " + TableNaming.dbEnv(anExecutionEnvironment) + tableName - + " " + FormatSQL.WITH_NO_VACUUM + " AS SELECT * FROM arc." 
+ tableName + ";"); - } - } - - // 3.Execution du script Sql de suppression/création - UtilitaireDao.get(0).executeBlock(coordinatorConnexion, requete); - - } catch (Exception e) { - LoggerHelper.trace(LOGGER, - "Problème lors de la copie des tables vers l'environnement : " + anExecutionEnvironment); - LoggerHelper.error(LOGGER, "Error in ApiInitialisation.copyRulesTablesToExecution"); - throw e; - } - } - - /** - * replace an expression in rules - * - * @param connexion - * @param anExecutionEnvironment - * @throws ArcException - */ - private static void applyExpressions(Connection connexion, String anExecutionEnvironment) throws ArcException { - // Checks expression validity - ExpressionService expressionService = new ExpressionService(); - ArrayList allRuleSets = JeuDeRegleDao.recupJeuDeRegle(connexion, - anExecutionEnvironment + ".jeuderegle"); - for (JeuDeRegle ruleSet : allRuleSets) { - // Check - GenericBean expressions = expressionService.fetchExpressions(connexion, anExecutionEnvironment, ruleSet); - if (expressions.isEmpty()) { - continue; - } - - Optional loopInExpressionSet = expressionService.loopInExpressionSet(expressions); - if (loopInExpressionSet.isPresent()) { - LoggerHelper.info(LOGGER, "A loop is present in the expression set : " + loopInExpressionSet.get()); - LoggerHelper.info(LOGGER, "The expression set is not applied"); - continue; - } - - // Apply - expressions = expressionService.fetchOrderedExpressions(connexion, anExecutionEnvironment, ruleSet); - if (expressionService.isExpressionSyntaxPresentInControl(connexion, anExecutionEnvironment, ruleSet)) { - UtilitaireDao.get(0).executeRequest(connexion, - expressionService.applyExpressionsToControl(ruleSet, expressions, anExecutionEnvironment)); - } - if (expressionService.isExpressionSyntaxPresentInMapping(connexion, anExecutionEnvironment, ruleSet)) { - UtilitaireDao.get(0).executeRequest(connexion, - expressionService.applyExpressionsToMapping(ruleSet, expressions, anExecutionEnvironment)); - } - } - - } /** * Méthode pour remettre le système d'information dans la phase précédente @@ -1013,7 +238,7 @@ public void retourPhasePrecedente(TraitementPhase phase, ArcPreparedStatementBui // Run a database synchronization with the pilotage table try { - synchroniserEnvironmentByPilotage(this.connexion.getCoordinatorConnection(), this.envExecution); + new SynchronizeDataByPilotage(this.coordinatorSandbox).execute(); } catch (Exception e) { LoggerHelper.error(LOGGER, e); } @@ -1027,381 +252,16 @@ public void retourPhasePrecedente(TraitementPhase phase, ArcPreparedStatementBui public void resetEnvironnement() { try { - synchroniserEnvironmentByPilotage(this.connexion.getCoordinatorConnection(), this.envExecution); + new SynchronizeDataByPilotage(this.coordinatorSandbox).execute(); DatabaseMaintenance.maintenanceDatabaseClassic(connexion.getCoordinatorConnection(), envExecution); } catch (Exception e) { LoggerHelper.error(LOGGER, e); } } - /** - * Récupere toutes les tables temporaires d'un environnement - * - * @param env - * @return - */ - private ArcPreparedStatementBuilder requeteListAllTablesEnvTmp(String env) { - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - TraitementPhase[] phase = TraitementPhase.values(); - // on commence après la phase "initialisation". 
i=2 - for (int i = 2; i < phase.length; i++) { - if (i > 2) { - requete.append(" UNION ALL "); - } - requete.append(FormatSQL.tableExists(TableNaming.dbEnv(this.envExecution) + phase[i] + "$%$tmp$%")); - requete.append(" UNION ALL "); - requete.append( - FormatSQL.tableExists(TableNaming.dbEnv(this.envExecution) + phase[i] + "\\_%$tmp$%")); - } - return requete; - } - - /** - * Remise en coherence des tables de données avec la table de pilotage - * - * @param connexion - * @param envExecution - * @throws ArcException - */ - private void synchroniserEnvironmentByPilotage(Connection connexion, String envExecution) throws ArcException { - LoggerHelper.info(LOGGER, "synchronisationEnvironmentByPilotage"); - try { - // maintenance de la table de pilotage - // retirer les "encours" de la table de pilotage - LoggerHelper.info(LOGGER, "** Maintenance table de pilotage **"); - - // pour chaque fichier de la phase de pilotage, remet à etape='1' pour sa - // derniere phase valide - remettreEtapePilotage(connexion); - - // recrée la table de pilotage, ses index, son trigger - rebuildPilotage(connexion, this.tablePil); - - // drop des tables temporaires de travail - dropUnusedTemporaryTablesAllNods(connexion); - - // pour chaque table de l'environnement d'execution courant - dropUnusedDataTablesAllNods(connexion, envExecution, tablePil, null); - - // pour chaque table de l'environnement d'execution courant - deleteUnusedDataRecordsAllNods(connexion, envExecution, tablePil, null); - - - } catch (Exception ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "synchroniserEnvironnementByPilotage()", LOGGER, ex); - throw ex; - } - - // maintenance des tables de catalogue car postgres ne le réalise pas - // correctement sans mettre en oeuvre - // une stratégie de vacuum hyper agressive et donc ajouter une spécificité pour - // les DBAs - DatabaseMaintenance.maintenanceDatabaseClassic(connexion, envExecution); - - } - - /** - * delete the record from the data tables on all nods - * - according to pilotage table if providedIdSourceToDelete is not provided - * - according to providedIdSourceToDelete if provided - * @param coordinatorConnexion - * @param envExecution - * @param tablePilotage - * @param providedIdSourceToDelete - * @throws ArcException - */ - public static void deleteUnusedDataRecordsAllNods(Connection coordinatorConnexion, String envExecution, String tablePilotage, List providedIdSourceToDelete) throws ArcException { - - ListIdSourceInPilotage listIdSourceInPilotage = new ListIdSourceInPilotage(); - - if (providedIdSourceToDelete==null) - { - listIdSourceInPilotage - .addSource(coordinatorConnexion, tablePilotage, TraitementPhase.MAPPING, TraitementEtat.OK) - .addSource(coordinatorConnexion, tablePilotage, TraitementPhase.MAPPING, TraitementEtat.KO) - ; - } - - ThrowingConsumer function = executorConnection -> - deleteUnusedDataRecordsAllTables(executorConnection, envExecution, listIdSourceInPilotage, providedIdSourceToDelete); - - ServiceScalability.dispatchOnNods(coordinatorConnexion, function, function); - - } - - /** - * Delete the unreferenced data records found in all tables that may contains data - * i.e. 
currently tables of the "mapping" phase - * @param executorConnection - * @param envExecution - * @param listIdSourceInPilotage - * @throws ArcException - */ - private static void deleteUnusedDataRecordsAllTables(Connection executorConnection, String envExecution, ListIdSourceInPilotage listIdSourceInPilotage, List providedIdSourceToDelete) throws ArcException - { - - List dataTables = PhaseOperations.selectPhaseDataTablesFoundInEnv(executorConnection, envExecution); - - // if no phase tables, exit - if (dataTables.isEmpty()) { - return; - } - - deleteUnusedDataRecords(executorConnection, listIdSourceInPilotage, dataTables, TraitementPhase.MAPPING, TraitementEtat.OK, providedIdSourceToDelete); - - deleteUnusedDataRecords(executorConnection, listIdSourceInPilotage, dataTables, TraitementPhase.MAPPING, TraitementEtat.KO, providedIdSourceToDelete); - - } - - - /** - * Delete the unreferenced data records found in the tables corresponding to a given phase and state - * @param executorConnection - * @param envExecution - * @param listIdSourceInPilotage - * @throws ArcException - */ - private static void deleteUnusedDataRecords(Connection executorConnection, ListIdSourceInPilotage listIdSourceInPilotage, List envTables, TraitementPhase phase, TraitementEtat etat, List providedIdSourceToDelete) throws ArcException - { - // récupérer la liste des tables de la phase - List envTablesWithRecords = envTables.stream().filter(nomTable -> PhaseOperations.extractPhaseFromTableName(nomTable).equals(phase) && PhaseOperations.extractEtatFromTableName(nomTable).equals(etat)).collect(Collectors.toList()); - - // quels enregistrements à effacer - if (envTablesWithRecords.isEmpty()) - { - return; - } - - for (String dataTable : envTablesWithRecords) { - - // retrieve the idSource that shouldn't be in data table according to pilotage table - List idSourceInDataTableThatShouldntBe; - - if (providedIdSourceToDelete!=null) - { - idSourceInDataTableThatShouldntBe = providedIdSourceToDelete; - } - else - { - idSourceInDataTableThatShouldntBe= PhaseOperations.selectIdSourceOfDataTable(executorConnection, dataTable); - idSourceInDataTableThatShouldntBe.removeAll(listIdSourceInPilotage.getIdSourceInPilotage(phase, etat)); - } - - - if (!idSourceInDataTableThatShouldntBe.isEmpty()) - { - deleteDataRecords(executorConnection, idSourceInDataTableThatShouldntBe, dataTable); - } - } - - } - - - - /** - * Delete data records from a target table according to a given list of source to delete - * @param executorConnection - * @param idSourceToDelete - * @param targetDataTable - * @throws ArcException - */ - private static void deleteDataRecords(Connection executorConnection, List idSourceToDelete, String targetDataTable) throws ArcException - { - ArcPreparedStatementBuilder query= new ArcPreparedStatementBuilder(); - - GenericBean gb = - new GenericBean(ColumnEnum.ID_SOURCE.getColumnName(), TypeEnum.TEXT.getTypeName(), idSourceToDelete); - query.copyFromGenericBean(ViewEnum.T1.getTableName(), gb, true); - - query.build(SQL.DELETE, targetDataTable); - query.build(SQL.WHERE, ColumnEnum.ID_SOURCE, SQL.IN ); - query.build("(", SQL.SELECT, ColumnEnum.ID_SOURCE, SQL.FROM, ViewEnum.T1.getTableName(), ")"); - - UtilitaireDao.get(0).executeRequest(executorConnection, query); - - } - - - /** - * dispatch on every nods the void that drop unused data tables - * @param coordinatorConnexion - * @param envExecution - * @param tablePilotage - * @throws ArcException - */ - public static void dropUnusedDataTablesAllNods(Connection 
coordinatorConnexion, String envExecution, String tablePilotage, List providedIdSourceToDrop) throws ArcException { - - ThrowingConsumer function = executorConnection -> - dropUnusedDataTables(coordinatorConnexion, executorConnection, envExecution, tablePilotage, providedIdSourceToDrop); - - ServiceScalability.dispatchOnNods(coordinatorConnexion, function, function); - - } - - - /** - * call method to drop the unused data table found on an given executor nod - * @param coordinatorConnexion - * @param executorConnection - * @param envExecution - * @param tablePilotage - * @throws ArcException - */ - private static void dropUnusedDataTables(Connection coordinatorConnexion, Connection executorConnection, String envExecution, String tablePilotage, List providedIdSourceToDrop) throws ArcException - { - // This returns the list of the template data table for phases - // For example, "chargement_ok" is the template table for the phase called "CHARGEMENT" in an "OK" state - // The table names from the files proceeded in the phase "CHARGEMENT" will be based on the table template name - // chargement_ok_child_ - // chargement_ok_child_ - // ... - // Historically there was an inheritance link between template table (parent) and all the real data tables (children) - // but it had been removed for performance issue - List templateDataTablesThatCanBeDropped = PhaseOperations.selectPhaseDataTablesFoundInEnv(executorConnection, envExecution) - .stream().filter(nomTable -> !PhaseOperations.extractPhaseFromTableName(nomTable).equals(TraitementPhase.MAPPING)).collect(Collectors.toList()); - - // no data tables to check ? exit - if (templateDataTablesThatCanBeDropped.isEmpty()) { - return; - } - - dropUnusedDataTables(coordinatorConnexion, executorConnection, tablePilotage, templateDataTablesThatCanBeDropped, providedIdSourceToDrop); - - } - - /** - * iterate over data table found in executor nod - * drop the ones that are no longer referenced in the pilotage table found on coordinator nod - * @param coordinatorConnexion - * @param executorConnection - * @param tablePilotage - * @param dataTablesThatCanBeDropped - * @throws ArcException - */ - private static void dropUnusedDataTables(Connection coordinatorConnexion, Connection executorConnection, String tablePilotage, List templateDataTablesThatCanBeDropped, List providedIdSourceToDrop) throws ArcException - { - // Build the list of child data tables to drop - - List childDataTablesToBeDropped = new ArrayList<>(); - - for (String templateDataTable : templateDataTablesThatCanBeDropped) { - - if (providedIdSourceToDrop != null) - { - // if list of idSource is provided, calculate the corresponding tablenames and add it to drop list - for (String idSource:providedIdSourceToDrop) - { - childDataTablesToBeDropped.add(HashFileNameConversion.tableOfIdSource(templateDataTable, idSource)); - } - } - else - { - TraitementPhase phase = PhaseOperations.extractPhaseFromTableName(templateDataTable); - TraitementEtat etat = PhaseOperations.extractEtatFromTableName(templateDataTable); - - // retrieve all the children tables of the template table - List childDataTables = PhaseOperations.selectAllChildrenPhaseDataTables(executorConnection, templateDataTable); - - // it could be more bulky but it would be less readable and useless; this is rarely triggered and access 10000 objects at max - for (String childDataTable : childDataTables) { - - // retrieve the idSource of the childDataTable - String idSource = PhaseOperations.selectIdSourceOfChildDataTable(executorConnection, 
childDataTable); - String etape = PilotageOperations.accessSelectEtapeForIdSource(coordinatorConnexion, tablePilotage, phase, etat, idSource); - - // if no references in pilotage table, mark for drop - if (etape == null) { - childDataTablesToBeDropped.add(childDataTable); - } - } - } - } - - dropDataTables (executorConnection, childDataTablesToBeDropped); - } - - - public static void dropDataTables(Connection executorConnection, List dataTablesToDrop) - { - UtilitaireDao.get(0).dropTable(executorConnection, dataTablesToDrop); - } - - /** - * drop the unused temporary table on the target connection - * - * @param targetConnexion - * @throws ArcException - */ - private void dropUnusedTemporaryTablesOnConnection(Connection targetConnexion) throws ArcException { - GenericBean g = new GenericBean( - UtilitaireDao.get(0).executeRequest(targetConnexion, requeteListAllTablesEnvTmp(envExecution))); - if (!g.mapContent().isEmpty()) { - ArrayList envTables = g.mapContent().get("table_name"); - for (String nomTable : envTables) { - UtilitaireDao.get(0).executeBlock(targetConnexion, FormatSQL.dropTable(nomTable)); - } - } - } - - private static void rebuildPilotage(Connection connexion, String tablePilotage) throws ArcException { - UtilitaireDao.get(0).executeBlock(connexion, - FormatSQL.rebuildTableAsSelectWhere(tablePilotage, "true", - "create index idx1_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (" + ColumnEnum.ID_SOURCE.getColumnName() + ");", - "create index idx2_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (phase_traitement, etape);", - "create index idx4_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (rapport) where rapport is not null;", - "create index idx5_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (o_container,v_container);", - "create index idx6_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (to_delete);", - "create index idx7_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on " - + tablePilotage + " (date_entree, phase_traitement, etat_traitement);")); - - UtilitaireDao.get(0).executeBlock(connexion, "analyze " + tablePilotage + ";"); - } - - /** - * la variable etape indique si c'est bien l'etape à considerer pour traitement - * ou pas etape='1' : phase à considerer, sinon etape='0' - * - * @return - * @throws ArcException - */ - private boolean remettreEtapePilotage(Connection connexion) throws ArcException { - - StringBuilder requete = new StringBuilder(); - - requete.append("DELETE FROM " + this.tablePil + " WHERE etat_traitement='{ENCOURS}';"); - - requete.append(ApiService.resetPreviousPhaseMark(this.tablePil, null, null)); - requete.append("WITH tmp_1 as (select " + ColumnEnum.ID_SOURCE.getColumnName() + ", max("); - new StringBuilder(); - requete.append("case "); - for (TraitementPhase p : TraitementPhase.values()) { - requete.append("when phase_traitement='" + p.toString() + "' then " + p.ordinal() + " "); - } - requete.append("end ) as p "); - requete.append("FROM " + this.tablePil + " "); - requete.append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName() + " "); - requete.append("having max(etape)=0 ) "); - requete.append("update " + this.tablePil + " a "); - requete.append("set etape=1 "); - requete.append("from tmp_1 b "); - requete.append( - "where a." + ColumnEnum.ID_SOURCE.getColumnName() + "=b." 
+ ColumnEnum.ID_SOURCE.getColumnName() + " "); - requete.append("and a.phase_traitement= case "); - for (TraitementPhase p : TraitementPhase.values()) { - requete.append("when p=" + p.ordinal() + " then '" + p.toString() + "' "); - } - requete.append("end ; "); - UtilitaireDao.get(0).executeBlock(connexion, requete); - return true; - } public static void clearPilotageAndDirectories(String repertoire, String env) throws ArcException { UtilitaireDao.get(0).executeBlock(null, "truncate " + TableNaming.dbEnv(env) + "pilotage_fichier; "); @@ -1429,61 +289,7 @@ public static void clearPilotageAndDirectories(String repertoire, String env) th FileUtilsArc.deleteAndRecreateDirectory( Paths.get(FileSystemManagement.directoryEnvExport(repertoire, env)).toFile()); } - - /** - * Instanciate the metadata required into all executors pod - * - * @param envExecution - * @throws ArcException - */ - public static int copyMetadataToExecutorsAllNods(Connection coordinatorConnexion, String envExecution) - throws ArcException { - ThrowingConsumer onCoordinator = c -> { - }; - ThrowingConsumer onExecutor = executorConnection -> { - copyMetaDataToExecutors(coordinatorConnexion, executorConnection, envExecution); - }; - return ServiceScalability.dispatchOnNods(coordinatorConnexion, onCoordinator, onExecutor); - - } - - /** - * Instanciate the metadata required into the given executor pod - * @param coordinatorConnexion - * @param executorConnection - * @param envExecution - * @throws ArcException - */ - public static void copyMetaDataToExecutors(Connection coordinatorConnexion, Connection executorConnection, String envExecution) throws ArcException - { - PropertiesHandler properties = PropertiesHandler.getInstance(); - - // add utility functions - BddPatcher.executeBddScript(executorConnection, "BdD/script_function_utility.sql", - properties.getDatabaseRestrictedUsername(), null, null); - - // add tables for phases if required - BddPatcher.bddScriptEnvironmentExecutor(executorConnection, properties.getDatabaseRestrictedUsername(), - new String[] { envExecution }); - - // copy tables - - ArrayList tablesToCopyIntoExecutor = BddPatcher.retrieveRulesTablesFromSchema(coordinatorConnexion, - envExecution); - tablesToCopyIntoExecutor - .addAll(BddPatcher.retrieveExternalTablesUsedInRules(coordinatorConnexion, envExecution)); - tablesToCopyIntoExecutor - .addAll(BddPatcher.retrieveModelTablesFromSchema(coordinatorConnexion, envExecution)); - - for (String table : new HashSet(tablesToCopyIntoExecutor)) { - GenericBean gb = new GenericBean(UtilitaireDao.get(0).executeRequest(coordinatorConnexion, - new ArcPreparedStatementBuilder("SELECT * FROM " + table))); - ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - UtilitaireDao.get(0).executeRequest(executorConnection, query.copyFromGenericBean(table, gb, false)); - } - } - } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/engine/BddPatcher.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java similarity index 99% rename from arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/engine/BddPatcher.java rename to arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java index 628c1307c..cd4eaa878 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/engine/BddPatcher.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/dbmaintenance/BddPatcher.java @@ -1,4 +1,4 @@ -package 
fr.insee.arc.core.service.p0initialisation.engine;
+package fr.insee.arc.core.service.p0initialisation.dbmaintenance;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java
new file mode 100644
index 000000000..cedfdd2ca
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java
@@ -0,0 +1,68 @@
+package fr.insee.arc.core.service.p0initialisation.filesystem;
+
+import java.sql.Connection;
+import java.util.Arrays;
+import java.util.List;
+
+import fr.insee.arc.core.service.global.dao.DataStorage;
+import fr.insee.arc.core.service.p1reception.ApiReceptionService;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.files.FileUtilsArc;
+import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
+
+public class BuildFileSystem {
+
+    /**
+     * Build the file system required for ARC to proceed, for a list of given sandboxes
+     *
+     * @param connexion
+     * @param envExecutions the sandbox schemas
+     */
+    public BuildFileSystem(Connection connexion, String[] envExecutions) {
+        super();
+        this.connexion = connexion;
+        this.envExecutions = envExecutions;
+    }
+
+    private Connection connexion;
+
+    private String[] envExecutions;
+
+    /**
+     * Build the directories of every registered data storage ("entrepot") for each sandbox
+     */
+    public void execute() {
+        PropertiesHandler properties = PropertiesHandler.getInstance();
+
+        try {
+
+            List<String> listEntrepot = DataStorage.execQuerySelectDatastorage(connexion);
+
+            for (String envExecution : Arrays.asList(envExecutions)) {
+
+                for (String d : listEntrepot) {
+                    FileUtilsArc.createDirIfNotexist(ApiReceptionService
+                            .directoryReceptionEntrepot(properties.getBatchParametersDirectory(), envExecution, d));
+                    FileUtilsArc.createDirIfNotexist(ApiReceptionService.directoryReceptionEntrepotArchive(
+                            properties.getBatchParametersDirectory(), envExecution, d));
+                }
+
+                FileUtilsArc.createDirIfNotexist(ApiReceptionService
+                        .directoryReceptionEtatEnCours(properties.getBatchParametersDirectory(), envExecution));
+                FileUtilsArc.createDirIfNotexist(ApiReceptionService
+                        .directoryReceptionEtatOK(properties.getBatchParametersDirectory(), envExecution));
+                FileUtilsArc.createDirIfNotexist(ApiReceptionService
+                        .directoryReceptionEtatKO(properties.getBatchParametersDirectory(), envExecution));
+            }
+
+        } catch (ArcException ex) {
+            ex.logFullException();
+        }
+
+    }
+
+}
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java
new file mode 100644
index 000000000..f2dfa486c
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java
@@ -0,0 +1,162 @@
+package fr.insee.arc.core.service.p0initialisation.filesystem;
+
+import java.io.File;
+import java.sql.Connection;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import fr.insee.arc.core.service.global.dao.DataStorage;
+import fr.insee.arc.core.service.global.dao.FileSystemManagement;
+import fr.insee.arc.core.service.p1reception.ApiReceptionService;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.files.FileUtilsArc;
+import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
+import fr.insee.arc.utils.utils.LoggerHelper;
+import fr.insee.arc.utils.utils.ManipString;
+
+public class RestoreFileSystem {
+
+    private static final Logger LOGGER = LogManager.getLogger(RestoreFileSystem.class);
+
+    private Connection connection;
+    private String envExecution;
+
+    public RestoreFileSystem(Connection connection, String envExecution) {
+        super();
+        this.connection = connection;
+        this.envExecution = envExecution;
+    }
+
+    /**
+     * remet le filesystem en état en cas de restauration de la base
+     *
+     * @throws ArcException
+     */
+    public void execute() throws ArcException {
+        LoggerHelper.info(LOGGER, RestoreFileSystem.class.getName());
+
+        // parcourir toutes les archives dans le répertoire d'archive
+        String rootDirectory = PropertiesHandler.getInstance().getBatchParametersDirectory();
+        FileUtilsArc.createDirIfNotexist(FileSystemManagement.directoryEnvRoot(rootDirectory, envExecution));
+
+        // pour chaque entrepôt de données,
+        // comparer les archives du répertoire aux archives enregistrées dans la table
+        // d'archive :
+        // comme la table d'archive serait dans l'ancien état de données,
+        // on peut remettre dans le répertoire de réception les archives qu'on ne
+        // retrouve pas dans la table
+        List<String> entrepotList = DataStorage.execQuerySelectDatastorage(connection);
+
+        for (String entrepot : entrepotList) {
+            rebuildFileSystemInEntrepot(rootDirectory, entrepot);
+        }
+    }
+
+    private void rebuildFileSystemInEntrepot(String rootDirectory, String entrepot) throws ArcException
+    {
+        String dirEntrepotArchive = ApiReceptionService.directoryReceptionEntrepotArchive(rootDirectory, envExecution,
+                entrepot);
+        String dirEntrepot = ApiReceptionService.directoryReceptionEntrepot(rootDirectory, envExecution, entrepot);
+
+        FileUtilsArc.createDirIfNotexist(dirEntrepotArchive);
+        FileUtilsArc.createDirIfNotexist(dirEntrepot);
+
+        moveBackNotRegisteredFilesFromEntrepotArchiveToEntrepot(dirEntrepot, dirEntrepotArchive);
+
+        manageDuplicateArchives(dirEntrepot);
+
+    }
+
+    /**
+     * Remettre en chargement les archives non enregistrées dans la base.
+     * Utile en cas de crash pour remettre les fichiers au bon endroit automatiquement
+     * @param dirEntrepot
+     * @param dirEntrepotArchive
+     * @throws ArcException
+     */
+    private void moveBackNotRegisteredFilesFromEntrepotArchiveToEntrepot(String dirEntrepot, String dirEntrepotArchive) throws ArcException
+    {
+        // On cherche les fichiers du répertoire d'archive qui ne sont pas dans la table
+        // archive
+        // Si on en trouve ce n'est pas cohérent et on doit remettre ces fichiers dans
+        // le répertoire de réception
+        // pour être rechargés
+        List<File> dirEntrepotArchiveFiles = Arrays.asList(new File(dirEntrepotArchive).listFiles());
+        // on les insère dans une table temporaire t_files
+        DataStorage.execQueryRegisterFilesInDatabase(connection, dirEntrepotArchiveFiles);
+
+        List<String> fileToBeMoved = DataStorage.execQuerySelectFilesNotInRegisteredArchives(connection, envExecution);
+        for (String fname : fileToBeMoved) {
+            ApiReceptionService.deplacerFichier(dirEntrepotArchive, dirEntrepot, fname, fname);
+        }
+    }
+
+    /**
+     * Effacer les archives identiques avec le nom sans # ou un numéro # inférieur
+     * @param dirEntrepot
+     * @throws ArcException
+     */
+    private void manageDuplicateArchives(String dirEntrepot) throws ArcException
+    {
+        // Traitement des # dans le répertoire de réception
+        // on efface les # dont le fichier existe déjà avec un autre nom sans # ou un
+        // numéro # inférieur
+
+        List<File> dirEntrepotFiles = Arrays.asList(new File(dirEntrepot).listFiles());
+
+        for (File fichier : dirEntrepotFiles) {
+            String filenameWithoutExtension = ManipString.substringBeforeFirst(fichier.getName(), ".");
+            String ext = "." + ManipString.substringAfterFirst(fichier.getName(), ".");
+
+            if (filenameWithoutExtension.contains("#")) {
+                Integer number = ManipString
+                        .parseInteger(ManipString.substringAfterLast(filenameWithoutExtension, "#"));
+
+                // c'est un fichier marqué
+                if (number != null) {
+
+                    String originalIdSource = ManipString.substringBeforeLast(filenameWithoutExtension, "#");
+
+                    // tester ce qu'on doit en faire
+
+                    // comparer au fichier sans index
+                    File fichierDeReference;
+
+                    fichierDeReference = new File(dirEntrepot + File.separator + originalIdSource + ext);
+                    FileSystemManagement.deleteFileIfSameAs(fichier, fichierDeReference);
+
+                    // comparer aux fichiers avec un index précédent
+                    for (int i = 2; i < number; i++) {
+                        fichierDeReference = new File(dirEntrepot + File.separator + originalIdSource + "#" + i + ext);
+                        FileSystemManagement.deleteFileIfSameAs(fichier, fichierDeReference);
+                    }
+
+                }
+            }
+
+        }
+    }
+
+}
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java
new file mode 100644
index 000000000..d6e329df8
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java
@@ -0,0 +1,215 @@
+package fr.insee.arc.core.service.p0initialisation.pilotage;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import fr.insee.arc.core.dataobjects.ArcDatabase;
+import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
+import fr.insee.arc.core.dataobjects.ColumnEnum;
+import fr.insee.arc.core.model.TraitementEtat;
+import fr.insee.arc.core.model.TraitementPhase;
+import fr.insee.arc.core.service.global.bo.Sandbox;
+import fr.insee.arc.core.service.global.dao.TableNaming;
+import fr.insee.arc.core.service.p1reception.ApiReceptionService;
+import fr.insee.arc.core.util.BDParameters;
+import fr.insee.arc.utils.dao.UtilitaireDao;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.files.FileUtilsArc;
+import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
+import fr.insee.arc.utils.structure.GenericBean;
+import fr.insee.arc.utils.utils.LoggerHelper;
+
+/**
+ * remove deprecated files from a target sandbox
+ * @author FY2QEQ
+ *
+ */
+public class CleanPilotage {
+
+    private static final Logger LOGGER = LogManager.getLogger(CleanPilotage.class);
+
+    public CleanPilotage(Sandbox sandbox) {
+        super();
+        this.sandbox = sandbox;
+    }
+
+    private Sandbox sandbox;
+
+    /**
+     * Suppression dans la table de pilotage des fichiers consommés.
+     * RG1 : une copie des données du fichier doit avoir été récupérée par tous les clients déclarés.
+     * RG2 : pour un fichier donné, l'ancienneté de son dernier transfert doit dépasser
+     * Nb_Jour_A_Conserver jours.
+     *
+     * @throws ArcException
+     */
+    public void execute() throws ArcException {
+        LoggerHelper.info(LOGGER, "nettoyerTablePilotage");
+
+        BDParameters bdParameters = new BDParameters(ArcDatabase.COORDINATOR);
+
+        // indique combien de jours les fichiers doivent être conservés après avoir été consommés
+        int numberOfDaysToKeepFiles = bdParameters.getInt(sandbox.getConnection(),
+                "ApiInitialisationService.Nb_Jour_A_Conserver", 365);
+
+        // nombre de fichiers à traiter à chaque itération d'archivage
+        int numberOfFilesToProceed = bdParameters.getInt(sandbox.getConnection(),
+                "ApiInitialisationService.NB_FICHIER_PER_ARCHIVE", 10000);
+
+        String nomTablePilotage = TableNaming.dbEnv(sandbox.getSchema()) + "pilotage_fichier";
+        String nomTableArchive = TableNaming.dbEnv(sandbox.getSchema()) + "pilotage_archive";
+
+        ArcPreparedStatementBuilder requete;
+
+        requete = new ArcPreparedStatementBuilder();
+
+        requete.append("DROP TABLE IF EXISTS fichier_to_delete; ");
+        requete.append("CREATE TEMPORARY TABLE fichier_to_delete AS ");
+        requete.append("WITH ")
+
+                // 1. on récupère sous forme de tableau les clients de chaque famille
+                .append("clientsParFamille AS ( ").append("SELECT array_agg(id_application) as client, id_famille ")
+                .append("FROM arc.ihm_client ").append("GROUP BY id_famille ").append(") ")
+
+                // 2. on fait une première sélection des fichiers candidats au delete
+                .append(",isFichierToDelete AS ( ")
+                .append("SELECT " + ColumnEnum.ID_SOURCE.getColumnName() + ", container, date_client ").append("FROM ")
+                .append(nomTablePilotage).append(" a ").append(", arc.ihm_norme b ").append(", clientsParFamille c ")
+                .append("WHERE a.phase_traitement='" + TraitementPhase.MAPPING + "' ")
+                .append("AND a.etat_traitement='{" + TraitementEtat.OK + "}' ").append("AND a.client is not null ")
+                .append("AND a.id_norme=b.id_norme ").append("AND a.periodicite=b.periodicite ")
+                .append("AND b.id_famille=c.id_famille ")
+                // on filtre selon RG1 : test d'égalité des 2 tableaux (a.client,c.client)
+                // par double inclusion (A dans B & B dans A)
+                .append("AND (a.client <@ c.client AND c.client <@ a.client) ")
+                .append(") ")
+
+                // 3. on sélectionne les fichiers éligibles
+                .append("SELECT " + ColumnEnum.ID_SOURCE.getColumnName()
+                        + ", container FROM (SELECT unnest(date_client) as t, " + ColumnEnum.ID_SOURCE.getColumnName()
+                        + ", container FROM isFichierToDelete) ww ")
+                .append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName() + ", container ")
+                // on filtre selon RG2
+                .append("HAVING (current_date - max(t) ::date ) >=" + numberOfDaysToKeepFiles + " ").append("; ");
+
+        UtilitaireDao.get(0).executeRequest(sandbox.getConnection(), requete);
+
+        // requête sur laquelle on va itérer : on sélectionne un certain nombre de
+        // fichiers et on itère
+        requete = new ArcPreparedStatementBuilder();
+
+        // 3b. on sélectionne les fichiers éligibles et on limite le nombre de retours
+        // pour que l'update ne soit pas trop massif (perf)
+        requete.append("WITH fichier_to_delete_limit AS ( ")
+                .append(" SELECT * FROM fichier_to_delete LIMIT " + numberOfFilesToProceed + " ").append(") ")
+
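+                // Illustrative sketch only (names taken from the appends below): the
+                // statement assembled here is expected to look like
+                //   WITH fichier_to_delete_limit AS (SELECT * FROM fichier_to_delete LIMIT 10000)
+                //   , delete_archive AS (DELETE FROM <env>_pilotage_archive a USING fichier_to_delete_limit b WHERE ... RETURNING *)
+                //   , delete_idsource AS (DELETE FROM <env>_pilotage_fichier a USING fichier_to_delete_limit b WHERE ...)
+                //   , delete_source AS (DELETE FROM fichier_to_delete a USING fichier_to_delete_limit b WHERE ...)
+                //   SELECT entrepot, nom_archive FROM delete_archive;
+                // i.e. one batch of files is purged from both pilotage tables and the
+                // deleted archives are returned to the caller.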
+                // 4. suppression des archives de la table d'archive (bien retirer le nom de
+                // l'entrepot du début du container)
+                .append(",delete_archive AS (").append("DELETE FROM ").append(nomTableArchive).append(" a ")
+                .append("USING fichier_to_delete_limit b ")
+                .append("WHERE a.nom_archive=substring(b.container,strpos(b.container,'_')+1) ").append("returning *) ")
+
+                // 5. suppression des fichiers de la table de pilotage
+                .append(",delete_idsource AS (").append("DELETE FROM ").append(nomTablePilotage).append(" a ")
+                .append("USING fichier_to_delete_limit b ")
+                .append("WHERE a." + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName()
+                        + " ")
+                .append(") ")
+
+                // 5b. suppression de la table des fichiers éligibles
+                .append(",delete_source as (DELETE FROM fichier_to_delete a using fichier_to_delete_limit b where row(a."
+                        + ColumnEnum.ID_SOURCE.getColumnName() + ",a.container)::text=row(b."
+                        + ColumnEnum.ID_SOURCE.getColumnName() + ",b.container)::text) ")
+                // 6. récupérer la liste des archives
+                .append("SELECT entrepot, nom_archive FROM delete_archive ");
+
+        // initialisation de la liste contenant les archives à déplacer
+        HashMap<String, ArrayList<String>> m = new HashMap<>();
+        m.put("entrepot", new ArrayList<>());
+        m.put("nom_archive", new ArrayList<>());
+
+        HashMap<String, ArrayList<String>> n = new HashMap<>();
+
+        // on continue jusqu'à ce qu'on ne trouve plus rien à effacer
+        do {
+            // récupérer le résultat de la requête
+            LoggerHelper.info(LOGGER, "Archivage de " + numberOfFilesToProceed + " fichiers - Début");
+            n = new GenericBean(UtilitaireDao.get(0).executeRequest(sandbox.getConnection(), requete)).mapContent();
+
+            // ajouter à la liste m les enregistrements s'ils n'existent pas déjà dans m
+
+            // on parcourt n
+            if (!n.isEmpty()) {
+                for (int k = 0; k < n.get("entrepot").size(); k++) {
+                    boolean toInsert = true;
+
+                    // vérifier en parcourant m si on doit réaliser l'insertion
+                    for (int l = 0; l < m.get("entrepot").size(); l++) {
+                        if (n.get("entrepot").get(k).equals(m.get("entrepot").get(l))
+                                && n.get("nom_archive").get(k).equals(m.get("nom_archive").get(l))) {
+                            toInsert = false;
+                            break;
+                        }
+                    }
+
+                    // si après avoir parcouru tout m, l'enregistrement de n n'est pas trouvé, on
+                    // l'insère
+                    if (toInsert) {
+                        m.get("entrepot").add(n.get("entrepot").get(k));
+                        m.get("nom_archive").add(n.get("nom_archive").get(k));
+                    }
+
+                }
+            }
+            LoggerHelper.info(LOGGER, "Archivage Fin");
+
+        } while (UtilitaireDao.get(0).hasResults(sandbox.getConnection(),
+                new ArcPreparedStatementBuilder("select 1 from fichier_to_delete limit 1")));
+
+        // y a-t-il des choses à faire ?
+        if (m.get("entrepot").size() > 0) {
+
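+            // Note (assumed semantics, inferred from the calls below): deplacerFichier(dirIn,
+            // dirOut, nameIn, nameOut) moves one file between directories; it is invoked here
+            // with nameIn == nameOut, so each archive keeps its name and only changes
+            // directory, ending up in the year-stamped "OLD" location returned by
+            // directoryReceptionEntrepotArchiveOldYearStamped.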
+            // 7. Déplacer les archives effacées dans le répertoire de sauvegarde "OLD"
+            PropertiesHandler properties = PropertiesHandler.getInstance();
+            String repertoire = properties.getBatchParametersDirectory();
+
+            String entrepotSav = "";
+            for (int i = 0; i < m.get("entrepot").size(); i++) {
+                String entrepot = m.get("entrepot").get(i);
+                String archive = m.get("nom_archive").get(i);
+                String dirIn = ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, this.sandbox.getSchema(),
+                        entrepot);
+                String dirOut = ApiReceptionService.directoryReceptionEntrepotArchiveOldYearStamped(repertoire,
+                        this.sandbox.getSchema(), entrepot);
+
+                // création du répertoire "OLD" s'il n'existe pas
+                if (!entrepotSav.equals(entrepot)) {
+                    File f = new File(dirOut);
+                    FileUtilsArc.createDirIfNotexist(f);
+                    entrepotSav = entrepot;
+                }
+
+                // déplacement de l'archive de dirIn vers dirOut
+                ApiReceptionService.deplacerFichier(dirIn, dirOut, archive, archive);
+
+            }
+
+            // maintenance des tables de pilotage après la purge
+            StringBuilder requeteMaintenance = new StringBuilder();
+            requeteMaintenance.append("vacuum analyze " + nomTablePilotage + "; ");
+            requeteMaintenance.append("vacuum analyze " + nomTableArchive + "; ");
+            UtilitaireDao.get(0).executeImmediate(sandbox.getConnection(), requeteMaintenance);
+        }
+
+    }
+
+}
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java
new file mode 100644
index 000000000..33d012ed4
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/SynchronizeDataByPilotage.java
@@ -0,0 +1,350 @@
+package fr.insee.arc.core.service.p0initialisation.pilotage;
+
+import java.sql.Connection;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import fr.insee.arc.core.model.TraitementEtat;
+import fr.insee.arc.core.model.TraitementPhase;
+import fr.insee.arc.core.service.global.bo.Sandbox;
+import fr.insee.arc.core.service.global.dao.DatabaseMaintenance;
+import fr.insee.arc.core.service.global.dao.HashFileNameConversion;
+import fr.insee.arc.core.service.global.dao.PhaseOperations;
+import fr.insee.arc.core.service.global.dao.PilotageOperations;
+import fr.insee.arc.core.service.global.scalability.ServiceScalability;
+import fr.insee.arc.core.service.p0initialisation.pilotage.bo.ListIdSourceInPilotage;
+import fr.insee.arc.core.service.p0initialisation.pilotage.dao.PilotageDao;
+import fr.insee.arc.utils.consumer.ThrowingConsumer;
+import fr.insee.arc.utils.dao.UtilitaireDao;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.structure.GenericBean;
+import fr.insee.arc.utils.utils.FormatSQL;
+import fr.insee.arc.utils.utils.LoggerHelper;
+
+public class SynchronizeDataByPilotage {
+
+    private static final Logger LOGGER = LogManager.getLogger(SynchronizeDataByPilotage.class);
+
+    public SynchronizeDataByPilotage(Sandbox sandbox) {
+        super();
+        this.sandbox = sandbox;
+    }
+
+    private Sandbox sandbox;
+
+    /**
+     * Remise en cohérence des tables de données avec la table de pilotage
+     *
+     * @throws ArcException
+     */
+    public void execute() throws ArcException {
+        LoggerHelper.info(LOGGER, "synchronisationEnvironmentByPilotage");
+
+        // maintenance de la table de pilotage
+        // retirer les "encours" de la table de pilotage
+        LoggerHelper.info(LOGGER, "** 
Maintenance table de pilotage **"); + + // pour chaque fichier de la phase de pilotage, remet à etape='1' pour sa + // derniere phase valide + resetEtapePilotage(); + + // recrée la table de pilotage, ses index, son trigger + rebuildPilotage(); + + // drop des tables temporaires de travail + dropUnusedTemporaryTablesAllNods(); + + // pour chaque table de l'environnement d'execution courant + dropUnusedDataTablesAllNods(null); + + // pour chaque table de l'environnement d'execution courant + deleteUnusedDataRecordsAllNods(null); + + // maintenance des tables de catalogue car postgres ne le réalise pas + // correctement sans mettre en oeuvre + // une stratégie de vacuum hyper agressive et donc ajouter une spécificité pour + // les DBAs + DatabaseMaintenance.maintenanceDatabaseClassic(this.sandbox.getConnection(), this.sandbox.getSchema()); + + } + + + /** + * la variable etape indique si c'est bien l'etape à considerer pour traitement + * ou pas etape='1' : phase à considerer, sinon etape='0' + * + * @return + * @throws ArcException + */ + private void resetEtapePilotage() throws ArcException { + PilotageDao.resetEtapePilotageDao(this.sandbox.getConnection(), this.sandbox.getSchema()); + } + + + + private void rebuildPilotage() throws ArcException { + PilotageDao.rebuildPilotageDao(this.sandbox.getConnection(), this.sandbox.getSchema()); + } + + + /** + * drop the unused temporary table on coordinator and on executors if there is + * any + * + * @param coordinatorConnexion + * @return the number of executor nods in order to know if method worked on + * executors too + * @throws ArcException + */ + private int dropUnusedTemporaryTablesAllNods() throws ArcException { + + ThrowingConsumer function = c -> { + dropUnusedTemporaryTablesOnConnection(c); + }; + + return ServiceScalability.dispatchOnNods(this.sandbox.getConnection(), function, function); + + } + + + /** + * dispatch on every nods the void that drop unused data tables + * + * @param coordinatorConnexion + * @param envExecution + * @param tablePilotage + * @throws ArcException + */ + public void dropUnusedDataTablesAllNods(List optionalProvidedIdSourceToDrop) throws ArcException { + + ThrowingConsumer function = executorConnection -> dropUnusedDataTables( + this.sandbox.getConnection(), executorConnection, this.sandbox.getSchema(), optionalProvidedIdSourceToDrop); + + ServiceScalability.dispatchOnNods(this.sandbox.getConnection(), function, function); + + } + + /** + * call method to drop the unused data table found on an given executor nod + * + * @param coordinatorConnexion + * @param executorConnection + * @param envExecution + * @param tablePilotage + * @throws ArcException + */ + private static void dropUnusedDataTables(Connection coordinatorConnexion, Connection executorConnection, + String envExecution, List providedIdSourceToDrop) throws ArcException { + // This returns the list of the template data table for phases + // For example, "chargement_ok" is the template table for the phase called + // "CHARGEMENT" in an "OK" state + // The table names from the files proceeded in the phase "CHARGEMENT" will be + // based on the table template name + // chargement_ok_child_ + // chargement_ok_child_ + // ... 
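+	// (the child-table suffix is derived from the file's idSource via
+	// HashFileNameConversion.tableOfIdSource; e.g., hypothetically,
+	// chargement_ok_child_ab12cd34 for one received file)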
+ // Historically there was an inheritance link between template table (parent) + // and all the real data tables (children) + // but it had been removed for performance issue + List templateDataTablesThatCanBeDropped = PhaseOperations + .selectPhaseDataTablesFoundInEnv(executorConnection, envExecution).stream() + .filter(nomTable -> !PhaseOperations.extractPhaseFromTableName(nomTable) + .equals(TraitementPhase.MAPPING)) + .collect(Collectors.toList()); + + // no data tables to check ? exit + if (templateDataTablesThatCanBeDropped.isEmpty()) { + return; + } + + dropUnusedDataTables(coordinatorConnexion, executorConnection, envExecution, + templateDataTablesThatCanBeDropped, providedIdSourceToDrop); + + } + + /** + * iterate over data table found in executor nod drop the ones that are no + * longer referenced in the pilotage table found on coordinator nod + * + * @param coordinatorConnexion + * @param executorConnection + * @param tablePilotage + * @param dataTablesThatCanBeDropped + * @throws ArcException + */ + private static void dropUnusedDataTables(Connection coordinatorConnexion, Connection executorConnection, + String envExecution, List templateDataTablesThatCanBeDropped, List providedIdSourceToDrop) + throws ArcException { + // Build the list of child data tables to drop + + List childDataTablesToBeDropped = new ArrayList<>(); + + for (String templateDataTable : templateDataTablesThatCanBeDropped) { + + if (providedIdSourceToDrop != null) { + // if list of idSource is provided, calculate the corresponding tablenames and + // add it to drop list + for (String idSource : providedIdSourceToDrop) { + childDataTablesToBeDropped.add(HashFileNameConversion.tableOfIdSource(templateDataTable, idSource)); + } + } else { + TraitementPhase phase = PhaseOperations.extractPhaseFromTableName(templateDataTable); + TraitementEtat etat = PhaseOperations.extractEtatFromTableName(templateDataTable); + + // retrieve all the children tables of the template table + List childDataTables = PhaseOperations.selectAllChildrenPhaseDataTables(executorConnection, + templateDataTable); + + // it could be more bulky but it would be less readable and useless; this is + // rarely triggered and access 10000 objects at max + for (String childDataTable : childDataTables) { + + // retrieve the idSource of the childDataTable + String idSource = PhaseOperations.selectIdSourceOfChildDataTable(executorConnection, + childDataTable); + String etape = PilotageOperations.accessSelectEtapeForIdSource(coordinatorConnexion, envExecution, + phase, etat, idSource); + + // if no references in pilotage table, mark for drop + if (etape == null) { + childDataTablesToBeDropped.add(childDataTable); + } + } + } + } + + dropDataTables(executorConnection, childDataTablesToBeDropped); + } + + private static void dropDataTables(Connection executorConnection, List dataTablesToDrop) { + UtilitaireDao.get(0).dropTable(executorConnection, dataTablesToDrop); + } + + + + /** + * drop the unused temporary table on the target connection + * + * @param targetConnexion + * @throws ArcException + */ + private void dropUnusedTemporaryTablesOnConnection(Connection targetConnexion) throws ArcException { + GenericBean g = new GenericBean( + UtilitaireDao.get(0).executeRequest(targetConnexion, PilotageDao.requeteListAllTemporaryTablesInEnv(this.sandbox.getSchema()))); + if (!g.mapContent().isEmpty()) { + ArrayList envTables = g.mapContent().get("table_name"); + for (String nomTable : envTables) { + UtilitaireDao.get(0).executeBlock(targetConnexion, 
FormatSQL.dropTable(nomTable)); + } + } + } + + + /** + * delete the record from the data tables on all nods - according to pilotage + * table if providedIdSourceToDelete is not provided - according to + * providedIdSourceToDelete if provided + * + * @param coordinatorConnexion + * @param envExecution + * @param tablePilotage + * @param optionalProvidedIdSourceToDelete + * @throws ArcException + */ + public void deleteUnusedDataRecordsAllNods(List optionalProvidedIdSourceToDelete) throws ArcException { + + ListIdSourceInPilotage listIdSourceInPilotage = new ListIdSourceInPilotage(); + + if (optionalProvidedIdSourceToDelete == null) { + listIdSourceInPilotage + .addSource(this.sandbox.getConnection(), this.sandbox.getSchema(), TraitementPhase.MAPPING, TraitementEtat.OK) + .addSource(this.sandbox.getConnection(), this.sandbox.getSchema(), TraitementPhase.MAPPING, TraitementEtat.KO); + } + + ThrowingConsumer function = executorConnection -> deleteUnusedDataRecordsAllTables( + executorConnection, this.sandbox.getSchema(), listIdSourceInPilotage, optionalProvidedIdSourceToDelete); + + ServiceScalability.dispatchOnNods(this.sandbox.getConnection(), function, function); + + } + + /** + * Delete the unreferenced data records found in all tables that may contains + * data i.e. currently tables of the "mapping" phase + * + * @param executorConnection + * @param envExecution + * @param listIdSourceInPilotage + * @throws ArcException + */ + private static void deleteUnusedDataRecordsAllTables(Connection executorConnection, String envExecution, + ListIdSourceInPilotage listIdSourceInPilotage, List providedIdSourceToDelete) throws ArcException { + + List dataTables = PhaseOperations.selectPhaseDataTablesFoundInEnv(executorConnection, envExecution); + + // if no phase tables, exit + if (dataTables.isEmpty()) { + return; + } + + deleteUnusedDataRecords(executorConnection, listIdSourceInPilotage, dataTables, TraitementPhase.MAPPING, + TraitementEtat.OK, providedIdSourceToDelete); + + deleteUnusedDataRecords(executorConnection, listIdSourceInPilotage, dataTables, TraitementPhase.MAPPING, + TraitementEtat.KO, providedIdSourceToDelete); + + } + + /** + * Delete the unreferenced data records found in the tables corresponding to a + * given phase and state + * + * @param executorConnection + * @param envExecution + * @param listIdSourceInPilotage + * @throws ArcException + */ + private static void deleteUnusedDataRecords(Connection executorConnection, + ListIdSourceInPilotage listIdSourceInPilotage, List envTables, TraitementPhase phase, + TraitementEtat etat, List providedIdSourceToDelete) throws ArcException { + // récupérer la liste des tables de la phase + List envTablesWithRecords = envTables.stream() + .filter(nomTable -> PhaseOperations.extractPhaseFromTableName(nomTable).equals(phase) + && PhaseOperations.extractEtatFromTableName(nomTable).equals(etat)) + .collect(Collectors.toList()); + + // quels enregistrements à effacer + if (envTablesWithRecords.isEmpty()) { + return; + } + + for (String dataTable : envTablesWithRecords) { + + // retrieve the idSource that shouldn't be in data table according to pilotage + // table + List idSourceInDataTableThatShouldntBe; + + if (providedIdSourceToDelete != null) { + idSourceInDataTableThatShouldntBe = providedIdSourceToDelete; + } else { + idSourceInDataTableThatShouldntBe = PhaseOperations.selectIdSourceOfDataTable(executorConnection, + dataTable); + idSourceInDataTableThatShouldntBe.removeAll(listIdSourceInPilotage.getIdSourceInPilotage(phase, etat)); + } + + 
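+			// Illustrative only: given the list just computed, PilotageDao.deleteDataRecords
+			// (defined later in this patch) is expected to issue a statement of the shape
+			//   DELETE FROM <dataTable> WHERE id_source IN (SELECT id_source FROM t1);
+			// where t1 is a temporary table loaded with the idSource values to purge.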
if (!idSourceInDataTableThatShouldntBe.isEmpty()) { + PilotageDao.deleteDataRecords(executorConnection, idSourceInDataTableThatShouldntBe, dataTable); + } + } + + } + + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/model/ListIdSourceInPilotage.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java similarity index 71% rename from arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/model/ListIdSourceInPilotage.java rename to arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java index 22536e1da..fd9bc5812 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/model/ListIdSourceInPilotage.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/bo/ListIdSourceInPilotage.java @@ -1,4 +1,4 @@ -package fr.insee.arc.core.service.p0initialisation.model; +package fr.insee.arc.core.service.p0initialisation.pilotage.bo; import java.sql.Connection; import java.util.ArrayList; @@ -27,11 +27,10 @@ public ListIdSourceInPilotage() { } - public ListIdSourceInPilotage addSource(Connection coordinatorConnexion, String tablePilotage, TraitementPhase phase, TraitementEtat etat) throws ArcException + public ListIdSourceInPilotage addSource(Connection coordinatorConnexion, String envExecution, TraitementPhase phase, TraitementEtat etat) throws ArcException { String key= serializeAsKey(phase, etat); - this.idSourceInPilotage.put(key, ObjectUtils.firstNonNull(new GenericBean(UtilitaireDao.get(0).executeRequest(coordinatorConnexion, PilotageOperations.querySelectIdSourceFromPilotage(tablePilotage, phase, etat))).mapContent().get(ColumnEnum.ID_SOURCE.getColumnName()) - , new ArrayList())); + this.idSourceInPilotage.put(key, new GenericBean(UtilitaireDao.get(0).executeRequest(coordinatorConnexion, PilotageOperations.querySelectIdSourceFromPilotage(envExecution, phase, etat))).getColumnValues(ColumnEnum.ID_SOURCE.getColumnName())); return this; } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/PilotageDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/PilotageDao.java new file mode 100644 index 000000000..869becf52 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/dao/PilotageDao.java @@ -0,0 +1,149 @@ +package fr.insee.arc.core.service.p0initialisation.pilotage.dao; + +import java.sql.Connection; +import java.util.List; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; +import fr.insee.arc.core.model.TraitementPhase; +import fr.insee.arc.core.service.global.ApiService; +import fr.insee.arc.core.service.global.dao.TableNaming; +import fr.insee.arc.utils.dao.CopyObjectsToDatabase; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.dataobjects.TypeEnum; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.structure.GenericBean; +import fr.insee.arc.utils.utils.FormatSQL; +import fr.insee.arc.utils.utils.ManipString; + +public class PilotageDao { + + + /** + * remove temporary states from pilotage table + * @param connection + * @param envExecution + * @throws ArcException + */ + public static void resetEtapePilotageDao(Connection connection, String envExecution) throws ArcException + { + 
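+		// Summary (inferred from the SQL assembled below): "etape" flags whether a
+		// pilotage row is the file's current step; etape='1' means current, '0' not.
+		// The block first deletes rows stuck in the ENCOURS state, then, for each file
+		// whose rows are all at etape=0, sets etape=1 back on its latest reached phase.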
+        String tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution);
+
+        StringBuilder requete = new StringBuilder();
+
+        requete.append("DELETE FROM " + tablePil + " WHERE etat_traitement='{ENCOURS}';");
+
+        requete.append(ApiService.resetPreviousPhaseMark(tablePil, null, null));
+
+        requete.append("WITH tmp_1 as (select " + ColumnEnum.ID_SOURCE.getColumnName() + ", max(");
+        requete.append("case ");
+        for (TraitementPhase p : TraitementPhase.values()) {
+            requete.append("when phase_traitement='" + p.toString() + "' then " + p.ordinal() + " ");
+        }
+        requete.append("end ) as p ");
+        requete.append("FROM " + tablePil + " ");
+        requete.append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName() + " ");
+        requete.append("having max(etape)=0 ) ");
+        requete.append("update " + tablePil + " a ");
+        requete.append("set etape=1 ");
+        requete.append("from tmp_1 b ");
+        requete.append(
+                "where a." + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() + " ");
+        requete.append("and a.phase_traitement= case ");
+        for (TraitementPhase p : TraitementPhase.values()) {
+            requete.append("when p=" + p.ordinal() + " then '" + p.toString() + "' ");
+        }
+        requete.append("end ; ");
+
+        UtilitaireDao.get(0).executeBlock(connection, requete);
+    }
+
+    /**
+     * rebuild to defragment pilotage table
+     * @param connexion
+     * @param envExecution
+     * @throws ArcException
+     */
+    public static void rebuildPilotageDao(Connection connexion, String envExecution) throws ArcException {
+
+        String tablePilotage = ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution);
+
+        StringBuilder query = FormatSQL.rebuildTableAsSelectWhere(tablePilotage, "true");
+
+        query.append("create index idx1_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on "
+                + tablePilotage + " (" + ColumnEnum.ID_SOURCE.getColumnName() + ");");
+
+        query.append("create index idx2_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on "
+                + tablePilotage + " (phase_traitement, etape);");
+
+        query.append("create index idx4_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on "
+                + tablePilotage + " (rapport) where rapport is not null;");
+
+        query.append("create index idx5_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on "
+                + tablePilotage + " (o_container,v_container);");
+
+        query.append("create index idx6_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on "
+                + tablePilotage + " (to_delete);");
+
+        query.append("create index idx7_" + ManipString.substringAfterFirst(tablePilotage, ".") + " on "
+                + tablePilotage + " (date_entree, phase_traitement, etat_traitement);");
+
+        query.append("analyze " + tablePilotage + ";");
+
+        // execute the rebuild script itself; the analyze statement is already appended to it
+        UtilitaireDao.get(0).executeBlock(connexion, query);
+    }
+
+    /**
+     * Récupère toutes les tables temporaires d'un environnement
+     *
+     * @param envExecution
+     * @return
+     */
+    public static ArcPreparedStatementBuilder requeteListAllTemporaryTablesInEnv(String envExecution) {
+        ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder();
+        TraitementPhase[] phase = TraitementPhase.values();
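+        // Illustrative note: the loop below stacks, with UNION ALL, one
+        // FormatSQL.tableExists() probe per phase and per temporary-table pattern
+        // (<env>.<phase>$...$tmp$... and <env>.<phase>_...$tmp$...), so the resulting
+        // query lists every temporary work table of the sandbox in one round trip.
+        // on commence après la phase "initialisation".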
+
+	/**
+	 * Retrieve all the temporary tables of an execution environment
+	 *
+	 * @param envExecution
+	 * @return the query listing the temporary tables
+	 */
+	public static ArcPreparedStatementBuilder requeteListAllTemporaryTablesInEnv(String envExecution) {
+		ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder();
+		TraitementPhase[] phase = TraitementPhase.values();
+		// start after the "initialisation" phase, i.e. at i=2
+		for (int i = 2; i < phase.length; i++) {
+			if (i > 2) {
+				requete.append(" UNION ALL ");
+			}
+			requete.append(FormatSQL.tableExists(TableNaming.dbEnv(envExecution) + phase[i] + "$%$tmp$%"));
+			requete.append(" UNION ALL ");
+			requete.append(FormatSQL.tableExists(TableNaming.dbEnv(envExecution) + phase[i] + "\\_%$tmp$%"));
+		}
+		return requete;
+	}
+
+
+	/**
+	 * Delete data records from a target table according to a given list of sources
+	 * to delete
+	 *
+	 * @param executorConnection
+	 * @param idSourceToDelete
+	 * @param targetDataTable
+	 * @throws ArcException
+	 */
+	public static void deleteDataRecords(Connection executorConnection, List<String> idSourceToDelete,
+			String targetDataTable) throws ArcException {
+
+		GenericBean gb = new GenericBean(ColumnEnum.ID_SOURCE.getColumnName(), TypeEnum.TEXT.getTypeName(),
+				idSourceToDelete);
+
+		CopyObjectsToDatabase.execCopyFromGenericBean(executorConnection, ViewEnum.T1.getTableName(), gb);
+
+		ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
+		query.build(SQL.DELETE, targetDataTable);
+		query.build(SQL.WHERE, ColumnEnum.ID_SOURCE, SQL.IN);
+		query.build("(", SQL.SELECT, ColumnEnum.ID_SOURCE, SQL.FROM, ViewEnum.T1.getTableName(), ")");
+
+		UtilitaireDao.get(0).executeRequest(executorConnection, query);
+
+	}
+}
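deleteDataRecords materializes the id list into the temporary table t1, then deletes by subselect, which keeps the DELETE statement small however long the list is. A minimal calling sketch (table name and ids are illustrative, not part of this patch):

    List<String> toDrop = Arrays.asList("file1.xml", "file2.xml");
    PilotageDao.deleteDataRecords(executorConnection, toDrop, "arc_bas1.mapping_dsn_ok");
    // roughly executes: DELETE FROM arc_bas1.mapping_dsn_ok
    //                   WHERE id_source IN (SELECT id_source FROM t1)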
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/userdata/SynchronizeUserRulesAndMetadata.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/userdata/SynchronizeUserRulesAndMetadata.java
new file mode 100644
index 000000000..0a28ab6b3
--- /dev/null
+++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/userdata/SynchronizeUserRulesAndMetadata.java
@@ -0,0 +1,485 @@
+package fr.insee.arc.core.service.p0initialisation.userdata;
+
+import java.sql.Connection;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
+import fr.insee.arc.core.dataobjects.ViewEnum;
+import fr.insee.arc.core.model.TraitementTableParametre;
+import fr.insee.arc.core.service.global.bo.JeuDeRegle;
+import fr.insee.arc.core.service.global.bo.JeuDeRegleDao;
+import fr.insee.arc.core.service.global.bo.Sandbox;
+import fr.insee.arc.core.service.global.dao.TableNaming;
+import fr.insee.arc.core.service.global.scalability.ServiceScalability;
+import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
+import fr.insee.arc.core.service.p5mapping.engine.ExpressionService;
+import fr.insee.arc.utils.consumer.ThrowingConsumer;
+import fr.insee.arc.utils.dao.CopyObjectsToDatabase;
+import fr.insee.arc.utils.dao.UtilitaireDao;
+import fr.insee.arc.utils.dataobjects.TypeEnum;
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.format.Format;
+import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
+import fr.insee.arc.utils.structure.AttributeValue;
+import fr.insee.arc.utils.structure.GenericBean;
+import fr.insee.arc.utils.structure.tree.HierarchicalView;
+import fr.insee.arc.utils.utils.FormatSQL;
+import fr.insee.arc.utils.utils.LoggerHelper;
+import fr.insee.arc.utils.utils.ManipString;
+
+public class SynchronizeUserRulesAndMetadata {
+
+	private static final Logger LOGGER = LogManager.getLogger(SynchronizeUserRulesAndMetadata.class);
+
+	public SynchronizeUserRulesAndMetadata(Sandbox sandbox) {
+		super();
+		this.sandbox = sandbox;
+	}
+
+	private Sandbox sandbox;
+
+
+	/**
+	 * Copy/replace the rules defined by the user (ihm_ tables) into the current
+	 * execution environment, then update the schema of the business tables
+	 * according to the rules declared in the model families
+	 *
+	 * @param connexion
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	public static void synchroniserSchemaExecutionAllNods(Connection connexion, String envExecution) throws ArcException {
+
+		copyMetadataAllNods(connexion, envExecution);
+
+		mettreAJourSchemaTableMetierOnNods(connexion, envExecution);
+	}
+
+
+	/**
+	 * Copy/replace the rules defined by the user (ihm_ tables) into the current
+	 * execution environment, on every postgres node (coordinator and executors)
+	 *
+	 * @param connexion
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	public static void copyMetadataAllNods(Connection connexion, String envExecution)
+			throws ArcException {
+		copyMetadataToSandbox(connexion, envExecution);
+
+		copyMetadataToExecutorsAllNods(connexion, envExecution);
+	}
+
+	/**
+	 * Copy the tables of the parameter environment (IHM) to the execution
+	 * environment (batch, bas, ...)
+	 *
+	 * @param connexion
+	 * @param anExecutionEnvironment
+	 * @throws ArcException
+	 */
+	private static void copyMetadataToSandbox(Connection connexion, String anExecutionEnvironment) throws ArcException {
+		copyRulesTablesToExecution(connexion, anExecutionEnvironment);
+		applyExpressions(connexion, anExecutionEnvironment);
+	}
+
+	/**
+	 * Instantiate the metadata required on all executor pods
+	 *
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	public static int copyMetadataToExecutorsAllNods(Connection coordinatorConnexion, String envExecution)
+			throws ArcException {
+
+		ThrowingConsumer onCoordinator = c -> {
+		};
+
+		ThrowingConsumer onExecutor = executorConnection -> {
+			copyMetaDataToExecutors(coordinatorConnexion, executorConnection, envExecution);
+		};
+
+		return ServiceScalability.dispatchOnNods(coordinatorConnexion, onCoordinator, onExecutor);
+
+	}
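+
+	// dispatchOnNods runs the first consumer on the coordinator connection and the
+	// second on each executor; judging by the tests further down, the returned int
+	// counts the executor nods reached (0 when no executor is configured).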
+
+	/**
+	 * Instantiate the metadata required on the given executor pod
+	 *
+	 * @param coordinatorConnexion
+	 * @param executorConnection
+	 * @param envExecution
+	 * @throws ArcException
+	 */
+	public static void copyMetaDataToExecutors(Connection coordinatorConnexion, Connection executorConnection,
+			String envExecution) throws ArcException {
+		PropertiesHandler properties = PropertiesHandler.getInstance();
+
+		// add utility functions
+		BddPatcher.executeBddScript(executorConnection, "BdD/script_function_utility.sql",
+				properties.getDatabaseRestrictedUsername(), null, null);
+
+		// add tables for phases if required
+		BddPatcher.bddScriptEnvironmentExecutor(executorConnection, properties.getDatabaseRestrictedUsername(),
+				new String[] { envExecution });
+
+		// copy tables
+
+		ArrayList<String> tablesToCopyIntoExecutor = BddPatcher.retrieveRulesTablesFromSchema(coordinatorConnexion,
+				envExecution);
+		tablesToCopyIntoExecutor
+				.addAll(BddPatcher.retrieveExternalTablesUsedInRules(coordinatorConnexion, envExecution));
+		tablesToCopyIntoExecutor.addAll(BddPatcher.retrieveModelTablesFromSchema(coordinatorConnexion, envExecution));
+
+		for (String table : new HashSet<>(tablesToCopyIntoExecutor)) {
+			GenericBean gb = new GenericBean(UtilitaireDao.get(0).executeRequest(coordinatorConnexion,
+					new ArcPreparedStatementBuilder("SELECT * FROM " + table)));
+
+			CopyObjectsToDatabase.execCopyFromGenericBean(executorConnection, table, gb);
+
+		}
+	}
+
+	/**
+	 * Apply the user-defined expressions to the control and mapping rules
+	 *
+	 * @param connexion
+	 * @param anExecutionEnvironment
+	 * @throws ArcException
+	 */
+	private static void applyExpressions(Connection connexion, String anExecutionEnvironment) throws ArcException {
+		// Checks expression validity
+		ExpressionService expressionService = new ExpressionService();
+		ArrayList<JeuDeRegle> allRuleSets = JeuDeRegleDao.recupJeuDeRegle(connexion,
+				anExecutionEnvironment + ".jeuderegle");
+		for (JeuDeRegle ruleSet : allRuleSets) {
+			// Check
+			GenericBean expressions = expressionService.fetchExpressions(connexion, anExecutionEnvironment, ruleSet);
+			if (expressions.isEmpty()) {
+				continue;
+			}
+
+			Optional loopInExpressionSet = expressionService.loopInExpressionSet(expressions);
+			if (loopInExpressionSet.isPresent()) {
+				LoggerHelper.info(LOGGER, "A loop is present in the expression set : " + loopInExpressionSet.get());
+				LoggerHelper.info(LOGGER, "The expression set is not applied");
+				continue;
+			}
+
+			// Apply
+			expressions = expressionService.fetchOrderedExpressions(connexion, anExecutionEnvironment, ruleSet);
+			if (expressionService.isExpressionSyntaxPresentInControl(connexion, anExecutionEnvironment, ruleSet)) {
+				UtilitaireDao.get(0).executeRequest(connexion,
+						expressionService.applyExpressionsToControl(ruleSet, expressions, anExecutionEnvironment));
+			}
+			if (expressionService.isExpressionSyntaxPresentInMapping(connexion, anExecutionEnvironment, ruleSet)) {
+				UtilitaireDao.get(0).executeRequest(connexion,
+						expressionService.applyExpressionsToMapping(ruleSet, expressions, anExecutionEnvironment));
+			}
+		}
+
+	}
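+
+	// Illustration (hypothetical): if expression A references expression B while B
+	// references A, loopInExpressionSet() reports the cycle and the whole set is
+	// skipped instead of being applied.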
+
+	/**
+	 * Copy the tables containing the user rules to the sandbox so they will be
+	 * used by the sandbox process
+	 *
+	 * @param coordinatorConnexion
+	 * @param anExecutionEnvironment
+	 * @throws ArcException
+	 */
+	private static void copyRulesTablesToExecution(Connection coordinatorConnexion, String anExecutionEnvironment) throws ArcException {
+		LoggerHelper.info(LOGGER, "copyTablesToExecution");
+		try {
+
+			anExecutionEnvironment = anExecutionEnvironment.replace(".", "_");
+
+			StringBuilder requete = new StringBuilder();
+			TraitementTableParametre[] r = TraitementTableParametre.values();
+			StringBuilder condition = new StringBuilder();
+			String modaliteEtat = anExecutionEnvironment.replace("_", ".");
+			String tableImage;
+			String tableCurrent;
+			for (int i = 0; i < r.length; i++) {
+				// create an image table of the table coming from the GUI
+				// (parameter environment)
+				TraitementTableParametre parameterTable = r[i];
+				tableCurrent = TableNaming.dbEnv(anExecutionEnvironment) + parameterTable;
+				tableImage = FormatSQL.temporaryTableName(TableNaming.dbEnv(anExecutionEnvironment) + parameterTable);
+
+				// partial copy (depending on the execution environment)
+				// for the JEUDEREGLE, CONTROLE_REGLE and MAPPING_REGLE tables
+				condition.setLength(0);
+				if (parameterTable == TraitementTableParametre.NORME) {
+					condition.append(" WHERE etat='1'");
+				} else if (parameterTable == TraitementTableParametre.CALENDRIER) {
+					condition.append(" WHERE etat='1' ");
+					condition.append(" and exists (select 1 from " + ViewEnum.IHM_NORME.getFullName()
+							+ " b where a.id_norme=b.id_norme and b.etat='1')");
+				} else if (parameterTable == TraitementTableParametre.JEUDEREGLE) {
+					condition.append(" WHERE etat=lower('" + modaliteEtat + "')");
+					condition.append(" and exists (select 1 from " + ViewEnum.IHM_NORME.getFullName()
+							+ " b where a.id_norme=b.id_norme and b.etat='1')");
+					condition.append(" and exists (select 1 from " + ViewEnum.IHM_CALENDRIER.getFullName()
+							+ " b where a.id_norme=b.id_norme and a.periodicite=b.periodicite and a.validite_inf=b.validite_inf and a.validite_sup=b.validite_sup and b.etat='1')");
+				} else if (parameterTable.isPartOfRuleset()) {
+					condition.append(" WHERE exists (select 1 from " + ViewEnum.IHM_NORME.getFullName()
+							+ " b where a.id_norme=b.id_norme and b.etat='1')");
+					condition.append(" and exists (select 1 from " + ViewEnum.IHM_CALENDRIER.getFullName()
+							+ " b where a.id_norme=b.id_norme and a.periodicite=b.periodicite and a.validite_inf=b.validite_inf and a.validite_sup=b.validite_sup and b.etat='1')");
+					condition.append(" and exists (select 1 from " + ViewEnum.IHM_JEUDEREGLE.getFullName()
+							+ " b where a.id_norme=b.id_norme and a.periodicite=b.periodicite and a.validite_inf=b.validite_inf and a.validite_sup=b.validite_sup AND a.version=b.version and b.etat=lower('"
+							+ modaliteEtat + "'))");
+				}
+				requete.append(FormatSQL.dropTable(tableImage));
+
+				requete.append("CREATE TABLE " + tableImage + " " + FormatSQL.WITH_NO_VACUUM + " AS SELECT a.* FROM "
+						+ r[i] + " AS a " + condition + ";\n");
+
+				requete.append(FormatSQL.dropTable(tableCurrent));
+				requete.append("ALTER TABLE " + tableImage + " rename to "
+						+ ManipString.substringAfterLast(tableCurrent, ".") + "; \n");
+			}
+			UtilitaireDao.get(0).executeBlock(coordinatorConnexion, requete);
+
+			// Last step: copy the nomenclature tables and the ext_-prefixed tables
+			// from the arc schema to the current schema
+
+			requete.setLength(0);
+
+			// 1. Build the drop statements for the nmcl_ and ext_ tables of the
+			// current schema
+
+			ArcPreparedStatementBuilder requeteSelectDrop = new ArcPreparedStatementBuilder();
+			requeteSelectDrop
+					.append(" SELECT 'DROP TABLE IF EXISTS '||schemaname||'.'||tablename||';' AS requete_drop");
+			requeteSelectDrop.append(" FROM pg_tables where schemaname = "
+					+ requeteSelectDrop.quoteText(anExecutionEnvironment.toLowerCase()) + " ");
+			requeteSelectDrop.append(" AND tablename SIMILAR TO '%nmcl%|%ext%'");
+
+			ArrayList<String> requetesDeSuppressionTablesNmcl = new GenericBean(
+					UtilitaireDao.get(0).executeRequest(coordinatorConnexion, requeteSelectDrop)).mapContent()
+					.get("requete_drop");
+
+			if (requetesDeSuppressionTablesNmcl != null) {
+				for (String requeteDeSuppression : requetesDeSuppressionTablesNmcl) {
+					requete.append("\n ").append(requeteDeSuppression);
+				}
+			}
+
+			// 2. Build the create statements for those tables
+			ArrayList<String> requetesDeCreationTablesNmcl = new GenericBean(UtilitaireDao.get(0)
+					.executeRequest(coordinatorConnexion, new ArcPreparedStatementBuilder(
+							"select tablename from pg_tables where (tablename like 'nmcl\\_%' OR tablename like 'ext\\_%') and schemaname='arc'")))
+					.mapContent().get("tablename");
+
+			if (requetesDeCreationTablesNmcl != null) {
+				for (String tableName : requetesDeCreationTablesNmcl) {
+					requete.append("\n CREATE TABLE " + TableNaming.dbEnv(anExecutionEnvironment) + tableName + " "
+							+ FormatSQL.WITH_NO_VACUUM + " AS SELECT * FROM arc." + tableName + ";");
+				}
+			}
+ tableName + ";"); + } + } + + // 3.Execution du script Sql de suppression/création + UtilitaireDao.get(0).executeBlock(coordinatorConnexion, requete); + + } catch (Exception e) { + LoggerHelper.trace(LOGGER, + "Problème lors de la copie des tables vers l'environnement : " + anExecutionEnvironment); + LoggerHelper.error(LOGGER, "Error in ApiInitialisation.copyRulesTablesToExecution"); + throw e; + } + } + + private static void mettreAJourSchemaTableMetierOnNods(Connection connexion, String envExecution) throws ArcException { + + ThrowingConsumer function = executorConnection -> { + mettreAJourSchemaTableMetier(executorConnection, envExecution); + }; + + ServiceScalability.dispatchOnNods(connexion, function, function); + + } + + /** + * Créer ou detruire les colonnes ou les tables métiers en comparant ce qu'il y + * a en base à ce qu'il y a de déclaré dans la table des familles de norme + * + * @param connexion + * @throws ArcException + */ + private static void mettreAJourSchemaTableMetier(Connection connexion, String envExecution) + throws ArcException { + LoggerHelper.info(LOGGER, "mettreAJourSchemaTableMetier"); + /* + * Récupérer la table qui mappe : famille / table métier / variable métier et + * type de la variable + */ + ArcPreparedStatementBuilder requeteRef = new ArcPreparedStatementBuilder(); + requeteRef.append("SELECT lower(id_famille), lower('" + TableNaming.dbEnv(envExecution) + + "'||nom_table_metier), lower(nom_variable_metier), lower(type_variable_metier) FROM " + ViewEnum.MOD_VARIABLE_METIER.getFullName()); + + List> relationalViewRef = Format + .patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(connexion, requeteRef)); + HierarchicalView familleToTableToVariableToTypeRef = HierarchicalView.asRelationalToHierarchical( + "(Réf) Famille -> Table -> Variable -> Type", + Arrays.asList("id_famille", "nom_table_metier", "variable_metier", "type_variable_metier"), + relationalViewRef); + /* + * Récupérer dans le méta-modèle de la base les tables métiers correspondant à + * la famille chargée + */ + ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); + requete.append( + "SELECT lower(id_famille), lower(table_schema||'.'||table_name) nom_table_metier, lower(column_name) nom_variable_metier"); + + // les types dans postgres sont horribles :( + // udt_name : float8 = float, int8=bigint, int4=int + // data_type : double precision = float, integer=int + requete.append( + ", case when lower(data_type)='array' then replace(replace(replace(ltrim(udt_name,'_'),'int4','int'),'int8','bigint'),'float8','float')||'[]' "); + requete.append( + " else replace(replace(lower(data_type),'double precision','float'),'integer','int') end type_variable_metier "); + requete.append("\n FROM information_schema.columns, " + ViewEnum.IHM_FAMILLE.getFullName()); + requete.append("\n WHERE table_schema='" + + ManipString.substringBeforeFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase() + "' "); + requete.append("\n and table_name LIKE '" + + ManipString.substringAfterFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase() + + "mapping\\_%' "); + requete.append("\n and table_name LIKE '" + + ManipString.substringAfterFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase() + + "mapping\\_'||lower(id_famille)||'\\_%';"); + + List> relationalView = Format + .patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(connexion, requete)); + + HierarchicalView familleToTableToVariableToType = HierarchicalView.asRelationalToHierarchical( + "(Phy) Famille -> Table -> Variable -> 
Type", + Arrays.asList("id_famille", "nom_table_metier", "variable_metier", "type_variable_metier"), + relationalView); + StringBuilder requeteMAJSchema = new StringBuilder(); + + /* + * AJOUT/MODIFICATION DES COLONNES DE REFERENCE + */ + for (HierarchicalView famille : familleToTableToVariableToTypeRef.children()) { + /** + * Pour chaque table de référence + */ + for (HierarchicalView table : famille.children()) { + /** + * Est-ce que la table existe physiquement ? + */ + if (familleToTableToVariableToType.hasPath(famille, table)) { + /** + * Pour chaque variable de référence + */ + for (HierarchicalView variable : table.children()) { + /* + * Si la variable*type n'existe pas + */ + if (!familleToTableToVariableToType.hasPath(famille, table, variable, + variable.getUniqueChild())) { + + // BUG POSTGRES : pb drop et add column : recréer la table sinon ca peut excéder + // la limite postgres de 1500 + requeteMAJSchema.append("DROP TABLE IF EXISTS " + table.getLocalRoot() + "_IMG ;"); + requeteMAJSchema.append("CREATE TABLE " + table.getLocalRoot() + "_IMG " + + FormatSQL.WITH_NO_VACUUM + " AS SELECT * FROM " + table.getLocalRoot() + ";"); + requeteMAJSchema.append("DROP TABLE IF EXISTS " + table.getLocalRoot() + " ;"); + requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + "_IMG RENAME TO " + + ManipString.substringAfterFirst(table.getLocalRoot(), ".") + ";"); + + /* + * Si la variable existe + */ + if (familleToTableToVariableToType.hasPath(famille, table, variable)) { + /* + * Drop de la variable + */ + requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + " DROP COLUMN " + + variable.getLocalRoot() + ";"); + } + /* + * Ajout de la variable + */ + requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + " ADD COLUMN " + + variable.getLocalRoot() + " " + variable.getUniqueChild().getLocalRoot() + " "); + if (variable.getUniqueChild().getLocalRoot().equals(TypeEnum.TEXT.getTypeName())) { + requeteMAJSchema.append(" collate \"C\" "); + } + requeteMAJSchema.append(";"); + + } + } + } else { + AttributeValue[] attr = new AttributeValue[table.children().size()]; + int i = 0; + for (HierarchicalView variable : table.children()) { + attr[i++] = new AttributeValue(variable.getLocalRoot(), + variable.getUniqueChild().getLocalRoot()); + } + requeteMAJSchema.append("CREATE TABLE " + table.getLocalRoot() + " ("); + for (int j = 0; j < attr.length; j++) { + if (j > 0) { + requeteMAJSchema.append(", "); + } + requeteMAJSchema.append(attr[j].getFirst() + " " + attr[j].getSecond()); + if (attr[j].getSecond().equals(TypeEnum.TEXT.getTypeName())) { + requeteMAJSchema.append(" collate \"C\" "); + } + } + requeteMAJSchema.append(") " + FormatSQL.WITH_NO_VACUUM + ";\n"); + } + + } + } + /* + * SUPPRESSION DES COLONNES QUI NE SONT PAS CENSEES EXISTER + */ + for (HierarchicalView famille : familleToTableToVariableToType.children()) { + /** + * Pour chaque table physique + */ + for (HierarchicalView table : familleToTableToVariableToType.get(famille).children()) { + /** + * Est-ce que la table devrait exister ? + */ + if (!familleToTableToVariableToTypeRef.hasPath(famille, table)) { + requeteMAJSchema.append("DROP TABLE IF EXISTS " + table.getLocalRoot() + ";\n"); + } else { + /** + * Pour chaque variable de cette table + */ + for (HierarchicalView variable : table.children()) { + /** + * Est-ce que la variable devrait exister ? 
+ */ + if (!familleToTableToVariableToTypeRef.hasPath(famille, table, variable)) { + requeteMAJSchema.append("ALTER TABLE " + table.getLocalRoot() + " DROP COLUMN " + + variable.getLocalRoot() + ";\n"); + } + } + } + } + } + UtilitaireDao.get(0).executeBlock(connexion, requeteMAJSchema); + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java index e931238c3..c3f74feb7 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java @@ -39,7 +39,8 @@ import fr.insee.arc.core.service.global.dao.TableOperations; import fr.insee.arc.core.service.global.scalability.ServiceScalability; import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; -import fr.insee.arc.core.service.p0initialisation.model.ListIdSourceInPilotage; +import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; +import fr.insee.arc.core.service.p0initialisation.pilotage.bo.ListIdSourceInPilotage; import fr.insee.arc.core.util.BDParameters; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.consumer.ThrowingConsumer; @@ -631,8 +632,9 @@ private void registerAndDispatchFiles(Connection connexion, GenericBean archiveC soumettreRequete(requete); if (idSourceToBeDeleted!=null) { - ApiInitialisationService.dropUnusedDataTablesAllNods(connexion, this.envExecution, this.tablePil, idSourceToBeDeleted); - ApiInitialisationService.deleteUnusedDataRecordsAllNods(connexion, envExecution, tablePil, idSourceToBeDeleted); + SynchronizeDataByPilotage synchronizationInstance = new SynchronizeDataByPilotage(this.coordinatorSandbox); + synchronizationInstance.dropUnusedDataTablesAllNods(idSourceToBeDeleted); + synchronizationInstance.deleteUnusedDataRecordsAllNods(idSourceToBeDeleted); } } diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/api/ApiInitialisationServiceTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/api/SynchronizeUserRulesAndMetadataTest.java similarity index 70% rename from arc-core/src/test/java/fr/insee/arc/core/service/api/ApiInitialisationServiceTest.java rename to arc-core/src/test/java/fr/insee/arc/core/service/api/SynchronizeUserRulesAndMetadataTest.java index dec82bf99..123780244 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/api/ApiInitialisationServiceTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/api/SynchronizeUserRulesAndMetadataTest.java @@ -1,23 +1,22 @@ package fr.insee.arc.core.service.api; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.sql.SQLException; import org.junit.Test; -import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.engine.initialisation.BddPatcherTest; -import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; +import fr.insee.arc.core.service.p0initialisation.userdata.SynchronizeUserRulesAndMetadata; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; -public class ApiInitialisationServiceTest extends InitializeQueryTest { +public class SynchronizeUserRulesAndMetadataTest extends InitializeQueryTest { @Test public void copyMetadataToExecutorsTestNotScalable() throws SQLException, ArcException { buildPropertiesWithNoScalability(null); - int 
result=ApiInitialisationService.copyMetadataToExecutorsAllNods(c, BddPatcherTest.testSandbox3); + int result=SynchronizeUserRulesAndMetadata.copyMetadataToExecutorsAllNods(c, BddPatcherTest.testSandbox3); assertEquals(0, result); } @@ -29,7 +28,7 @@ public void copyMetadataToExecutorsTestScalable() throws SQLException, ArcExcept BddPatcherTest.initializeDatabaseForRetrieveTablesFromSchemaTest(u); - int result=ApiInitialisationService.copyMetadataToExecutorsAllNods(c, BddPatcherTest.testSandbox3); + int result=SynchronizeUserRulesAndMetadata.copyMetadataToExecutorsAllNods(c, BddPatcherTest.testSandbox3); // copy should be a success assertEquals(1, result); diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/engine/initialisation/BddPatcherTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/engine/initialisation/BddPatcherTest.java index d9e0a5810..9416b0ac8 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/engine/initialisation/BddPatcherTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/engine/initialisation/BddPatcherTest.java @@ -16,7 +16,7 @@ import fr.insee.arc.core.dataobjects.SchemaEnum; import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; -import fr.insee.arc.core.service.p0initialisation.engine.BddPatcher; +import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher; import fr.insee.arc.utils.dao.GenericPreparedStatementBuilder; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServicePilotageOperationTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServicePilotageOperationTest.java index fd0b26ec3..52b400477 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServicePilotageOperationTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/ServicePilotageOperationTest.java @@ -1,6 +1,6 @@ package fr.insee.arc.core.service.global.dao; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.util.ArrayList; @@ -8,8 +8,6 @@ import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.core.service.global.dao.PilotageOperations; -import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; import fr.insee.arc.utils.structure.GenericBean; @@ -18,7 +16,7 @@ public class ServicePilotageOperationTest extends InitializeQueryTest { @Test public void retrieveIdSourceFromPilotageQueryTest() throws ArcException { - String tablePil = "public.table_pilotage"; + String tablePil = "public.pilotage_fichier"; u.executeImmediate(c, "CREATE TABLE "+tablePil+" (id_source text, phase_traitement text, etat_traitement text[]);"); u.executeImmediate(c, "INSERT INTO "+tablePil+" select 'f1', 'MAPPING', '{OK}'"); @@ -27,7 +25,7 @@ public void retrieveIdSourceFromPilotageQueryTest() throws ArcException { ArrayList listOfIdSource = new GenericBean(u.executeRequest(c, - PilotageOperations.querySelectIdSourceFromPilotage(tablePil, TraitementPhase.MAPPING, TraitementEtat.OK) + PilotageOperations.querySelectIdSourceFromPilotage("public", TraitementPhase.MAPPING, TraitementEtat.OK) )).mapContent().get("id_source"); assertEquals(2, listOfIdSource.size()); diff --git 
a/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java
new file mode 100644
index 000000000..260d5c3b6
--- /dev/null
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java
@@ -0,0 +1,60 @@
+package fr.insee.arc.utils.dao;
+
+import java.sql.Connection;
+
+import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.structure.GenericBean;
+
+public class CopyObjectsToDatabase {
+
+	private static final int CHUNK_SIZE = 10000;
+
+	private CopyObjectsToDatabase() {
+		throw new IllegalStateException("Utility class");
+	}
+
+	/**
+	 * Execute the copy by chunks; mandatory for large GenericBean objects
+	 * @param connection
+	 * @param tableName
+	 * @param gb
+	 * @throws ArcException
+	 */
+	public static void execCopyFromGenericBean(Connection connection, String tableName, GenericBean gb)
+			throws ArcException {
+		execCopyFromGenericBean(connection, tableName, gb, CHUNK_SIZE);
+	}
+
+	/**
+	 * Execute the copy from a GenericBean to the database, by chunks of
+	 * {@code chunkSize} rows
+	 *
+	 * @param connection
+	 * @param tableName
+	 * @param gb
+	 * @param chunkSize
+	 * @throws ArcException
+	 */
+	private static void execCopyFromGenericBean(Connection connection, String tableName, GenericBean gb, int chunkSize)
+			throws ArcException {
+		GenericPreparedStatementBuilder query = new GenericPreparedStatementBuilder();
+
+		query.append(query.createWithGenericBean(tableName, gb));
+
+		int cursor = 0;
+
+		do {
+			int startChunk = cursor;
+			int endChunk = cursor + chunkSize;
+
+			query.insertWithGenericBeanByChunk(tableName, gb, startChunk, endChunk);
+
+			UtilitaireDao.get(0).executeImmediate(connection, query);
+
+			query = new GenericPreparedStatementBuilder();
+			cursor = endChunk;
+
+		} while (cursor < gb.getContent().size());
+	}
+
+}
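A minimal usage sketch of the chunked copy (target table and values are illustrative; the GenericBean constructor is the (column, type, values) form used by PilotageDao above):

    GenericBean gb = new GenericBean("id_source", "text", Arrays.asList("f1", "f2"));
    // creates or replaces public.t_example, then inserts rows in chunks of 10000
    CopyObjectsToDatabase.execCopyFromGenericBean(connection, "public.t_example", gb);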
"NULL" : "'" + p.replace("'", "''") + "'"; + return FormatSQL.textToSql(p); } public String quoteNumberWithoutBinding(String p) { @@ -254,15 +255,12 @@ public String getQueryWithParameters() { * * @return */ - public GenericPreparedStatementBuilder copyFromGenericBean(String tableName, GenericBean gb, boolean temporary) { - // drop target table if exists - query.append(SQL.DROP).append(SQL.TABLE).append(SQL.IF_EXISTS).append(tableName).append(SQL.END_QUERY); - + public GenericPreparedStatementBuilder copyFromGenericBean(String tableName, GenericBean gb) { // create the table structure - createWithGenericBean(tableName, gb, temporary); + createWithGenericBean(tableName, gb); // insert - insertWithGenericBean(tableName, gb); + insertWithGenericBeanByChunk(tableName, gb, 0, gb.getContent().size()); return this; } @@ -273,10 +271,14 @@ public GenericPreparedStatementBuilder copyFromGenericBean(String tableName, Gen * @param gb * @return */ - public GenericPreparedStatementBuilder createWithGenericBean(String tableName, GenericBean gb, boolean temporary) + public GenericPreparedStatementBuilder createWithGenericBean(String tableName, GenericBean gb) { + // drop target table if exists + query.append(SQL.DROP).append(SQL.TABLE).append(SQL.IF_EXISTS).append(tableName).append(SQL.END_QUERY); + query.append(SQL.CREATE); - if (temporary) { + + if (FormatSQL.isTemporary(tableName)) { query.append(SQL.TEMPORARY); } @@ -301,14 +303,19 @@ public GenericPreparedStatementBuilder createWithGenericBean(String tableName, G * @param tableName * @param gb */ - public GenericPreparedStatementBuilder insertWithGenericBean(String tableName, GenericBean gb) + public GenericPreparedStatementBuilder insertWithGenericBeanByChunk(String tableName, GenericBean gb, int chunkStart, int chunkStop) { if (!gb.getContent().isEmpty()) { query.append(SQL.INSERT_INTO).append(tableName).append(SQL.VALUES); boolean firstLine = true; + + // if chunkstop too high, limit it to the size of generic bean content + chunkStop = (chunkStop > gb.getContent().size()) ? 
 
-			for (int i = 0; i < gb.getContent().size(); i++) {
+			ArrayList<String> types = gb.getTypes();
+
+			for (int i = chunkStart; i < chunkStop; i++) {
 
 				ArrayList<String> line = gb.getContent().get(i);
 
 				if (firstLine) {
@@ -317,32 +324,43 @@ public GenericPreparedStatementBuilder insertWithGenericBean(String tableName, G
 					query.append(",");
 				}
 
-				boolean firstCell = true;
+				insertLine(types, line);
 
-				query.append("(");
+			}
+			query.append(SQL.END_QUERY);
+		}
+		return this;
+	}
+
+	/**
+	 * Insert into the query the tuple corresponding to a given list of values
+	 * @param types
+	 * @param line
+	 */
+	private void insertLine(List<String> types, List<String> line)
+	{
+		boolean firstCell = true;
 
-				for (int j = 0; j < line.size(); j++) {
+		query.append("(");
 
-					String cell = line.get(j);
+		for (int j = 0; j < line.size(); j++) {
 
-					if (firstCell) {
-						firstCell = false;
-					} else {
-						query.append(",");
-					}
-					query.append(quoteText(cell));
-					if (!gb.getTypes().get(j).equals(TypeEnum.TEXT.getTypeName()))
-					{
-						query.append(SQL.CAST_OPERATOR);
-						query.append(gb.getTypes().get(j));
-					}
-				}
-
-				query.append(")");
+			String cell = line.get(j);
+
+			if (firstCell) {
+				firstCell = false;
+			} else {
+				query.append(",");
+			}
+			// cannot use bind variables here: potentially too many bound values
+			query.append(quoteTextWithoutBinding(cell));
+			if (!types.get(j).equals(TypeEnum.TEXT.getTypeName()))
+			{
+				query.append(SQL.CAST_OPERATOR);
+				query.append(types.get(j));
 			}
-			query.append(SQL.END_QUERY);
 		}
-		return this;
+		query.append(")");
 	}
 }
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dao/SQL.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/SQL.java
index 0a62c221f..2a6f754c7 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/dao/SQL.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/SQL.java
@@ -11,7 +11,7 @@ public enum SQL {
 
 	BEGIN("BEGIN;"), END("END;"),
 
-	AND("AND"), OR("OR"), UNION_ALL("UNION ALL"), IN("IN"),
+	AND("AND"), OR("OR"), UNION_ALL("UNION ALL"), IN("IN"), NOT("NOT"),
 
 	LIMIT("LIMIT"), OFFSET("OFFSET"),
@@ -29,8 +29,10 @@ public enum SQL {
 
 	VALUES("VALUES"),
 
+	SPACE(" "),
+
 	// symbol
-	END_QUERY(";", false), BR(System.lineSeparator(), false), CAST_OPERATOR("::", false), COMMA(",", false),
+	END_QUERY(";", false), BR(System.lineSeparator(), false), CAST_OPERATOR("::", false), COMMA(",", false), DOT(".",false)
 
 	;
 
@@ -38,8 +40,6 @@
 
 	private boolean escapeWithSpace;
 
-	private static final String SPACE = " ";
-
 	private SQL(String sqlCode, boolean escapeWithSpace) {
 		this.sqlCode = sqlCode;
 		this.escapeWithSpace = escapeWithSpace;
@@ -57,7 +57,7 @@ public String getSqlCode() {
 	@Override
 	public String toString() {
 		if (escapeWithSpace) {
-			return SPACE + this.sqlCode + SPACE;
+			return SPACE.sqlCode + this.sqlCode + SPACE.sqlCode;
 		} else {
 			return this.sqlCode;
 		}
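With escapeWithSpace retained, keyword enum values still stringify padded with spaces; a quick sketch, assuming SELECT and FROM keep the default spacing constructor:

    // " SELECT " + "*" + " FROM " + "t1" -> " SELECT * FROM t1"
    String q = SQL.SELECT.toString() + "*" + SQL.FROM.toString() + "t1";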
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java
index f6305dd46..16c99f427 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java
@@ -547,11 +547,11 @@ public T executeRequest(Connection connexion, GenericPreparedStatementBuilde
 	 * @param l
 	 * @return
 	 */
-	public static Boolean hasResults(ArrayList<ArrayList<String>> l) {
+	public static boolean hasResults(ArrayList<ArrayList<String>> l) {
 		return (l.size() > 2);
 	}
 
-	public Boolean hasResults(Connection connexion, GenericPreparedStatementBuilder requete) throws ArcException {
+	public boolean hasResults(Connection connexion, GenericPreparedStatementBuilder requete) throws ArcException {
 		return hasResults(executeRequest(connexion, requete));
 	}
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dataobjects/PgViewEnum.java b/arc-utils/src/main/java/fr/insee/arc/utils/dataobjects/PgViewEnum.java
index 36e9f43ab..3005c2ee4 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/dataobjects/PgViewEnum.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/dataobjects/PgViewEnum.java
@@ -17,6 +17,9 @@ public enum PgViewEnum {
 	// view for table aliases or temporary table in query
 	, T1("t1",PgSchemaEnum.TEMPORARY), T2("t2",PgSchemaEnum.TEMPORARY), T3("t3",PgSchemaEnum.TEMPORARY)
+	, ALIAS_A("a",PgSchemaEnum.TEMPORARY), ALIAS_B("b",PgSchemaEnum.TEMPORARY), ALIAS_C("c",PgSchemaEnum.TEMPORARY)
+
+	// postgres meta table
 	, PG_TABLES("pg_tables", PgSchemaEnum.CATALOG, PgColumnEnum.SCHEMANAME, PgColumnEnum.TABLENAME)
 	;
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/format/Format.java b/arc-utils/src/main/java/fr/insee/arc/utils/format/Format.java
index 3cd0dfa43..6c477fdda 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/format/Format.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/format/Format.java
@@ -13,6 +13,7 @@ private Format() {
 	}
 
 	/**
+	 * Transforms a list of lists into a list
 	 * @param array
 	 *            an ArrayList
 	 * @return a List
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/structure/GenericBean.java b/arc-utils/src/main/java/fr/insee/arc/utils/structure/GenericBean.java
index 64626d6b9..e4e0e976d 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/structure/GenericBean.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/structure/GenericBean.java
@@ -5,6 +5,8 @@
 import java.util.HashMap;
 import java.util.List;
 
+import org.apache.commons.lang3.ObjectUtils;
+
 import fr.insee.arc.utils.exception.ArcException;
 import fr.insee.arc.utils.exception.ArcExceptionMessage;
 
@@ -200,6 +202,17 @@ public HashMap keyValue() throws ArcException {
 		}
 		return r;
 	}
+
+	/**
+	 * Return the values of a column as a list (empty list when the column is absent)
+	 * @param columnName
+	 * @return
+	 */
+	public List<String> getColumnValues(String columnName)
+	{
+		return ObjectUtils.firstNonNull(this.mapContent().get(columnName), new ArrayList<>());
+	}
+
 	public ArrayList<String> getHeaders() {
 		return headers;
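The new getColumnValues helper collapses the firstNonNull/mapContent pattern used by ListIdSourceInPilotage above; a hypothetical call:

    List<String> ids = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query))
            .getColumnValues("id_source"); // empty list, not null, when the column is absent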
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/textUtils/IConstanteCaractere.java b/arc-utils/src/main/java/fr/insee/arc/utils/textUtils/IConstanteCaractere.java
index 7df79cda1..c6d30e49d 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/textUtils/IConstanteCaractere.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/textUtils/IConstanteCaractere.java
@@ -9,21 +9,11 @@ public interface IConstanteCaractere {
 	// Marker
 	public static final String comma = ",";
 	public static final String empty = "";
-	public static final String BOM = "\uFEFF";
-	public static final String colon = ":";
-	public static final String closingBrace = "}";
 	public static final String closingParenthesis = ")";
-	public static final String dash = "-";
 	public static final String dollar = "$";
-	public static final String DOT = ".";
-	public static final String equals = "=";
 	public static final String newline = "\n";
-	public static final String openingBrace = "{";
 	public static final String openingParenthesis = "(";
-	public static final String percent = "%";
-	public static final String plus = "+";
 	public static final String quote = "'";
-	public static final String quotequote = "''";
 	public static final String semicolon = ";";
 	public static final String sharp = "#";
 	public static final String space = " ";
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java
index a186f6537..cc01def6f 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java
@@ -72,12 +72,12 @@ public static GenericPreparedStatementBuilder tableExists(String table) {
 
 	public static String extractSchemaNameToken(String fullTableName)
 	{
-		return fullTableName.contains(DOT) ? ManipString.substringBeforeFirst(fullTableName, DOT) : null;
+		return fullTableName.contains(SQL.DOT.getSqlCode()) ? ManipString.substringBeforeFirst(fullTableName, SQL.DOT.getSqlCode()) : null;
 	}
 
 	public static String extractTableNameToken(String fullTableName)
 	{
-		return ManipString.substringAfterFirst(fullTableName, DOT);
+		return ManipString.substringAfterFirst(fullTableName, SQL.DOT.getSqlCode());
 	}
 
@@ -203,7 +203,7 @@ public static String createTableAsSelectWhere(String tableIn, String tableOut, S
-	 * @param triggersAndIndexes
 	 * @return
 	 */
-	public static StringBuilder rebuildTableAsSelectWhere(String table, String where, String... triggersAndIndexes)
+	public static StringBuilder rebuildTableAsSelectWhere(String table, String where)
 	{
 		String tableRebuild = temporaryTableName(table, "RB");
 
@@ -217,11 +217,6 @@
 		requete.append(
 				"\n ALTER TABLE " + tableRebuild + " RENAME TO " + ManipString.substringAfterFirst(table, ".") + " ;");
 		requete.append("set enable_nestloop=on; ");
-		for (int i = 0; i < triggersAndIndexes.length; i++)
-		{
-			requete.append(triggersAndIndexes[i]);
-		}
-
 		return requete;
 	}
 
@@ -260,6 +255,17 @@ public static String hasRecord(String tableIn)
 		return "SELECT (count(*)>0) as has_record FROM (SELECT 1 FROM " + tableIn + " LIMIT 1) u";
 	}
 
+	/**
+	 * Check whether a table is temporary according to its name:
+	 * temporary table names carry no schema, hence no SQL.DOT
+	 * @param tablename
+	 * @return
+	 */
+	public static boolean isTemporary(String tablename)
+	{
+		return !tablename.contains(SQL.DOT.getSqlCode());
+	}
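+
+	// e.g. isTemporary("t1") -> true ; isTemporary("arc_bas1.pilotage_fichier") -> false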
"NULL" : "'" + val.replace("'", "''") + "'"; } /** diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/utils/ManipString.java b/arc-utils/src/main/java/fr/insee/arc/utils/utils/ManipString.java index 7eb8ef261..70e47c369 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/utils/ManipString.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/utils/ManipString.java @@ -64,6 +64,19 @@ public static List stringToList (String listInString, String separateur) .collect(Collectors.toList()); } + + /** + * return null String if trim expression if empty + * @param element + * @return + */ + public static String nullIfEmptyTrim(String element) + { + return (element == null || element.trim().equals(""))?null:element; + } + + + /** * Convert a variadic elements into an arrayList * @param elements diff --git a/arc-utils/src/test/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilderTest.java b/arc-utils/src/test/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilderTest.java index c06535c03..ac2053ac3 100644 --- a/arc-utils/src/test/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilderTest.java +++ b/arc-utils/src/test/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilderTest.java @@ -1,7 +1,5 @@ package fr.insee.arc.utils.dao; -import static org.junit.Assert.*; - import org.junit.Test; import fr.insee.arc.utils.exception.ArcException; @@ -23,7 +21,7 @@ public void copyFromGenericBean() throws ArcException { // copy the content into a target table using copyFromGenericBean GenericPreparedStatementBuilder query = new GenericPreparedStatementBuilder(); - query.copyFromGenericBean("test.table_test_copy", gb , false); + query.copyFromGenericBean("test.table_test_copy", gb); UtilitaireDao.get(0).executeRequest(c, query); // test result diff --git a/arc-utils/src/test/java/fr/insee/arc/utils/utils/FormatSQLTest.java b/arc-utils/src/test/java/fr/insee/arc/utils/utils/FormatSQLTest.java index 53b1947e3..1ad4229dd 100644 --- a/arc-utils/src/test/java/fr/insee/arc/utils/utils/FormatSQLTest.java +++ b/arc-utils/src/test/java/fr/insee/arc/utils/utils/FormatSQLTest.java @@ -141,15 +141,12 @@ public void rebuildTableAsSelectWhere() throws ArcException { */ private static void rebuildTableAsSelectWhere(String aTable) throws ArcException { - String indexCreationQuery = "CREATE index idx1_test_index on " + aTable + " (i);"; - // create a table with an index UtilitaireDao.get(0).executeImmediate(c, "CREATE TABLE " + aTable + " as SELECT i FROM generate_series(1,20) i"); - UtilitaireDao.get(0).executeImmediate(c, indexCreationQuery); // execute the rebuild with a where condition - UtilitaireDao.get(0).executeImmediate(c, FormatSQL.rebuildTableAsSelectWhere(aTable, "i<=15", indexCreationQuery)); + UtilitaireDao.get(0).executeImmediate(c, FormatSQL.rebuildTableAsSelectWhere(aTable, "i<=15")); // test // the table must exists and should have only 15 records left diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/all/service/ArcWebGenericService.java b/arc-web/src/main/java/fr/insee/arc/web/gui/all/service/ArcWebGenericService.java index b3f099dee..3af3595c5 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/all/service/ArcWebGenericService.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/all/service/ArcWebGenericService.java @@ -22,7 +22,7 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.DataObjectService; import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; -import 
fr.insee.arc.core.service.p0initialisation.engine.BddPatcher; +import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher; import fr.insee.arc.core.util.BDParameters; import fr.insee.arc.core.util.LoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/famillenorme/service/ServiceViewVariableMetier.java b/arc-web/src/main/java/fr/insee/arc/web/gui/famillenorme/service/ServiceViewVariableMetier.java index 946b163e8..fd3fbf596 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/famillenorme/service/ServiceViewVariableMetier.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/famillenorme/service/ServiceViewVariableMetier.java @@ -14,6 +14,7 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.utils.FormatSQL; import fr.insee.arc.web.gui.all.util.ArcStringUtils; import fr.insee.arc.web.gui.all.util.VObject; @@ -363,7 +364,7 @@ private String computeMapcontent(HashMap> content, Str if (content.get(columnName) == null || content.get(columnName).get(index) == null) { return columnName; } else { - return "'" + content.get(columnName).get(index).replace(quote, quotequote) + "'"; + return FormatSQL.textToSql(content.get(columnName).get(index)); } } diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/controller/ControllerFichierBAS.java b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/controller/ControllerFichierBAS.java index 7e0a7b45e..36d042e90 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/controller/ControllerFichierBAS.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/controller/ControllerFichierBAS.java @@ -78,14 +78,4 @@ public String undoActionBASAction(Model model) { return undoActionBAS(model); } - /** - * retour arriere d'une phase - * - * @return - */ - @RequestMapping("/resetPhaseBAS") - public String resetPhaseBASAction(Model model) { - return resetPhaseBAS(model); - } - } diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewFichierBAS.java b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewFichierBAS.java index f6adbf96b..bcf464aa7 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewFichierBAS.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewFichierBAS.java @@ -380,36 +380,4 @@ private ArcPreparedStatementBuilder requeteUpdateToMark(ArcPreparedStatementBuil return updateToDelete; } - /** - * retour arriere d'une phase - * - * @return - */ - public String resetPhaseBAS(Model model) { - Map> selection = views.getViewFichierBAS().mapContentSelected(); - ArcPreparedStatementBuilder querySelection = this.vObjectService.queryView(views.getViewFichierBAS()); - - // si la selection de fichiers n'est pas vide, on se restreint aux fichiers - // choisis pour le retour arriere - // - if (!selection.isEmpty()) { - querySelection - .append(" AND id_source IN " + querySelection.sqlListeOfValues(selection.get("id_source")) + " "); - } - - // On recupere la phase - String phase = views.getViewFichierBAS().mapContent().get("phase_traitement").get(0); - - // Lancement du retour arrière - ApiInitialisationService serv = new ApiInitialisationService(TraitementPhase.INITIALISATION.toString(), - ApiService.IHM_SCHEMA, getBacASable(), this.repertoire, - TraitementPhase.INITIALISATION.getNbLigneATraiter(), null); - try { 
- serv.retourPhasePrecedente(TraitementPhase.valueOf(phase), querySelection, null); - } finally { - serv.finaliser(); - } - return generateDisplay(model, RESULT_SUCCESS); - } - } diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java index a1d82cc69..45d1b1d54 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageBAS.java @@ -16,7 +16,8 @@ import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.global.dao.ResetEnvironmentOperations; import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; -import fr.insee.arc.core.service.p0initialisation.engine.BddPatcher; +import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher; +import fr.insee.arc.core.service.p0initialisation.userdata.SynchronizeUserRulesAndMetadata; import fr.insee.arc.core.util.BDParameters; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; @@ -177,7 +178,7 @@ public String executerBatch(Model model, TraitementPhase phaseAExecuter) { // no need to do that if selected phase is INITIALISATION as INITIALISATION will synchronize the sandbox if (!phaseAExecuter.equals(TraitementPhase.INITIALISATION)) { try{ - ApiInitialisationService.synchroniserSchemaExecutionAllNods(null, ApiService.IHM_SCHEMA, getBacASable()); + SynchronizeUserRulesAndMetadata.synchroniserSchemaExecutionAllNods(null, getBacASable()); } catch (ArcException e) { diff --git a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java index 52fd55195..c5227f491 100644 --- a/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java +++ b/arc-web/src/main/java/fr/insee/arc/web/gui/pilotage/service/ServiceViewPilotageProd.java @@ -10,6 +10,7 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.service.global.ApiService; import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; +import fr.insee.arc.core.service.p0initialisation.userdata.SynchronizeUserRulesAndMetadata; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; @@ -122,7 +123,7 @@ public String toggleOffPROD(Model model) { */ public String applyRulesProd(Model model) { try { - ApiInitialisationService.copyMetadataAllNods(null, ApiService.IHM_SCHEMA, getBacASable()); + SynchronizeUserRulesAndMetadata.copyMetadataAllNods(null, getBacASable()); } catch (ArcException e) { diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java b/arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java index 674775ada..d5e76e23b 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/actions/SendResponse.java @@ -15,27 +15,8 @@ /**Cette classe permet d'envoyer la réponse au client via un flux compressé. 
 * La réponse aura une des formes suivantes :
- *
- * - dans le cas d'un appel au service "query" :
- * {
- *  "type":"jsonwsp/response",
- *  "responses":
- *  [
- *   {
- *    "id":"int",
- *    "table":
- *    [
- *     "nomColonne1;nomColonne2;...;nomColonnen;",
- *     "typeColonne1;typeColonne2;...;typeColonnen;",
- *     "a11;a12;...;a1n;",
- *     "...",
- *     "an1;an2;...;ann;"
- *    ]
- *   },
- *  ]
- * }
- *
- * - dans le cas d'un appel au service "arcClient" :
+ *
+ * Dans le cas d'un appel au service "arcClient" :
  * {
  * "type":"jsonwsp/response",
  * "responses":
diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java b/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java
index 66fde0920..83ef9b879 100644
--- a/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java
+++ b/arc-ws/src/main/java/fr/insee/arc/ws/dao/ClientDaoImpl.java
@@ -299,7 +299,7 @@ public void addImage(long timestamp, String client, String environnement, ArrayL
 	public void getResponse(long timestamp, String client, String tableMetierName, String environnement, SendResponse resp) throws ArcException {
 		LoggerHelper.debugAsComment(LOGGER, timestamp, ": ClientDaoImpl.getResponse()");
 		Connection connection = null;
-		ArrayList<ArrayList<String>> result = new ArrayList<ArrayList<String>>();
+		ArrayList<ArrayList<String>> result = new ArrayList<>();
 		int nbLines = 0;
 		int blockSize = 10000;
diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java
index ab2547876..9c4cb1b05 100644
--- a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java
+++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java
@@ -25,7 +25,9 @@
 import fr.insee.arc.core.service.global.ApiService;
 import fr.insee.arc.core.service.global.dao.ResetEnvironmentOperations;
 import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService;
-import fr.insee.arc.core.service.p0initialisation.engine.BddPatcher;
+import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher;
+import fr.insee.arc.core.service.p0initialisation.filesystem.BuildFileSystem;
+import fr.insee.arc.core.service.p0initialisation.userdata.SynchronizeUserRulesAndMetadata;
 import fr.insee.arc.core.service.p1reception.ApiReceptionService;
 import fr.insee.arc.core.util.LoggerDispatcher;
 import fr.insee.arc.utils.dao.UtilitaireDao;
@@ -179,7 +181,7 @@ public ResponseEntity build(
 		BddPatcher patcher = new BddPatcher();
 		patcher.bddScript(null);
 		patcher.bddScript(null, env);
-		ApiInitialisationService.buildFileSystem(null,new String[] {env});
+		new BuildFileSystem(null,new String[] {env}).execute();
 
 		return ResponseEntity.status(HttpStatus.OK).body(returnView);
 
@@ -192,7 +194,7 @@ public ResponseEntity synchronize(
 	{
 		ReturnView returnView=new ReturnView();
 		try {
-			ApiInitialisationService.synchroniserSchemaExecutionAllNods(null, ApiService.IHM_SCHEMA, env);
+			SynchronizeUserRulesAndMetadata.synchroniserSchemaExecutionAllNods(null, env);
 			return ResponseEntity.status(HttpStatus.OK).body(returnView);
 		} catch (ArcException e) {
diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/setRules/SetRulesController.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/setRules/SetRulesController.java
deleted file mode 100644
index 39daba44e..000000000
--- a/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/setRules/SetRulesController.java
+++ /dev/null
@@ -1,201 +0,0 @@
-package fr.insee.arc.ws.services.restServices.setRules; - -import java.util.ArrayList; -import java.util.List; - -import org.json.JSONObject; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.utils.dao.UtilitaireDao; -import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.ws.services.restServices.setRules.pojo.SetRulesPojo; - -@RestController -public class SetRulesController { - - @RequestMapping(value = "/setRules", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) - public ResponseEntity changeRulesClient( - @RequestBody(required = true) SetRulesPojo bodyPojo - ) - { - JSONObject response = new JSONObject(); - - try { - - if (bodyPojo.targetRule.equals("model")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_famille", "id_famille"); - } - - if (bodyPojo.targetRule.equals("model_tables")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_mod_table_metier", "id_famille", "nom_table_metier"); - } - - if (bodyPojo.targetRule.equals("model_variables")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_mod_variable_metier", "id_famille", "nom_table_metier", "nom_variable_metier"); - } - - if (bodyPojo.targetRule.equals("warehouse")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_entrepot", "id_entrepot"); - } - - if (bodyPojo.targetRule.equals("sandbox")) - { - replaceRulesDAO(bodyPojo,"arc.ext_etat_jeuderegle", "id"); - } - - if (bodyPojo.targetRule.equals("norm")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_norme","id_norme","periodicite"); - } - - if (bodyPojo.targetRule.equals("calendar")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_calendrier","id_norme","periodicite","validite_inf","validite_sup"); - } - - if (bodyPojo.targetRule.equals("ruleset")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_jeuderegle", "id_norme", "periodicite", "validite_inf", "validite_sup", "version"); - } - - if (bodyPojo.targetRule.equals("load")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_chargement_regle", "id_norme", "periodicite", "validite_inf", "validite_sup", "version"); - } - - if (bodyPojo.targetRule.equals("control")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_controle_regle", "id_norme", "periodicite", "validite_inf", "validite_sup", "version"); - } - - if (bodyPojo.targetRule.equals("structure")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_normage_regle", "id_norme", "periodicite", "validite_inf", "validite_sup", "version"); - } - - if (bodyPojo.targetRule.equals("map")) - { - replaceRulesDAO(bodyPojo,"arc.ihm_mapping_regle", "id_norme", "periodicite", "validite_inf", "validite_sup", "version"); - } - - - response.put("status", "OK"); - - } - catch (Exception e) - { - response.put("status", "KO"); - } - - return ResponseEntity.status(HttpStatus.OK).body(response.toString()); - - } - - - /** - * replace = delete + insert - * @param bodyPojo - * @throws ArcException - */ - public void replaceRulesDAO(SetRulesPojo bodyPojo, String tablename, String...primaryKeys) throws ArcException - { - ArcPreparedStatementBuilder requete=new ArcPreparedStatementBuilder(); - - requete.append(deleteRulesQuery(bodyPojo, tablename, primaryKeys)); - requete.append(insertRulesQuery(bodyPojo, tablename, primaryKeys)); - - System.out.println(requete.getQuery()); - 
System.out.println(requete.getParameters()); - - UtilitaireDao.get(0).executeRequest(null, requete); - } - - - /** - * Query to insert rules - * @param bodyPojo - * @param tablename - * @param primaryKeys - * @return - */ - public ArcPreparedStatementBuilder insertRulesQuery(SetRulesPojo bodyPojo, String tablename, String...primaryKeys) - { - ArcPreparedStatementBuilder requete=new ArcPreparedStatementBuilder(); - List columns=new ArrayList<>(bodyPojo.content.keySet()); - - // fetch data to insert - for (int i=0;i columns=new ArrayList<>(bodyPojo.content.keySet()); - - for (int i=0;i content; - - public String getSandbox() { - return sandbox; - } - - public void setSandbox(String sandbox) { - this.sandbox = sandbox; - } - - public String getTargetRule() { - return targetRule; - } - - public void setTargetRule(String targetRule) { - this.targetRule = targetRule; - } - - public SetRulesActionEnum getActionType() { - return actionType; - } - - public void setActionType(SetRulesActionEnum actionType) { - this.actionType = actionType; - } - - public Map getContent() { - return content; - } - - public void setContent(Map content) { - this.content = content; - } - - - -}