From a34b3ddf9f8c3108069392d112ad5afeb27dac41 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Thu, 30 Nov 2023 11:14:02 +0100 Subject: [PATCH 01/19] feat: retrieval ws refactor + test initialisation --- .../insee/arc/core/dataobjects/ViewEnum.java | 7 +- .../core/service/global/dao/TableNaming.java | 2 +- .../scalability/ServiceScalability.java | 4 +- .../SynchronizeRulesAndMetadataOperation.java | 2 +- .../arc/utils/dao/CopyObjectsToDatabase.java | 30 +- .../utils/exception/ArcExceptionMessage.java | 1 + arc-ws/pom.xml | 14 + ...ortStep1InitializeClientTablesService.java | 109 ++++- .../ImportStep2GetTableNameService.java | 55 ++- .../ImportStep3GetTableDataService.java | 11 +- .../importServlet/bo/ArcClientIdentifier.java | 56 ++- .../importServlet/bo/ExportTrackingType.java | 7 + .../importServlet/bo/TableToRetrieve.java | 29 ++ .../services/importServlet/dao/ClientDao.java | 419 ++++++++++-------- .../services/importServlet/dao/NameDao.java | 5 +- .../importServlet/dao/ServiceDao.java | 23 +- .../bo/ArcClientIdentifierTest.java | 40 ++ .../importServlet/dao/ClientDaoTest.java | 103 +++++ 18 files changed, 654 insertions(+), 263 deletions(-) create mode 100644 arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ExportTrackingType.java create mode 100644 arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/TableToRetrieve.java create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifierTest.java create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java diff --git a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java index 2e80d5bd1..140039519 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java +++ b/arc-core/src/main/java/fr/insee/arc/core/dataobjects/ViewEnum.java @@ -150,6 +150,7 @@ public enum ViewEnum { , WS_INFO("ws_info", 
SchemaEnum.SANDBOX_GENERATED) , WS_PENDING("ws_pending", SchemaEnum.SANDBOX_GENERATED) , WS_KO("ws_ko", SchemaEnum.SANDBOX_GENERATED) + , WS_TRACKING("ws_tracking", SchemaEnum.SANDBOX_GENERATED) ; @@ -205,8 +206,12 @@ public String getFullName(String schema) { return normalizeTableName(schema + SQL.DOT.getSqlCode() + this.tableName); } + public static String getFullNameNotNormalized(String schema, String providedTableName) { + return providedTableName.contains(SQL.DOT.getSqlCode())? providedTableName : schema + SQL.DOT.getSqlCode() + providedTableName; + } + public static String getFullName(String schema, String providedTableName) { - return normalizeTableName(providedTableName.contains(SQL.DOT.getSqlCode())? providedTableName : schema + SQL.DOT.getSqlCode() + providedTableName); + return normalizeTableName(getFullNameNotNormalized(schema, providedTableName)); } public static String normalizeTableName(String providedTableName) diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java index 05b725416..a92fb7281 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/dao/TableNaming.java @@ -61,7 +61,7 @@ public static String buildTableNameWithTokens(String schema, String mainSuffix, } s.append(mainSuffix); - return ViewEnum.getFullName(schema, s.toString()); + return ViewEnum.getFullNameNotNormalized(schema, s.toString()); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java b/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java index e819dce32..29daa7f9a 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/global/scalability/ServiceScalability.java @@ -44,10 +44,8 
@@ public static int dispatchOnNods(Connection coordinatorConnexion, ThrowingConsum actionOnCoordinator.accept(coordinatorConnexion); } - + // dispatch when scaled int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); - - // meta data copy is only necessary when scaled if (numberOfExecutorNods==0) { return numberOfExecutorNods; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java index a321d21fd..1d835169d 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeRulesAndMetadataOperation.java @@ -150,7 +150,7 @@ private static void copyMetaDataToExecutors(Connection coordinatorConnexion, Con GenericBean gb = SynchronizeRulesAndMetadataDao.execQuerySelectMetaDataOnlyFrom(coordinatorConnexion, table); - CopyObjectsToDatabase.execCopyFromGenericBeanIfTableNotExists(executorConnection, table, gb); + CopyObjectsToDatabase.execCopyFromGenericBeanWithoutDroppingTargetTable(executorConnection, table, gb); } } diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java index 0f6c3f46f..56678fe76 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/CopyObjectsToDatabase.java @@ -18,35 +18,35 @@ private CopyObjectsToDatabase() { /** * execute copy by chunk. 
It is mandatory for large GenericBean objects * @param connection - * @param tableName - * @param gb + * @param targetTableName + * @param genericBeanContainingData * @throws ArcException */ - public static void execCopyFromGenericBean(Connection connection, String tableName, GenericBean gb) + public static void execCopyFromGenericBean(Connection connection, String targetTableName, GenericBean genericBeanContainingData) throws ArcException { - execCopyFromGenericBean(connection, tableName, gb, CHUNK_SIZE, true); + execCopyFromGenericBean(connection, targetTableName, genericBeanContainingData, CHUNK_SIZE, true); } - public static void execCopyFromGenericBeanIfTableNotExists(Connection connection, String tableName, GenericBean gb) + public static void execCopyFromGenericBeanWithoutDroppingTargetTable(Connection targetConnection, String targetTableName, GenericBean genericBeanContainingData) throws ArcException { - execCopyFromGenericBean(connection, tableName, gb, CHUNK_SIZE, false); + execCopyFromGenericBean(targetConnection, targetTableName, genericBeanContainingData, CHUNK_SIZE, false); } /** * execute copy from GenericBean to database by chunk of size @param chunkSize * - * @param connection - * @param tableName - * @param gb + * @param targetConnection + * @param targetTableName + * @param genericBeanContainingData * @param chunkSize * @throws ArcException */ - private static void execCopyFromGenericBean(Connection connection, String tableName, GenericBean gb, int chunkSize, boolean replaceTargetTable) + private static void execCopyFromGenericBean(Connection targetConnection, String targetTableName, GenericBean genericBeanContainingData, int chunkSize, boolean replaceTargetTable) throws ArcException { GenericPreparedStatementBuilder query = new GenericPreparedStatementBuilder(); - query.append(query.createWithGenericBean(tableName, gb, replaceTargetTable)); + query.append(query.createWithGenericBean(targetTableName, genericBeanContainingData, 
replaceTargetTable)); int cursor = 0; boolean stillToDo = true; @@ -55,18 +55,18 @@ private static void execCopyFromGenericBean(Connection connection, String tableN int startChunk = cursor; int endChunk = cursor + chunkSize; cursor = endChunk; - stillToDo=(cursor < gb.getContent().size()); + stillToDo=(cursor < genericBeanContainingData.getContent().size()); - query.insertWithGenericBeanByChunk(tableName, gb, startChunk, endChunk); + query.insertWithGenericBeanByChunk(targetTableName, genericBeanContainingData, startChunk, endChunk); // analyze on the table at the end if (!stillToDo) { query.append(SQL.COMMIT).append(SQL.END_QUERY); - query.append(FormatSQL.analyzeSecured(tableName)); + query.append(FormatSQL.analyzeSecured(targetTableName)); } - UtilitaireDao.get(0).executeImmediate(connection, query); + UtilitaireDao.get(0).executeImmediate(targetConnection, query); query = new GenericPreparedStatementBuilder(); } while (stillToDo); diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/exception/ArcExceptionMessage.java b/arc-utils/src/main/java/fr/insee/arc/utils/exception/ArcExceptionMessage.java index bbdff899b..1058776fe 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/exception/ArcExceptionMessage.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/exception/ArcExceptionMessage.java @@ -87,6 +87,7 @@ public enum ArcExceptionMessage { WS_RETRIEVE_DATA_FAMILY_FORBIDDEN("Vous ne pouvez pas accéder à cette famille de norme"), WS_RETRIEVE_DATA_FAMILY_CREATION_FAILED("Les tables de la famille de norme n'ont pas pu être créées"), + WS_RETRIEVE_DATA_SCALABLE_TABLE_MUST_BE_EXPORT_IN_CSV("Scalable tables can only be retrieved in csv_gzip mode"), IHM_NMCL_COLUMN_IN_FILE_BUT_NOT_IN_SCHEMA("La colonne %s n'est pas déclarée dans le schéma"), IHM_NMCL_COLUMN_IN_SCHEMA_BUT_NOT_IN_FILE("La colonne est déclarée dans le schéma mais absente du fichier"), diff --git a/arc-ws/pom.xml b/arc-ws/pom.xml index 8a3208bc1..662ef3772 100644 --- a/arc-ws/pom.xml +++ 
b/arc-ws/pom.xml @@ -19,6 +19,20 @@ + + fr.insee.arc + arc-utils + ${project.version} + test-jar + test + + + fr.insee.arc + arc-core + ${project.version} + test-jar + test + fr.insee.arc arc-utils diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java index d851e5262..614454c2f 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java @@ -8,6 +8,7 @@ import org.json.JSONArray; import org.json.JSONObject; +import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.core.dataobjects.SchemaEnum; import fr.insee.arc.core.model.Delimiters; import fr.insee.arc.utils.exception.ArcException; @@ -36,7 +37,7 @@ public ImportStep1InitializeClientTablesService(JSONObject dsnRequest) { this.dsnRequest = dsnRequest; - this.arcClientIdentifier = new ArcClientIdentifier(dsnRequest); + this.arcClientIdentifier = new ArcClientIdentifier(dsnRequest, true); this.sources = makeSource(dsnRequest); @@ -63,25 +64,68 @@ private static List makeSource(JSONObject dsnRequest) { } public void execute(SendResponse resp) throws ArcException { - this.clientDao.dropPendingClientTables(); + + // drop tables from the client that had been requested from a former call + dropPendingClientTables(); - this.clientDao.createTableWsStatus(); + // create the table that will track the data table which has been built and retrieved + createTrackTable(); + + // create the wsinfo and the wspending table + // wspending table will be delete when all + createWsTables(); - if (!arcClientIdentifier.getEnvironnement().equalsIgnoreCase(SchemaEnum.ARC_METADATA.getSchemaName())) { - clientDao.verificationClientFamille(); - tablesMetierNames = 
clientDao.getIdSrcTableMetier(dsnRequest); - } + // create tables to retrieve family data table + createMetaFamilyTables(); + // create data table in an asynchronous parallel thread startTableCreationInParallel(); // on renvoie l'id du client avec son timestamp resp.send(arcClientIdentifier.getEnvironnement() + Delimiters.SQL_SCHEMA_DELIMITER - + arcClientIdentifier.getClient() + Delimiters.SQL_TOKEN_DELIMITER + + arcClientIdentifier.getClientIdentifier() + Delimiters.SQL_TOKEN_DELIMITER + arcClientIdentifier.getTimestamp()); resp.endSending(); } + /** + * 1. check if the client has the right to retrieve the family. If so : + * 2. build the table of id_source to be retrieved in the family data table + * 3. return the list of family data table to retrieve + * @throws ArcException + */ + private void createMetaFamilyTables() throws ArcException { + if (!arcClientIdentifier.getEnvironnement().equalsIgnoreCase(SchemaEnum.ARC_METADATA.getSchemaName())) { + + if (!clientDao.verificationClientFamille()) { + throw new ArcException(ArcExceptionMessage.WS_RETRIEVE_DATA_FAMILY_FORBIDDEN); + } + + clientDao.createTableOfIdSource(dsnRequest); + tablesMetierNames = clientDao.selectBusinessDataTables(); + } + } + + /** + * create the table that tracks the client table which had been built + * when the data of a table will be retrieved by the client, the table entry will be deleted from the track table + * @throws ArcException + */ + private void createTrackTable() throws ArcException { + clientDao.createTableTrackRetrievedTables(); + } + + /** + * create the wsinfo and wspending tables + * wspending will be deleted when all client tables will have been retrieved + * wsinfo table will be looped transfered to the client until wspending table is dropped + * @throws ArcException + */ + private void createWsTables() throws ArcException { + this.clientDao.createTableWsInfo(); + } + /** * Will send handshake to client every @HANDSHAKE_TIMER_IN_MS milliseconds Ugly * but we failed at 
fixing that in front of a F5 controller @@ -94,11 +138,12 @@ private void startTableCreationInParallel() { public void run() { try { if (tablesMetierNames != null) { - executeIf(ExportSource.MAPPING, () -> clientDao.createImages(tablesMetierNames)); + + executeIf(ExportSource.MAPPING, () -> createImages(tablesMetierNames)); executeIf(ExportSource.METADATA, () -> clientDao.createTableMetier()); - executeIf(ExportSource.METADATA, () -> clientDao.createVarMetier()); + executeIf(ExportSource.METADATA, () -> clientDao.createTableVarMetier()); } - executeIf(ExportSource.NOMENCLATURE, () -> clientDao.createNmcl()); + executeIf(ExportSource.NOMENCLATURE, () -> clientDao.createTableNmcl()); executeIf(ExportSource.METADATA, () -> clientDao.createTableFamille()); executeIf(ExportSource.METADATA, () -> clientDao.createTablePeriodicite()); } catch (ArcException e) { @@ -125,4 +170,46 @@ public void run() { maintenance.start(); } + + /** + * drop tables on coordinator and executors if the exists + * @throws ArcException + */ + private void dropPendingClientTables() throws ArcException { + + this.clientDao.dropPendingClientTables(0); + + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); + for (int executorConnectionId = ArcDatabase.EXECUTOR.getIndex(); executorConnectionId < ArcDatabase.EXECUTOR + .getIndex() + numberOfExecutorNods; executorConnectionId++) { + this.clientDao.dropPendingClientTables(executorConnectionId); + } + } + + + + /** + * create image tables on executor nods if connection is scaled, on coordinator + * nod if not + * + * @param tablesMetierNames + * @throws ArcException + */ + private void createImages(List tablesMetierNames) throws ArcException { + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); + if (numberOfExecutorNods == 0) { + clientDao.createImages(tablesMetierNames, ArcDatabase.COORDINATOR.getIndex()); + } else { + for (int executorConnectionId = ArcDatabase.EXECUTOR.getIndex(); executorConnectionId < ArcDatabase.EXECUTOR 
+ .getIndex() + numberOfExecutorNods; executorConnectionId++) { + + // copy the table containing id_source to be retrieved on executor nods + clientDao.copyTableOfIdSourceToExecutorNod(executorConnectionId); + + // create the business table containing data of id_source found in table tableOfIdSource + clientDao.createImages(tablesMetierNames, executorConnectionId); + } + } + } + } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java index 9610a9727..8409fd25f 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java @@ -7,9 +7,12 @@ import org.json.JSONObject; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.exception.ArcExceptionMessage; import fr.insee.arc.ws.services.importServlet.actions.SendResponse; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; +import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; +import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; import fr.insee.arc.ws.services.importServlet.dao.ClientDao; import fr.insee.arc.ws.services.importServlet.dao.NameDao; @@ -29,7 +32,7 @@ public ImportStep2GetTableNameService(JSONObject dsnRequest) { this.dsnRequest = dsnRequest; - this.arcClientIdentifier = new ArcClientIdentifier(dsnRequest); + this.arcClientIdentifier = new ArcClientIdentifier(dsnRequest, false); reprise = this.dsnRequest.getBoolean(JsonKeys.REPRISE.getKey()); @@ -39,26 +42,20 @@ public ImportStep2GetTableNameService(JSONObject dsnRequest) { public void execute(SendResponse resp) throws ArcException { - StringBuilder type = new StringBuilder(); - - String tableName = this.clientDao.getAClientTable(); - - 
if (tableName == null) { - tableName = this.clientDao.getIdTable(); - - if (!reprise) { - this.clientDao.updatePilotage(tableName); - } + // check if a KO + if (this.clientDao.getAClientTableByType(ExportTrackingType.KO).getTableName() != null) { + throw new ArcException(ArcExceptionMessage.WS_RETRIEVE_DATA_FAMILY_CREATION_FAILED); + } + + // try to get a data table + TableToRetrieve table = this.clientDao.getAClientTableByType(ExportTrackingType.DATA); - this.clientDao.dropTable(tableName); + if (table.getTableName() != null) { - resp.send(" "); - resp.endSending(); - return; + StringBuilder type = new StringBuilder(); - } else { // récupération du type - List> metadataOnlyTable = NameDao.execQuerySelectMetadata(tableName); + List> metadataOnlyTable = NameDao.execQuerySelectMetadata(table); for (int j = 0; j < metadataOnlyTable.get(0).size(); j++) { if (j > 0) { @@ -69,11 +66,31 @@ public void execute(SendResponse resp) throws ArcException { type.append(" " + metadataOnlyTable.get(i).get(j)); } } + + // renvoie un nom de table du client si il en reste une + resp.send(table.getTableName() + " " + type); + resp.endSending(); + + return; } - // renvoie un nom de table du client si il en reste une - resp.send(tableName + " " + type); + // if no data table found, get source table to register + table = this.clientDao.getAClientTableByType(ExportTrackingType.ID_SOURCE); + + if (table.getTableName() != null) { + if (!reprise) { + this.clientDao.updatePilotage(table.getTableName()); + } + + this.clientDao.dropTable(table.getTableName()); + } + + table = this.clientDao.getAClientTableByType(ExportTrackingType.TRACK); + this.clientDao.dropTable(table.getTableName()); + + resp.send(" "); resp.endSending(); + } } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java index c1f8e325d..f848750b0 100644 --- 
a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java @@ -10,6 +10,7 @@ import fr.insee.arc.ws.services.importServlet.actions.SendResponse; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; import fr.insee.arc.ws.services.importServlet.bo.ExportFormat; +import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; import fr.insee.arc.ws.services.importServlet.dao.ClientDao; import fr.insee.arc.ws.services.importServlet.dao.ServiceDao; @@ -27,7 +28,7 @@ public class ImportStep3GetTableDataService { public ImportStep3GetTableDataService(JSONObject dsnRequest) { super(); - this.arcClientIdentifier = new ArcClientIdentifier(dsnRequest); + this.arcClientIdentifier = new ArcClientIdentifier(dsnRequest, false); clientDao = new ClientDao(arcClientIdentifier); @@ -35,12 +36,14 @@ public ImportStep3GetTableDataService(JSONObject dsnRequest) { public void execute(SendResponse resp) throws ArcException { + TableToRetrieve table = clientDao.getAClientTableByName(arcClientIdentifier.getClientInputParameter()); + // binary transfer - ServiceDao.execQueryExportDataToResponse(resp.getWr(), - ViewEnum.normalizeTableName(arcClientIdentifier.getClient()), ExportFormat.isCsv(this.arcClientIdentifier.getFormat())); + ServiceDao.execQueryExportDataToResponse(resp.getWr(), table, ExportFormat.isCsv(this.arcClientIdentifier.getFormat())); if (this.clientDao.isWebServiceNotPending()) { - this.clientDao.dropTable(arcClientIdentifier.getClient()); + this.clientDao.dropTable(table); + this.clientDao.deleteFromTrackTable(table.getTableName()); } else { Sleep.sleep(WAIT_DELAY_ON_PENDING_TABLES_CREATION_IN_MS); } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifier.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifier.java index 2e2f31ae6..1542ca8f9 
100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifier.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifier.java @@ -4,41 +4,56 @@ import org.json.JSONObject; +import fr.insee.arc.core.model.Delimiters; import fr.insee.arc.core.service.global.util.Patch; public class ArcClientIdentifier { - - public ArcClientIdentifier(JSONObject dsnRequest) { + + public ArcClientIdentifier(JSONObject dsnRequest, boolean generateTimeStamp) { this.dsnRequest = dsnRequest; - this.client = getKeyIfExists(JsonKeys.CLIENT); - this.timestamp = System.currentTimeMillis(); - this.environnement = getKeyIfExists(JsonKeys.ENVIRONNEMENT, Patch::normalizeSchemaName); + this.clientInputParameter = dsnRequest.getString(JsonKeys.CLIENT.getKey()); + + if (generateTimeStamp) + { + this.clientIdentifier = this.clientInputParameter; + this.timestamp = System.currentTimeMillis(); + this.environnement = getKeyIfExists(JsonKeys.ENVIRONNEMENT, Patch::normalizeSchemaName); + } + else + { + // as example : arc_bas1.ARTEMIS_1701299079078 + String[] tokens = this.clientInputParameter.split("\\"+Delimiters.SQL_SCHEMA_DELIMITER); + this.environnement = tokens[0]; + this.clientIdentifier = tokens[1]; + tokens = this.clientIdentifier.split("\\"+Delimiters.SQL_TOKEN_DELIMITER); + this.clientIdentifier = tokens[0]; + this.timestamp = Long.parseLong(tokens[1]); + } this.famille = getKeyIfExists(JsonKeys.FAMILLE); this.format = getKeyIfExists(JsonKeys.FORMAT); } private JSONObject dsnRequest; + private String clientInputParameter; + private long timestamp; private String environnement; - private String client; + private String clientIdentifier; private String famille; - - private String format; + private String format; - private String getKeyIfExists(JsonKeys key, UnaryOperator f ) - { - return dsnRequest.keySet().contains(key.getKey())?f.apply(dsnRequest.getString(key.getKey())):null; + private String getKeyIfExists(JsonKeys key, 
UnaryOperator f) { + return dsnRequest.keySet().contains(key.getKey()) ? f.apply(dsnRequest.getString(key.getKey())) : null; } - - private String getKeyIfExists(JsonKeys key) - { - return getKeyIfExists(key, t -> t ); + + private String getKeyIfExists(JsonKeys key) { + return getKeyIfExists(key, t -> t); } public long getTimestamp() { @@ -49,9 +64,8 @@ public String getEnvironnement() { return environnement; } - - public String getClient() { - return client; + public String getClientIdentifier() { + return clientIdentifier; } public String getFamille() { @@ -61,5 +75,9 @@ public String getFamille() { public String getFormat() { return format; } - + + public String getClientInputParameter() { + return clientInputParameter; + } + } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ExportTrackingType.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ExportTrackingType.java new file mode 100644 index 000000000..dd6dc97ee --- /dev/null +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/ExportTrackingType.java @@ -0,0 +1,7 @@ +package fr.insee.arc.ws.services.importServlet.bo; + +public enum ExportTrackingType { + + ID_SOURCE, KO, DATA, TRACK; + +} diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/TableToRetrieve.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/TableToRetrieve.java new file mode 100644 index 000000000..26f61f00c --- /dev/null +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/TableToRetrieve.java @@ -0,0 +1,29 @@ +package fr.insee.arc.ws.services.importServlet.bo; + +import fr.insee.arc.core.dataobjects.ArcDatabase; + +public class TableToRetrieve { + + private ArcDatabase nod; + + private String tableName; + + public TableToRetrieve() { + super(); + } + + public TableToRetrieve(String nod, String tableName) { + super(); + this.nod = ArcDatabase.valueOf(nod); + this.tableName = tableName; + } + + public ArcDatabase getNod() { + 
return nod; + } + + public String getTableName() { + return tableName; + } + +} diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 54156db49..e538406aa 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -1,13 +1,16 @@ package fr.insee.arc.ws.services.importServlet.dao; import java.sql.Connection; +import java.sql.SQLException; import java.util.List; +import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONObject; +import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.dataobjects.ViewEnum; @@ -15,6 +18,8 @@ import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.dao.TableNaming; +import fr.insee.arc.utils.dao.CopyObjectsToDatabase; +import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; @@ -23,7 +28,9 @@ import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.ManipString; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; +import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; +import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; public class ClientDao { @@ -34,21 +41,32 @@ public class ClientDao { private String client; private String famille; + // the tablename of the table that contains document data to retrieve identified + // by 
id_source private String tableOfIdSource; + + // the tablename of the table that shows webservice is still creating table to + // be consumed by the client + // it is dropped when client had built all the data table private String tableWsPending; - Connection connection; + // the tablename of the table that tracks tables left to retrieved + private String tableWsTracking; + + private Connection connection; public ClientDao(ArcClientIdentifier arcClientIdentifier) { this.timestamp = arcClientIdentifier.getTimestamp(); this.environnement = arcClientIdentifier.getEnvironnement(); - this.client = arcClientIdentifier.getClient(); + this.client = arcClientIdentifier.getClientIdentifier(); this.famille = arcClientIdentifier.getFamille(); - this.tableOfIdSource = TableNaming.buildTableNameWithTokens(environnement, - ViewEnum.ID_SOURCE, client, timestamp); - this.tableWsPending = TableNaming.buildTableNameWithTokens(environnement, - ViewEnum.WS_PENDING, client, timestamp); + this.tableOfIdSource = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.ID_SOURCE, client, + timestamp); + this.tableWsPending = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_PENDING, client, + timestamp); + this.tableWsTracking = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_TRACKING, client, + timestamp); } @@ -56,7 +74,7 @@ public ClientDao(ArcClientIdentifier arcClientIdentifier) { * Vérifie que le client peut consulter les tables métiers de la famille de * normes */ - public void verificationClientFamille() throws ArcException { + public boolean verificationClientFamille() throws ArcException { LoggerHelper.debugAsComment(LOGGER, timestamp, "ClientDaoImpl#verificationClientFamille()"); ArcPreparedStatementBuilder request = new ArcPreparedStatementBuilder(); @@ -66,52 +84,108 @@ public void verificationClientFamille() throws ArcException { String bool = UtilitaireDao.get(0).executeRequestWithoutMetadata(connection, request).get(0).get(0); - if 
(!bool.equals("t")) { - throw new ArcException(ArcExceptionMessage.WS_RETRIEVE_DATA_FAMILY_FORBIDDEN); - } + return bool.equals("t"); } /** - * Créer une image des ids sources répondants aux critères et récupère la liste - * des noms des tables métiers + * return the list of business data table related to the famille provided * - * @param JSONObject contient les paramètres de la requête - * @return La liste des noms des tables métiers. + * @return * @throws ArcException */ - public List getIdSrcTableMetier(JSONObject requeteJSON) throws ArcException { + public List selectBusinessDataTables() throws ArcException { - LoggerHelper.debugAsComment(LOGGER, timestamp, "ClientDaoImpl#getIdSrcTableMetier()"); + ArcPreparedStatementBuilder request = new ArcPreparedStatementBuilder(); + request.append("SELECT " + ColumnEnum.NOM_TABLE_METIER + " "); + request.append("FROM " + ViewEnum.MOD_TABLE_METIER.getFullName(environnement) + " T1 "); + request.append("WHERE T1.id_famille='" + this.famille + "' "); + request.append(";"); - // Initialisation des variables + return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, request)) + .getColumnValues(ColumnEnum.NOM_TABLE_METIER.getColumnName()); + } - // Préparation du block de requêtes à executer + private void registerTableToBeRetrieved(ExportTrackingType wsTrackingType, ArcDatabase targetNod, String nomTable) + throws ArcException { + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.INSERT_INTO, this.tableWsTracking, "(tracking_type, nod, table_to_retrieve)"); + query.build(SQL.SELECT, query.quoteText(wsTrackingType.toString()), ",", query.quoteText(targetNod.toString()), + ",", query.quoteText(nomTable)); + UtilitaireDao.get(0).executeRequest(connection, query); + } - // Création de la requête de création de la table temporaire contenant la liste - // des id_sources - execQueryCreateTableOfIdSource(requeteJSON); + /** + * Créer une image des tables métiers. 
+ * + * @param tablesMetierNames La liste des noms des tables métiers. + * + * @return liste des noms de tables images crées + * @throws ArcException + */ + private void addImage(String tableMetier, int executorConnectionId) throws ArcException { + StringBuilder request = new StringBuilder(); + + String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, tableMetier, client, timestamp); + + request.append("DROP TABLE IF EXISTS " + nomTableImage + "; "); + + request.append("CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS "); + request.append("SELECT * "); + request.append("FROM " + ViewEnum.getFullName(environnement, tableMetier) + " T1 WHERE true "); + request.append("AND exists (SELECT 1 FROM " + tableOfIdSource + " T2 where T2." + + ColumnEnum.ID_SOURCE.getColumnName() + "=T1." + ColumnEnum.ID_SOURCE.getColumnName() + "); "); + + UtilitaireDao.get(executorConnectionId).executeBlock(connection, request); + + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.EXECUTOR, nomTableImage); - return execQuerySelectBusinessDataTables(); - } /** - * return the list of business data table related to the famille provided - * @return + * Met à jours les colonnes client et date_client de la table + * environnement_pilotage_fichier. 
+ * + * @param tableSource * @throws ArcException */ - private List execQuerySelectBusinessDataTables() throws ArcException { + public void updatePilotage(String tableSource) throws ArcException { + LoggerHelper.debugAsComment(LOGGER, timestamp, ": ClientDaoImpl.updatePilotage()"); - ArcPreparedStatementBuilder request = new ArcPreparedStatementBuilder(); - request.append("SELECT "+ColumnEnum.NOM_TABLE_METIER +" "); - request.append("FROM " + ViewEnum.MOD_TABLE_METIER.getFullName(environnement) + " T1 "); - request.append("WHERE T1.id_famille='" + this.famille + "' "); - request.append("AND exists (select 1 from pg_tables T2 where "); - request.append("T2.schemaname='" + ManipString.substringBeforeFirst(environnement, ".") + "' "); - request.append("AND T1.nom_table_metier=T2.tablename);"); + String clientOfTableSource = extractClientFromToken(); - return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, request)).getColumnValues(ColumnEnum.NOM_TABLE_METIER.getColumnName()); + StringBuilder query = new StringBuilder(); + query.append("UPDATE " + ViewEnum.PILOTAGE_FICHIER.getFullName(environnement) + " T1 "); + query.append("SET client = array_append(client, '" + clientOfTableSource + "') "); + query.append(", date_client = array_append( date_client, localtimestamp ) "); + query.append("WHERE true "); + query.append("AND EXISTS (SELECT 1 FROM " + tableSource + " T2 where T1." + ColumnEnum.ID_SOURCE.getColumnName() + + "=T2." 
+ ColumnEnum.ID_SOURCE.getColumnName() + ") "); + query.append("AND T1.phase_traitement='" + TraitementPhase.MAPPING + "';"); + + UtilitaireDao.get(0).executeBlock(connection, query.toString()); + } + + /** + * extract the client token name from the client + * + * @param client2 + * @return + */ + private String extractClientFromToken() { + return ManipString.substringBeforeFirst( + ManipString.substringAfterFirst(this.client, Delimiters.SQL_SCHEMA_DELIMITER), + Delimiters.SQL_TOKEN_DELIMITER); + } + + public void createTableTrackRetrievedTables() throws ArcException { + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.DROP, SQL.TABLE, SQL.IF_EXISTS, this.tableWsTracking, SQL.END_QUERY); + query.build(SQL.CREATE, SQL.TABLE, this.tableWsTracking, + " (tracking_type text, nod text, table_to_retrieve text) ", SQL.END_QUERY); + UtilitaireDao.get(0).executeRequest(connection, query); + + registerTableToBeRetrieved(ExportTrackingType.TRACK, ArcDatabase.COORDINATOR, this.tableWsTracking); } /** @@ -124,9 +198,9 @@ private List execQuerySelectBusinessDataTables() throws ArcException { * * @param query * @param requeteJSON - * @throws ArcException + * @throws ArcException */ - private void execQueryCreateTableOfIdSource(JSONObject requeteJSON) throws ArcException { + public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException { String periodicite = requeteJSON.getString(JsonKeys.PERIODICITE.getKey()); String validiteInf = requeteJSON.keySet().contains(JsonKeys.VALINF.getKey()) @@ -176,9 +250,11 @@ private void execQueryCreateTableOfIdSource(JSONObject requeteJSON) throws ArcEx query.append(nbFichiers); } query.append(") as foo; "); - + UtilitaireDao.get(0).executeBlock(connection, query); + registerTableToBeRetrieved(ExportTrackingType.ID_SOURCE, ArcDatabase.EXECUTOR, tableOfIdSource); + } /** @@ -189,82 +265,21 @@ private void execQueryCreateTableOfIdSource(JSONObject requeteJSON) throws ArcEx * @return liste des 
noms de tables images crées * @throws ArcException */ - public void createImages(List tablesMetierNames) throws ArcException { + public void createImages(List tablesMetierNames, int executorConnectionId) throws ArcException { LoggerHelper.debugAsComment(LOGGER, timestamp, "ClientDaoImpl.createImage()"); for (String tableMetier : tablesMetierNames) { - addImage(tableMetier); + addImage(tableMetier, executorConnectionId); } } - /** - * Créer une image des tables métiers. - * - * @param tablesMetierNames La liste des noms des tables métiers. - * - * @return liste des noms de tables images crées - * @throws ArcException - */ - public void addImage(String tableMetier) throws ArcException { - StringBuilder request = new StringBuilder(); - - String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, tableMetier, client, timestamp); - - request.append("DROP TABLE IF EXISTS " + nomTableImage + "; "); - - request.append("CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS "); - request.append("SELECT * "); - request.append("FROM " + ViewEnum.getFullName(environnement, tableMetier) + " T1 WHERE true "); - request.append("AND exists (SELECT 1 FROM " + tableOfIdSource + " T2 where T2." - + ColumnEnum.ID_SOURCE.getColumnName() + "=T1." + ColumnEnum.ID_SOURCE.getColumnName() + "); "); - - UtilitaireDao.get(0).executeBlock(connection, request); - - } - - /** - * Met à jours les colonnes client et date_client de la table - * environnement_pilotage_fichier. 
- * - * @param tableSource - * @throws ArcException - */ - public void updatePilotage(String tableSource) throws ArcException { - LoggerHelper.debugAsComment(LOGGER, timestamp, ": ClientDaoImpl.updatePilotage()"); - - String clientOfTableSource = extractClientFromToken(); - - StringBuilder query = new StringBuilder(); - query.append("UPDATE " + ViewEnum.PILOTAGE_FICHIER.getFullName(environnement) + " T1 "); - query.append("SET client = array_append(client, '" + clientOfTableSource + "') "); - query.append(", date_client = array_append( date_client, localtimestamp ) "); - query.append("WHERE true "); - query.append("AND EXISTS (SELECT 1 FROM " + tableSource + " T2 where T1." - + ColumnEnum.ID_SOURCE.getColumnName() + "=T2." + ColumnEnum.ID_SOURCE.getColumnName() + ") "); - query.append("AND T1.phase_traitement='" + TraitementPhase.MAPPING + "';"); - - UtilitaireDao.get(0).executeBlock(connection, query.toString()); - } - - /** - * extract the client token name from the client - * @param client2 - * @return - */ - private String extractClientFromToken() { - return - ManipString.substringBeforeFirst( - ManipString.substringAfterFirst(this.client, Delimiters.SQL_SCHEMA_DELIMITER), - Delimiters.SQL_TOKEN_DELIMITER); - } - /* * (non-Javadoc) * * @see * fr.insee.arc_essnet.ws.dao.ClientDarcl(fr.insee.arc_essnet.ws.actions.Senarc */ - public void createNmcl() throws ArcException { + public void createTableNmcl() throws ArcException { LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.createNmcl()"); ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); @@ -275,9 +290,10 @@ public void createNmcl() throws ArcException { List> nmclNames = UtilitaireDao.get(0).executeRequestWithoutMetadata(connection, requete); for (List nmcl : nmclNames) { - String nomTableImage = ViewEnum.getFullName(environnement, client + "_" + timestamp + "_" + nmcl.get(0)); + String nomTableImage = ViewEnum.getFullNameNotNormalized(environnement, client + "_" + timestamp + "_" + 
nmcl.get(0)); UtilitaireDao.get(0).executeImmediate(connection, "CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS SELECT * FROM " + ViewEnum.getFullName(environnement, nmcl.get(0)) + ";"); + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); } } @@ -288,11 +304,11 @@ public void createNmcl() throws ArcException { * @see fr.insee.arc_essnet.ws.dao.ClientDarcMetier(java.lang.String, * fr.insee.arc_essnet.ws.actions.Senarc */ - public void createVarMetier() throws ArcException { + public void createTableVarMetier() throws ArcException { LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.createVarMetier()"); - String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, - ViewEnum.MOD_VARIABLE_METIER, client, timestamp); + String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.MOD_VARIABLE_METIER, client, + timestamp); ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); requete.append("CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS"); @@ -301,6 +317,8 @@ public void createVarMetier() throws ArcException { requete.append(";"); UtilitaireDao.get(0).executeRequest(connection, requete); + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); + } /* @@ -312,9 +330,9 @@ public void createVarMetier() throws ArcException { public void createTableFamille() throws ArcException { LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.createTableFamille()"); - String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, - ViewEnum.EXT_MOD_FAMILLE, client, timestamp); - + String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.EXT_MOD_FAMILLE, client, + timestamp); + ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); requete.append("CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS SELECT DISTINCT f.id_famille FROM arc.ihm_famille f 
INNER JOIN " @@ -322,6 +340,8 @@ public void createTableFamille() throws ArcException { + requete.quoteText(client) + ");"); UtilitaireDao.get(0).executeRequest(connection, requete); + registerTableToBeRetrieved(ExportTrackingType.ID_SOURCE, ArcDatabase.COORDINATOR, nomTableImage); + } /* @@ -339,6 +359,8 @@ public void createTablePeriodicite() throws ArcException { UtilitaireDao.get(0).executeImmediate(connection, "CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS SELECT DISTINCT id, val FROM " + ViewEnum.EXT_MOD_PERIODICITE.getFullName() + ";"); + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); + } /* @@ -350,7 +372,8 @@ public void createTablePeriodicite() throws ArcException { public void createTableMetier() throws ArcException { LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.sendTableMetier()"); - String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.MOD_TABLE_METIER, client, timestamp); + String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.MOD_TABLE_METIER, client, + timestamp); ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder( "\n CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS"); @@ -359,62 +382,77 @@ public void createTableMetier() throws ArcException { requete.append(";"); UtilitaireDao.get(0).executeRequest(connection, requete); + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); } /** + * Get the table object of the table to retrieve by its type * - * @param client - * @param isSourceListTable : is it the table containing the list of id_source - * of the files to be marked ? 
+ * @param tableName * @return * @throws ArcException */ - public String getAClientTable(boolean isSourceListTable) throws ArcException { - - String schema = ManipString.substringBeforeFirst(client, "."); - String tableToFind = ViewEnum.normalizeTableName(ManipString.substringAfterFirst(client, ".").replace("_", "\\_") + "%"); - String tableWsInfo = ViewEnum.normalizeTableName(this.client + Delimiters.SQL_TOKEN_DELIMITER + ViewEnum.WS_INFO.getTableName()); - String tableWsKO = ViewEnum.normalizeTableName(this.client + Delimiters.SQL_TOKEN_DELIMITER + ViewEnum.WS_KO.getTableName()); + public TableToRetrieve getAClientTableByType(ExportTrackingType type) throws ArcException { + // return data table found in track table for the given type - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append("SELECT schemaname||'.'||tablename FROM pg_tables") - .append(" WHERE tablename like " + requete.quoteText(tableToFind)) - .append(" AND schemaname=" + requete.quoteText(schema)).append(" AND tablename " - + (isSourceListTable ? "" : "NOT") + " like " + requete.quoteText("%id\\_source%")) - // ws_info must be first if exists as this table wil be always created - // others might be pending in creation process - .append(" ORDER BY CASE schemaname||'.'||tablename ") - .append(" WHEN "+requete.quoteText(tableWsInfo)+" THEN 1 ") - .append(" WHEN "+requete.quoteText(tableWsKO)+" THEN 2 ") - .append(" ELSE 3 END ") - .append(" LIMIT 1 "); - - String selectedTableName = UtilitaireDao.get(0).getString(connection, requete); - - // if selectedTableName is ws_ko, there was a problem return exception - if (selectedTableName!=null && selectedTableName.equals(tableWsKO)) - { - throw new ArcException(ArcExceptionMessage.WS_RETRIEVE_DATA_FAMILY_CREATION_FAILED); - } - - return selectedTableName==null? 
null : this.client + selectedTableName.substring(this.client.length()); - - } + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.SELECT, "nod, table_to_retrieve", SQL.FROM, this.tableWsTracking); + query.build(SQL.WHERE, "tracking_type=", query.quoteText(type.toString())); + query.build(SQL.LIMIT, "1"); + + Map> content = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)) + .mapContent(); + + return content.isEmpty() ? new TableToRetrieve() + : new TableToRetrieve(content.get("nod").get(0), content.get("table_to_retrieve").get(0)); - public String getAClientTable() throws ArcException { - return getAClientTable(false); } - public String getIdTable() throws ArcException { - return getAClientTable(true); + /** + * Get the table object of the table to retrieve by its name + * + * @param tableName + * @return + * @throws ArcException + */ + public TableToRetrieve getAClientTableByName(String tableName) throws ArcException { + + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.SELECT, "nod, table_to_retrieve", SQL.FROM, this.tableWsTracking); + query.build(SQL.WHERE, "table_to_retrieve=", query.quoteText(tableName)); + query.build(SQL.LIMIT, "1"); + + Map> content = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)) + .mapContent(); + + return content.isEmpty() ? 
new TableToRetrieve() + : new TableToRetrieve(content.get("nod").get(0), content.get("table_to_retrieve").get(0)); } public void dropTable(String clientTable) { + dropTable(ArcDatabase.COORDINATOR.getIndex(), clientTable); + } + + public void dropTable(int connectionIndex, String clientTable) { if (StringUtils.isBlank(clientTable)) { return; } - UtilitaireDao.get(0).dropTable(connection, clientTable); + UtilitaireDao.get(connectionIndex).dropTable(connection, clientTable); + } + + public void dropTable(TableToRetrieve table) { + + if (table.getNod().equals(ArcDatabase.EXECUTOR)) { + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); + for (int executorConnectionId = ArcDatabase.EXECUTOR.getIndex(); executorConnectionId < ArcDatabase.EXECUTOR + .getIndex() + numberOfExecutorNods; executorConnectionId++) { + dropTable(executorConnectionId, table.getTableName()); + } + } else { + dropTable(0, table.getTableName()); + } + } /** @@ -422,7 +460,7 @@ public void dropTable(String clientTable) { * * @throws ArcException */ - public void dropPendingClientTables() throws ArcException { + public void dropPendingClientTables(int connectionId) throws ArcException { String findClientTable = ViewEnum.normalizeTableName(client + "\\_%"); @@ -431,74 +469,87 @@ public void dropPendingClientTables() throws ArcException { requete.append(" WHERE tablename like " + requete.quoteText(findClientTable)); requete.append(" AND schemaname = " + requete.quoteText(this.environnement)); - List tablesToDrop = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, requete)) + List tablesToDrop = new GenericBean(UtilitaireDao.get(connectionId).executeRequest(connection, requete)) .getColumnValues(ColumnEnum.TABLE_NAME.getColumnName()); - UtilitaireDao.get(0).executeImmediate(connection, FormatSQL.dropTable(tablesToDrop.toArray(new String[0]))); - + UtilitaireDao.get(connectionId).executeImmediate(null, + FormatSQL.dropTable(tablesToDrop.toArray(new String[0]))); } /** * create 
reporting table + * * @throws ArcException */ - public void createTableWsStatus() throws ArcException { + public void createTableWsInfo() throws ArcException { + + String tableWsInfo = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_INFO, client, timestamp); - String tableWsInfo = TableNaming.buildTableNameWithTokens(environnement, - ViewEnum.WS_INFO, client, timestamp); - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append("\n DROP TABLE IF EXISTS "+ tableWsInfo +";"); - + requete.append("\n DROP TABLE IF EXISTS " + tableWsInfo + ";"); + requete.append("\n CREATE TABLE " + tableWsInfo + FormatSQL.WITH_NO_VACUUM + " AS"); - requete.append("\n SELECT "+ requete.quoteText(client)+ " as client "); - requete.append(", "+requete.quoteText(Long.toString(timestamp)) +" as timestamp "); + requete.append("\n SELECT " + requete.quoteText(client) + " as client "); + requete.append(", " + requete.quoteText(Long.toString(timestamp)) + " as timestamp "); requete.append(";"); - - requete.append("\n DROP TABLE IF EXISTS "+ tableWsPending +";"); + + requete.append("\n DROP TABLE IF EXISTS " + tableWsPending + ";"); requete.append("\n CREATE TABLE " + tableWsPending + "();"); - + UtilitaireDao.get(0).executeImmediate(connection, requete); + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, tableWsInfo); + } public void createTableWsKO() throws ArcException { - String tableWsKO = TableNaming.buildTableNameWithTokens(environnement, - ViewEnum.WS_KO, client, timestamp); - - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append("\n DROP TABLE IF EXISTS "+ tableWsKO +";"); - requete.append("\n CREATE TABLE " + tableWsKO + "();"); - - UtilitaireDao.get(0).executeImmediate(connection, requete); + registerTableToBeRetrieved(ExportTrackingType.KO, ArcDatabase.COORDINATOR, ViewEnum.WS_KO.toString()); } - + public void dropTableWsPending() throws ArcException { 
ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - requete.append("DROP TABLE IF EXISTS "+ tableWsPending +";"); + requete.append("DROP TABLE IF EXISTS " + tableWsPending + ";"); UtilitaireDao.get(0).executeImmediate(connection, requete); } /** - * web service data creation is not pending if tableWsPending doesn't exists anymore + * web service data creation is not pending if tableWsPending doesn't exists + * anymore + * * @return * @throws ArcException */ public boolean isWebServiceNotPending() throws ArcException { + ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); - - String[] schemaAndClient = this.client.split("\\"+Delimiters.SQL_SCHEMA_DELIMITER); - String schema = schemaAndClient[0]; - String[] clientTokens = schemaAndClient[1].split(Delimiters.SQL_TOKEN_DELIMITER); - String clientExtract = clientTokens[0]; - String timestampExtract = clientTokens[1]; - - String tableWsPendingExtract = TableNaming.buildTableNameWithTokens(schema, ViewEnum.WS_PENDING, clientExtract, timestampExtract); - - requete.append("SELECT 1 FROM pg_tables"); - requete.append(" WHERE schemaname||'.'||tablename = "+requete.quoteText(tableWsPendingExtract)+" "); + requete.append("SELECT 1 FROM pg_tables WHERE schemaname||'.'||tablename = " + requete.quoteText(tableWsPending) + + " "); return !UtilitaireDao.get(0).hasResults(connection, requete); } + public void copyTableOfIdSourceToExecutorNod(int connectionId) throws ArcException { + GenericBean gb = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, + new ArcPreparedStatementBuilder("SELECT * FROM " + tableOfIdSource))); + + try (Connection executorConnection = UtilitaireDao.get(connectionId).getDriverConnexion()) { + CopyObjectsToDatabase.execCopyFromGenericBean(executorConnection, tableOfIdSource, gb); + } catch (SQLException e) { + ArcException customException = new ArcException(e, ArcExceptionMessage.DATABASE_CONNECTION_EXECUTOR_FAILED); + 
customException.logFullException(); + throw customException; + } + } + + public void deleteFromTrackTable(String tableName) throws ArcException { + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.DELETE, this.tableWsTracking); + query.build(SQL.WHERE, "table_to_retrieve=", query.quoteText(tableName)); + UtilitaireDao.get(0).executeImmediate(connection, query); + } + + public void setConnection(Connection connection) { + this.connection = connection; + } + } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/NameDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/NameDao.java index cd24a63d5..e40356cf5 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/NameDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/NameDao.java @@ -5,13 +5,14 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; public class NameDao { - public static List> execQuerySelectMetadata(String tableName) throws ArcException + public static List> execQuerySelectMetadata(TableToRetrieve table) throws ArcException { - return UtilitaireDao.get(0).executeRequest(null,new ArcPreparedStatementBuilder("select * from " + tableName + " where false ")); + return UtilitaireDao.get(table.getNod().getIndex()).executeRequest(null,new ArcPreparedStatementBuilder("select * from " + table.getTableName() + " where false ")); } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java index 41d45b3b0..62ce0bb68 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java 
@@ -4,26 +4,43 @@ import java.io.OutputStream; import java.util.zip.GZIPOutputStream; +import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.exception.ArcExceptionMessage; +import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; public class ServiceDao { - public static void execQueryExportDataToResponse(OutputStream os, String tableName, boolean csvExportFormat) throws ArcException { + public static void execQueryExportDataToResponse(OutputStream os, TableToRetrieve table, boolean csvExportFormat) throws ArcException { if (csvExportFormat) { try(GZIPOutputStream goz=new GZIPOutputStream(os);) { - UtilitaireDao.get(0).exporting(null, tableName, goz, csvExportFormat); + if (table.getNod().equals(ArcDatabase.EXECUTOR)) + { + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); + for (int executorConnectionId = ArcDatabase.EXECUTOR.getIndex(); executorConnectionId < ArcDatabase.EXECUTOR + .getIndex() + numberOfExecutorNods; executorConnectionId++) { + UtilitaireDao.get(executorConnectionId).exporting(null, table.getTableName(), goz, csvExportFormat); + } + } + else + { + UtilitaireDao.get(0).exporting(null, table.getTableName(), goz, csvExportFormat); + } } catch (IOException e) { throw new ArcException(ArcExceptionMessage.STREAM_WRITE_FAILED); } } else { - UtilitaireDao.get(0).exporting(null, tableName, os, csvExportFormat); + if (table.getNod().equals(ArcDatabase.EXECUTOR)) + { + throw new ArcException(ArcExceptionMessage.WS_RETRIEVE_DATA_SCALABLE_TABLE_MUST_BE_EXPORT_IN_CSV); + } + UtilitaireDao.get(0).exporting(null, table.getTableName(), os, csvExportFormat); } } diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifierTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifierTest.java new file mode 100644 index 000000000..54ada3262 --- /dev/null +++ 
b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ArcClientIdentifierTest.java @@ -0,0 +1,40 @@ +package fr.insee.arc.ws.services.importServlet.bo; + +import static org.junit.Assert.*; + +import org.json.JSONObject; +import org.junit.Test; + +public class ArcClientIdentifierTest { + + @Test + public void testArcClientIdentifierInitializeClient() { + + JSONObject json = new JSONObject("{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); + ArcClientIdentifier clientParameters = new ArcClientIdentifier(json, true); + + assertEquals("ARTEMIS",clientParameters.getClientInputParameter()); + assertEquals("ARTEMIS",clientParameters.getClientIdentifier()); + assertEquals("arc_bas1",clientParameters.getEnvironnement()); + assertEquals("DSN",clientParameters.getFamille()); + assertEquals(ExportFormat.CSV_GZIP.getFormat(),clientParameters.getFormat()); + } + + + @Test + public void testArcClientIdentifierRetrieveClientAttributes() { + + JSONObject json = new JSONObject("{\"client\":\"arc_bas1.ARTEMIS_1701335653112_nmcl_code_pays_etranger_2015\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); + + ArcClientIdentifier clientParameters = new ArcClientIdentifier(json, false); + + assertEquals("arc_bas1.ARTEMIS_1701335653112_nmcl_code_pays_etranger_2015",clientParameters.getClientInputParameter()); + assertEquals("ARTEMIS",clientParameters.getClientIdentifier()); + assertEquals(1701335653112L,clientParameters.getTimestamp()); + assertEquals("arc_bas1",clientParameters.getEnvironnement()); + assertEquals("DSN",clientParameters.getFamille()); + assertEquals(ExportFormat.CSV_GZIP.getFormat(),clientParameters.getFormat()); + + } + +} diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java new file mode 100644 index 000000000..50aea94ab --- /dev/null +++ 
b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -0,0 +1,103 @@ +package fr.insee.arc.ws.services.importServlet.dao; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.sql.SQLException; +import java.util.List; + +import org.json.JSONObject; +import org.junit.Test; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.query.InitializeQueryTest; +import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; +import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; + +public class ClientDaoTest extends InitializeQueryTest { + + @Test + public void clientDaoTest() throws ArcException, SQLException { + + InitializeQueryTest.buildPropertiesWithoutScalability("any"); + + initializeTestData(); + + testVerificationFamilleOK(); + testVerificationFamilleKO(); + + testSelectBusinessDataTables(); + + destroyTestData(); + } + + private void testSelectBusinessDataTables() throws ArcException { + JSONObject json = new JSONObject( + "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); + ArcClientIdentifier queryParameters = new ArcClientIdentifier(json, true); + ClientDao clientDao = new ClientDao(queryParameters); + List clientTables = clientDao.selectBusinessDataTables(); + + assertTrue(clientTables.contains("mapping_dsn_test1_ok")); + assertTrue(clientTables.contains("mapping_dsn_test2_ok")); + assertEquals(2,clientTables.size()); + } + + @Test + public void testVerificationFamilleOK() throws ArcException { + JSONObject json = new JSONObject( + "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); + ArcClientIdentifier queryParameters = new 
ArcClientIdentifier(json, true); + ClientDao clientDao = new ClientDao(queryParameters); + assertTrue(clientDao.verificationClientFamille()); + } + + @Test + public void testVerificationFamilleKO() throws ArcException { + JSONObject json = new JSONObject( + "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"BATI\",\"format\":\"csv_gzip\"}"); + ArcClientIdentifier queryParameters = new ArcClientIdentifier(json, true); + ClientDao clientDao = new ClientDao(queryParameters); + assertFalse(clientDao.verificationClientFamille()); + } + + + private void initializeTestData() throws SQLException, ArcException { + + ArcPreparedStatementBuilder query; + + query = new ArcPreparedStatementBuilder(); + + query.append("CREATE SCHEMA arc;"); + query.append("CREATE SCHEMA arc_bas1;"); + + query.append("CREATE TABLE arc.ihm_client AS "); + query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application UNION ALL "); + query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.ihm_mod_table_metier AS "); + query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); + query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test2_ok' as nom_table_metier UNION ALL "); + query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); + query.append(SQL.END_QUERY); + + UtilitaireDao.get(0).executeImmediate(c, query); + } + + private void destroyTestData() throws SQLException, ArcException { + + ArcPreparedStatementBuilder query; + + query = new ArcPreparedStatementBuilder(); + + query.append("DROP SCHEMA arc CASCADE;"); + query.append("DROP SCHEMA arc_bas1 CASCADE;"); + UtilitaireDao.get(0).executeImmediate(c, query); + } + +} From 529a5bedb13d1073f06ed967906f0e524859a59b Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Thu, 30 Nov 2023 11:44:27 +0100 Subject: [PATCH 02/19] fix: test on clientDao --- 
.../arc/core/service/global/dao/TableNamingTest.java | 6 +++--- .../arc/ws/services/importServlet/dao/ClientDao.java | 3 ++- .../ws/services/importServlet/dao/ClientDaoTest.java | 12 ++++++------ 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/TableNamingTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/TableNamingTest.java index 8fe5b9510..ff4a9ba5c 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/TableNamingTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/global/dao/TableNamingTest.java @@ -15,10 +15,10 @@ public void buildTableNameTokensSuffix() { String client = "ARTEMIS"; long timestamp = System.currentTimeMillis(); - assertEquals("arc_bas2.artemis_"+timestamp+"_pilotage_fichier", TableNaming.buildTableNameWithTokens("arc_bas2", ViewEnum.PILOTAGE_FICHIER, client, timestamp)); - assertEquals("arc_bas2.artemis_"+timestamp+"_id_source", TableNaming.buildTableNameWithTokens("arc_bas2", ColumnEnum.ID_SOURCE, client, timestamp)); + assertEquals("arc_bas2.ARTEMIS_"+timestamp+"_pilotage_fichier", TableNaming.buildTableNameWithTokens("arc_bas2", ViewEnum.PILOTAGE_FICHIER, client, timestamp)); + assertEquals("arc_bas2.ARTEMIS_"+timestamp+"_id_source", TableNaming.buildTableNameWithTokens("arc_bas2", ColumnEnum.ID_SOURCE, client, timestamp)); assertEquals(null, TableNaming.buildTableNameWithTokens("arc_bas2", ColumnEnum.ID_SOURCE, null, timestamp)); - assertEquals("arc_bas2.artemis_"+timestamp+"_test", TableNaming.buildTableNameWithTokens("arc_bas2", "test", client, timestamp)); + assertEquals("arc_bas2.ARTEMIS_"+timestamp+"_test", TableNaming.buildTableNameWithTokens("arc_bas2", "test", client, timestamp)); assertEquals("arc_bas2.test", TableNaming.buildTableNameWithTokens("arc_bas2", "TEST")); } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java 
b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index e538406aa..1aeed1314 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -80,7 +80,8 @@ public boolean verificationClientFamille() throws ArcException { ArcPreparedStatementBuilder request = new ArcPreparedStatementBuilder(); request.append("SELECT EXISTS (SELECT 1 FROM arc.ihm_client") .append(" WHERE id_application=" + request.quoteText(client)) - .append(" AND id_famille=" + request.quoteText(famille)).append(" LIMIT 1);"); + .append(" AND id_famille=" + request.quoteText(famille)) + .append(" LIMIT 1);"); String bool = UtilitaireDao.get(0).executeRequestWithoutMetadata(connection, request).get(0).get(0); diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java index 50aea94ab..b76d56414 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -23,16 +23,18 @@ public class ClientDaoTest extends InitializeQueryTest { @Test public void clientDaoTest() throws ArcException, SQLException { - InitializeQueryTest.buildPropertiesWithoutScalability("any"); + InitializeQueryTest.buildPropertiesWithoutScalability(null); + + PropertiesHandler p = PropertiesHandler.getInstance(); initializeTestData(); - + testVerificationFamilleOK(); testVerificationFamilleKO(); testSelectBusinessDataTables(); - destroyTestData(); +// destroyTestData(); } private void testSelectBusinessDataTables() throws ArcException { @@ -47,7 +49,6 @@ private void testSelectBusinessDataTables() throws ArcException { assertEquals(2,clientTables.size()); } - @Test public void testVerificationFamilleOK() throws ArcException { JSONObject json = new 
JSONObject( "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); @@ -56,7 +57,6 @@ public void testVerificationFamilleOK() throws ArcException { assertTrue(clientDao.verificationClientFamille()); } - @Test public void testVerificationFamilleKO() throws ArcException { JSONObject json = new JSONObject( "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"BATI\",\"format\":\"csv_gzip\"}"); @@ -80,7 +80,7 @@ private void initializeTestData() throws SQLException, ArcException { query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); query.append(SQL.END_QUERY); - query.append("CREATE TABLE arc_bas1.ihm_mod_table_metier AS "); + query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test2_ok' as nom_table_metier UNION ALL "); query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); From 1c1fff02a9cee14524c02c42c40c29a922e338b4 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Fri, 1 Dec 2023 13:41:07 +0100 Subject: [PATCH 03/19] feat: clientDao test --- .../arc/ws/services/importServlet/dao/ClientDao.java | 7 +++++++ .../arc/ws/services/importServlet/dao/ClientDaoTest.java | 9 +++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 1aeed1314..ebca0cbf9 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -107,6 +107,13 @@ public List selectBusinessDataTables() throws ArcException { .getColumnValues(ColumnEnum.NOM_TABLE_METIER.getColumnName()); } + /** + * register the table to be 
retrieved in tracking table + * @param wsTrackingType + * @param targetNod + * @param nomTable + * @throws ArcException + */ private void registerTableToBeRetrieved(ExportTrackingType wsTrackingType, ArcDatabase targetNod, String nomTable) throws ArcException { ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java index b76d56414..373e6905a 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -15,7 +15,6 @@ import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; -import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; public class ClientDaoTest extends InitializeQueryTest { @@ -25,16 +24,18 @@ public void clientDaoTest() throws ArcException, SQLException { InitializeQueryTest.buildPropertiesWithoutScalability(null); - PropertiesHandler p = PropertiesHandler.getInstance(); - initializeTestData(); + // test family check testVerificationFamilleOK(); testVerificationFamilleKO(); + // test data tables retrieved according to query testSelectBusinessDataTables(); + +// testCreateTableOfIdSource(); -// destroyTestData(); + destroyTestData(); } private void testSelectBusinessDataTables() throws ArcException { From c6ed374a52413413533b5e931c7dbbb099d08953 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Fri, 1 Dec 2023 17:11:46 +0100 Subject: [PATCH 04/19] Merge branch 'scalable_data_retrieval_webservice' --- .../services/importServlet/bo/JsonKeys.java | 1 - .../services/importServlet/dao/ClientDao.java | 53 ++++++-- .../importServlet/dao/ClientDaoTest.java | 126 +++++++++++++++--- 
3 files changed, 144 insertions(+), 36 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java index 1a867f26c..024e99401 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/bo/JsonKeys.java @@ -12,7 +12,6 @@ public enum JsonKeys { ,VALINF( "validiteInf" ) ,VALSUP( "validiteSup" ) ,PERIODICITE( "periodicite" ) - ,NBFICHIERS("nbfichiers") //Réponse ,ID( "id" ) //Aussi utilisé dans les réponses quelque soit le service diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index ebca0cbf9..804054b2e 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -215,9 +215,7 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException { ? requeteJSON.getString(JsonKeys.VALINF.getKey()) : null; String validiteSup = requeteJSON.getString(JsonKeys.VALSUP.getKey()); - int nbFichiers = requeteJSON.keySet().contains(JsonKeys.NBFICHIERS.getKey()) - ? requeteJSON.getInt(JsonKeys.NBFICHIERS.getKey()) - : 0; + boolean reprise = requeteJSON.getBoolean(JsonKeys.REPRISE.getKey()); StringBuilder query = new StringBuilder(); @@ -228,8 +226,7 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException { query.append("("); query.append("SELECT " + ColumnEnum.ID_SOURCE.getColumnName() - + (nbFichiers > 0 ? 
", substr(date_entree,1,10)::date as date_entree " : " ") // - + "FROM " + ViewEnum.PILOTAGE_FICHIER.getFullName(this.environnement) + " T1 "); + + " FROM " + ViewEnum.PILOTAGE_FICHIER.getFullName(this.environnement) + " T1 "); query.append( "WHERE '" + TraitementEtat.OK + "'=ANY(T1.etat_traitement) AND T1.periodicite='" + periodicite + "' "); @@ -250,13 +247,7 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException { LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.getIdSrcTableMetier() : Reprise = true"); } - query.append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName() + (nbFichiers > 0 ? ", date_entree " : " ")); // ) - - // on trie par ordre decroissant de date d'entree - if (nbFichiers > 0) { - query.append("ORDER BY date_entree DESC LIMIT "); - query.append(nbFichiers); - } + query.append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName()); // ) query.append(") as foo; "); UtilitaireDao.get(0).executeBlock(connection, query); @@ -348,7 +339,7 @@ public void createTableFamille() throws ArcException { + requete.quoteText(client) + ");"); UtilitaireDao.get(0).executeRequest(connection, requete); - registerTableToBeRetrieved(ExportTrackingType.ID_SOURCE, ArcDatabase.COORDINATOR, nomTableImage); + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); } @@ -361,7 +352,7 @@ public void createTableFamille() throws ArcException { public void createTablePeriodicite() throws ArcException { LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.createTablePeriodicite()"); - String nomTableImage = ViewEnum.getFullName(environnement, + String nomTableImage = ViewEnum.getFullNameNotNormalized(environnement, client + "_" + timestamp + "_" + ViewEnum.EXT_MOD_PERIODICITE.getTableName()); UtilitaireDao.get(0).executeImmediate(connection, "CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM @@ -560,4 +551,38 @@ public void setConnection(Connection connection) { this.connection = connection; } + public long 
getTimestamp() { + return timestamp; + } + + public String getEnvironnement() { + return environnement; + } + + public String getClient() { + return client; + } + + public String getFamille() { + return famille; + } + + public String getTableOfIdSource() { + return tableOfIdSource; + } + + public String getTableWsPending() { + return tableWsPending; + } + + public String getTableWsTracking() { + return tableWsTracking; + } + + public Connection getConnection() { + return connection; + } + + + } diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java index 373e6905a..ed1fbbf93 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -15,17 +15,30 @@ import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; +import fr.insee.arc.utils.structure.GenericBean; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; +import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; public class ClientDaoTest extends InitializeQueryTest { + // request for DSN family, ARTEMIS client and reprise = true + JSONObject jsonDsnStep1 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + ArcClientIdentifier queryParametersDsnStep1 = new ArcClientIdentifier(jsonDsnStep1, true); + ClientDao clientDaoDsnStep1 = new ClientDao(queryParametersDsnStep1); + + @Test public void clientDaoTest() throws ArcException, SQLException { InitializeQueryTest.buildPropertiesWithoutScalability(null); - + + destroyTestData(); initializeTestData(); + // test tracking table 
creation and registration + testCreateTableTrackRetrievedTables(); + // test family check testVerificationFamilleOK(); testVerificationFamilleKO(); @@ -33,40 +46,90 @@ public void clientDaoTest() throws ArcException, SQLException { // test data tables retrieved according to query testSelectBusinessDataTables(); -// testCreateTableOfIdSource(); + testCreateTableOfIdSourceRepriseFalse(); + testCreateTableOfIdSourceRepriseTrue(); destroyTestData(); } - private void testSelectBusinessDataTables() throws ArcException { - JSONObject json = new JSONObject( - "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); - ArcClientIdentifier queryParameters = new ArcClientIdentifier(json, true); - ClientDao clientDao = new ClientDao(queryParameters); - List clientTables = clientDao.selectBusinessDataTables(); + private void testCreateTableOfIdSourceRepriseTrue() throws ArcException { + clientDaoDsnStep1.createTableOfIdSource(jsonDsnStep1); + + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.append("SELECT id_source FROM "+clientDaoDsnStep1.getTableOfIdSource()+";"); + + List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); + assertEquals(2, content.size()); + } + + private void testCreateTableOfIdSourceRepriseFalse() throws ArcException { + + // request on DSN family, ARTEMIS client and reprise = false + // as reprise = false, only files not already retrieved by client must be selected + JSONObject jsonDsnStep1RepriseFalse = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + ArcClientIdentifier queryParametersDsnStep1RepriseFalse = new ArcClientIdentifier(jsonDsnStep1RepriseFalse, true); + ClientDao clientDaoDsnStep1RepriseFalse = new 
ClientDao(queryParametersDsnStep1RepriseFalse); + + // create tracking table + clientDaoDsnStep1RepriseFalse.createTableTrackRetrievedTables(); + + clientDaoDsnStep1RepriseFalse.createTableOfIdSource(jsonDsnStep1RepriseFalse); + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.append("SELECT id_source FROM "+clientDaoDsnStep1RepriseFalse.getTableOfIdSource()+";"); + + List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); + // only 1 file must be selected as reprise = false + // file_not_to_retrieve_when_reprise_false has already been marked as retrieved by 'ARTEMIS' client + assertEquals(1, content.size()); + } + + private void testCreateTableTrackRetrievedTables() throws ArcException { + clientDaoDsnStep1.createTableTrackRetrievedTables(); + + // test + // retrieve table content + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.append("SELECT tracking_type FROM "+clientDaoDsnStep1.getTableWsTracking()+";"); + List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("tracking_type"); + // test that the table had been created and that it had been registered in itself + assertEquals(1, content.size()); + assertEquals(ExportTrackingType.TRACK.toString(), content.get(0)); + + } + + private void testSelectBusinessDataTables() throws ArcException { + + List clientTables = clientDaoDsnStep1.selectBusinessDataTables(); + assertTrue(clientTables.contains("mapping_dsn_test1_ok")); assertTrue(clientTables.contains("mapping_dsn_test2_ok")); assertEquals(2,clientTables.size()); } public void testVerificationFamilleOK() throws ArcException { - JSONObject json = new JSONObject( - "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}"); - ArcClientIdentifier queryParameters = new ArcClientIdentifier(json, true); - ClientDao clientDao = new ClientDao(queryParameters); - 
assertTrue(clientDao.verificationClientFamille()); + assertTrue(clientDaoDsnStep1.verificationClientFamille()); } public void testVerificationFamilleKO() throws ArcException { - JSONObject json = new JSONObject( - "{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"BATI\",\"format\":\"csv_gzip\"}"); - ArcClientIdentifier queryParameters = new ArcClientIdentifier(json, true); - ClientDao clientDao = new ClientDao(queryParameters); - assertFalse(clientDao.verificationClientFamille()); + // request on BATI family, RESIL client and reprise = true + // BATI family doesn't exists in the test data set + JSONObject jsonBatiStep1 = new JSONObject( + "{\"familleNorme\":\"BATI\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"RESIL\",\"environnement\":\"arc_bas1\"}"); + ArcClientIdentifier queryParametersBatiStep1 = new ArcClientIdentifier(jsonBatiStep1, true); + ClientDao clientDaoBatiStep1 = new ClientDao(queryParametersBatiStep1); + + assertFalse(clientDaoBatiStep1.verificationClientFamille()); } - + + + /** + * initialize data for the tests + * @throws SQLException + * @throws ArcException + */ private void initializeTestData() throws SQLException, ArcException { ArcPreparedStatementBuilder query; @@ -86,18 +149,39 @@ private void initializeTestData() throws SQLException, ArcException { query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test2_ok' as nom_table_metier UNION ALL "); query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", null::text[] as 
client, null::timestamp[] as date_client"); + query.append(" UNION ALL "); + // file that mustn't be retrieved when reprise is false and family is DSN + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.norme AS "); + query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); + query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); + query.append(SQL.END_QUERY); UtilitaireDao.get(0).executeImmediate(c, query); } + /** + * destroy data for the tests + * @throws SQLException + * @throws ArcException + */ private void destroyTestData() throws SQLException, ArcException { ArcPreparedStatementBuilder query; query = new ArcPreparedStatementBuilder(); - query.append("DROP SCHEMA arc CASCADE;"); - query.append("DROP SCHEMA arc_bas1 CASCADE;"); + query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); + query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;"); UtilitaireDao.get(0).executeImmediate(c, query); } From f8f8898e7105c06929e37c9e5922fc5ee9660554 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Fri, 1 Dec 2023 17:46:51 +0100 Subject: [PATCH 05/19] feat: test on data table image creation --- .../services/importServlet/dao/ClientDao.java | 12 +- .../importServlet/dao/ClientDaoTest.java | 108 ++++++++++++++---- 2 files changed, 96 insertions(+), 24 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 804054b2e..96e8966f8 100644 --- 
a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -2,6 +2,7 @@ import java.sql.Connection; import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -131,7 +132,7 @@ private void registerTableToBeRetrieved(ExportTrackingType wsTrackingType, ArcDa * @return liste des noms de tables images crées * @throws ArcException */ - private void addImage(String tableMetier, int executorConnectionId) throws ArcException { + private String addImage(String tableMetier, int executorConnectionId) throws ArcException { StringBuilder request = new StringBuilder(); String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, tableMetier, client, timestamp); @@ -147,6 +148,8 @@ private void addImage(String tableMetier, int executorConnectionId) throws ArcEx UtilitaireDao.get(executorConnectionId).executeBlock(connection, request); registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.EXECUTOR, nomTableImage); + + return nomTableImage; } @@ -264,12 +267,15 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException { * @return liste des noms de tables images crées * @throws ArcException */ - public void createImages(List tablesMetierNames, int executorConnectionId) throws ArcException { + public List createImages(List tablesMetierNames, int executorConnectionId) throws ArcException { LoggerHelper.debugAsComment(LOGGER, timestamp, "ClientDaoImpl.createImage()"); + List dataTableImages = new ArrayList<>(); + for (String tableMetier : tablesMetierNames) { - addImage(tableMetier, executorConnectionId); + dataTableImages.add(addImage(tableMetier, executorConnectionId)); } + return dataTableImages; } /* diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java 
index ed1fbbf93..a28a37f41 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -10,6 +10,7 @@ import org.json.JSONObject; import org.junit.Test; +import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; @@ -18,12 +19,13 @@ import fr.insee.arc.utils.structure.GenericBean; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; +import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; public class ClientDaoTest extends InitializeQueryTest { - // request for DSN family, ARTEMIS client and reprise = true + // request for DSN family, ARTEMIS client and reprise = false JSONObject jsonDsnStep1 = new JSONObject( - "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); ArcClientIdentifier queryParametersDsnStep1 = new ArcClientIdentifier(jsonDsnStep1, true); ClientDao clientDaoDsnStep1 = new ClientDao(queryParametersDsnStep1); @@ -44,44 +46,104 @@ public void clientDaoTest() throws ArcException, SQLException { testVerificationFamilleKO(); // test data tables retrieved according to query - testSelectBusinessDataTables(); + List selectedDataTables = testSelectBusinessDataTables(); + // test id_source selection table testCreateTableOfIdSourceRepriseFalse(); testCreateTableOfIdSourceRepriseTrue(); + // test data table image creation + // table must had been 
registered in track table + List dataTableImages = testCreateImages(selectedDataTables); + + // test return table from track table + // the dataTable in dataTableImages must be found the the track data table with type ExportTrackingType.DATA + testGetAClientTableByType(dataTableImages); + // the dataTable in dataTableImages must be found the the track data table by its name + testGetAClientTableByName(dataTableImages); + destroyTestData(); } - private void testCreateTableOfIdSourceRepriseTrue() throws ArcException { + private void testGetAClientTableByType(List dataTableImages) throws ArcException { + TableToRetrieve registeredTable = clientDaoDsnStep1.getAClientTableByType(ExportTrackingType.DATA); + + // now that image had been created we should find it in tracking table + // check the name + assertEquals(dataTableImages.get(0),registeredTable.getTableName()); + // data table are found on executor nod + assertEquals(ArcDatabase.EXECUTOR,registeredTable.getNod()); + } + + private void testGetAClientTableByName(List dataTableImages) throws ArcException { + + TableToRetrieve registeredTable = clientDaoDsnStep1.getAClientTableByName(dataTableImages.get(0)); + + // now that image had been created we should find it in tracking table + // check the name + assertEquals(dataTableImages.get(0),registeredTable.getTableName()); + // the test is in non scalable nod so the data table must be on coordinator + assertEquals(ArcDatabase.EXECUTOR,registeredTable.getNod()); + } + + private List testCreateImages(List selectedDataTables) throws ArcException { + List dataTableImages = clientDaoDsnStep1.createImages(selectedDataTables, 0); + + // only 1 table in model and 1 table should had been created + assertEquals(1, dataTableImages.size()); + + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.append("SELECT distinct id_source FROM "+dataTableImages.get(0)+";"); + List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, 
query)).getColumnValues("id_source"); + + // only table with 1 id_source must had been retrieved + assertEquals(1, content.size()); + + return dataTableImages; + + } + + /** + * test on retrieving idSource + * request on DSN family, ARTEMIS client and reprise = false + * as reprise = false, only files not already retrieved by client must be selected + * @throws ArcException + */ + private void testCreateTableOfIdSourceRepriseFalse() throws ArcException { + clientDaoDsnStep1.createTableOfIdSource(jsonDsnStep1); ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); query.append("SELECT id_source FROM "+clientDaoDsnStep1.getTableOfIdSource()+";"); - List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); - assertEquals(2, content.size()); + + // only 1 file must be selected as reprise = false + // file_not_to_retrieve_when_reprise_false has already been marked as retrieved by 'ARTEMIS' client + assertEquals(1, content.size()); } - private void testCreateTableOfIdSourceRepriseFalse() throws ArcException { + /** + * test to select id_source to be retrieved when reprise=true + * @throws ArcException + */ + private void testCreateTableOfIdSourceRepriseTrue() throws ArcException { - // request on DSN family, ARTEMIS client and reprise = false - // as reprise = false, only files not already retrieved by client must be selected - JSONObject jsonDsnStep1RepriseFalse = new JSONObject( - "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); - ArcClientIdentifier queryParametersDsnStep1RepriseFalse = new ArcClientIdentifier(jsonDsnStep1RepriseFalse, true); - ClientDao clientDaoDsnStep1RepriseFalse = new ClientDao(queryParametersDsnStep1RepriseFalse); + JSONObject jsonDsnStep1RepriseTrue = new JSONObject( + 
"{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + ArcClientIdentifier queryParametersDsnStep1RepriseTrue = new ArcClientIdentifier(jsonDsnStep1RepriseTrue, true); + ClientDao clientDaoDsnStep1RepriseTrue = new ClientDao(queryParametersDsnStep1RepriseTrue); // create tracking table - clientDaoDsnStep1RepriseFalse.createTableTrackRetrievedTables(); + clientDaoDsnStep1RepriseTrue.createTableTrackRetrievedTables(); - clientDaoDsnStep1RepriseFalse.createTableOfIdSource(jsonDsnStep1RepriseFalse); + clientDaoDsnStep1RepriseTrue.createTableOfIdSource(jsonDsnStep1RepriseTrue); ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.append("SELECT id_source FROM "+clientDaoDsnStep1RepriseFalse.getTableOfIdSource()+";"); + query.append("SELECT id_source FROM "+clientDaoDsnStep1RepriseTrue.getTableOfIdSource()+";"); List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); // only 1 file must be selected as reprise = false // file_not_to_retrieve_when_reprise_false has already been marked as retrieved by 'ARTEMIS' client - assertEquals(1, content.size()); + assertEquals(2, content.size()); } private void testCreateTableTrackRetrievedTables() throws ArcException { @@ -99,13 +161,13 @@ private void testCreateTableTrackRetrievedTables() throws ArcException { } - private void testSelectBusinessDataTables() throws ArcException { + private List testSelectBusinessDataTables() throws ArcException { List clientTables = clientDaoDsnStep1.selectBusinessDataTables(); assertTrue(clientTables.contains("mapping_dsn_test1_ok")); - assertTrue(clientTables.contains("mapping_dsn_test2_ok")); - assertEquals(2,clientTables.size()); + assertEquals(1,clientTables.size()); + return clientTables; } public void testVerificationFamilleOK() throws ArcException { @@ -146,7 +208,6 
@@ private void initializeTestData() throws SQLException, ArcException { query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); - query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test2_ok' as nom_table_metier UNION ALL "); query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); query.append(SQL.END_QUERY); @@ -165,6 +226,11 @@ private void initializeTestData() throws SQLException, ArcException { query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL "); + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data"); + query.append(SQL.END_QUERY); UtilitaireDao.get(0).executeImmediate(c, query); } From be7587d0217728216df958f241678bb5dd7d7eba Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Fri, 1 Dec 2023 18:50:25 +0100 Subject: [PATCH 06/19] feat: client dao tests on metadata tables --- .../ImportStep3GetTableDataService.java | 1 - .../services/importServlet/dao/ClientDao.java | 44 +++++----- .../importServlet/dao/ClientDaoTest.java | 86 ++++++++++++++++++- 3 files changed, 107 insertions(+), 24 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java index f848750b0..546bc1133 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep3GetTableDataService.java @@ -4,7 
+4,6 @@ import org.apache.logging.log4j.Logger; import org.json.JSONObject; -import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.utils.Sleep; import fr.insee.arc.ws.services.importServlet.actions.SendResponse; diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 96e8966f8..d4a044d85 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -326,6 +326,28 @@ public void createTableVarMetier() throws ArcException { } + /* + * (non-Javadoc) + * + * @see fr.insee.arc_essnet.ws.dao.ClientDarcleMetier(java.lang.String, + * fr.insee.arc_essnet.ws.actions.Senarc + */ + public void createTableMetier() throws ArcException { + LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.sendTableMetier()"); + + String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.MOD_TABLE_METIER, client, + timestamp); + + ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder( + "\n CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS"); + requete.append("\n SELECT * FROM " + ViewEnum.MOD_TABLE_METIER.getFullName(environnement) + " "); + requete.append("\n WHERE id_famille = " + requete.quoteText(famille)); + requete.append(";"); + UtilitaireDao.get(0).executeRequest(connection, requete); + + registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); + } + /* * (non-Javadoc) * @@ -368,28 +390,6 @@ public void createTablePeriodicite() throws ArcException { } - /* - * (non-Javadoc) - * - * @see fr.insee.arc_essnet.ws.dao.ClientDarcleMetier(java.lang.String, - * fr.insee.arc_essnet.ws.actions.Senarc - */ - public void createTableMetier() throws ArcException { - LoggerHelper.debugAsComment(LOGGER, 
"ClientDaoImpl.sendTableMetier()"); - - String nomTableImage = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.MOD_TABLE_METIER, client, - timestamp); - - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder( - "\n CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM + " AS"); - requete.append("\n SELECT * FROM " + ViewEnum.MOD_TABLE_METIER.getFullName(environnement) + " "); - requete.append("\n WHERE id_famille = " + requete.quoteText(famille)); - requete.append(";"); - UtilitaireDao.get(0).executeRequest(connection, requete); - - registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage); - } - /** * Get the table object of the table to retrieve by its type * diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java index a28a37f41..c991ab6be 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -2,6 +2,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.sql.SQLException; @@ -12,6 +13,7 @@ import fr.insee.arc.core.dataobjects.ArcDatabase; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; @@ -41,6 +43,9 @@ public void clientDaoTest() throws ArcException, SQLException { // test tracking table creation and registration testCreateTableTrackRetrievedTables(); + //test return of client table when nothing found + testGetAClientTableByNameNotFound(); + // test family check testVerificationFamilleOK(); 
testVerificationFamilleKO(); @@ -62,9 +67,67 @@ public void clientDaoTest() throws ArcException, SQLException { // the dataTable in dataTableImages must be found the the track data table by its name testGetAClientTableByName(dataTableImages); + // test tables creation for metadata tables + testCreateTableNmcl(); + testCreateTableVarMetier(); + testCreateTableTableMetier(); + testCreateTableTableFamille(); + testCreateTableTablePeriodicite(); + + testDropPendingClientTables(); + + destroyTestData(); } + private void testDropPendingClientTables() throws ArcException { + clientDaoDsnStep1.dropPendingClientTables(ArcDatabase.COORDINATOR.getIndex()); + // all client tables should had been deleted + assertFalse(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS%")); + } + + private void testCreateTableNmcl() throws ArcException { + // TODO Auto-generated method stub + clientDaoDsnStep1.createTableNmcl(); + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_nmcl_table1")); + assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_nmcl_table2")); + } + + private void testCreateTableVarMetier() throws ArcException { + // TODO Auto-generated method stub + clientDaoDsnStep1.createTableVarMetier(); + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_mod_variable_metier")); + } + + private void testCreateTableTableMetier() throws ArcException { + // TODO Auto-generated method stub + clientDaoDsnStep1.createTableMetier(); + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_mod_table_metier")); + } + + private void testCreateTableTableFamille() throws ArcException { + // TODO Auto-generated method stub + clientDaoDsnStep1.createTableFamille(); + // table image created should be like 
arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_ext_mod_famille")); + } + + private void testCreateTableTablePeriodicite() throws ArcException { + // TODO Auto-generated method stub + clientDaoDsnStep1.createTablePeriodicite(); + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_ext_mod_periodicite")); + } + + private void testGetAClientTableByNameNotFound() throws ArcException { + TableToRetrieve registeredTable = clientDaoDsnStep1.getAClientTableByName("not_existing_table"); + assertNull(registeredTable.getTableName()); + assertNull(registeredTable.getNod()); + } + private void testGetAClientTableByType(List dataTableImages) throws ArcException { TableToRetrieve registeredTable = clientDaoDsnStep1.getAClientTableByType(ExportTrackingType.DATA); @@ -98,7 +161,7 @@ private List testCreateImages(List selectedDataTables) throws Ar // only table with 1 id_source must had been retrieved assertEquals(1, content.size()); - + return dataTableImages; } @@ -120,6 +183,7 @@ private void testCreateTableOfIdSourceRepriseFalse() throws ArcException { // only 1 file must be selected as reprise = false // file_not_to_retrieve_when_reprise_false has already been marked as retrieved by 'ARTEMIS' client assertEquals(1, content.size()); + } /** @@ -201,16 +265,25 @@ private void initializeTestData() throws SQLException, ArcException { query.append("CREATE SCHEMA arc;"); query.append("CREATE SCHEMA arc_bas1;"); + + // family and client tables query.append("CREATE TABLE arc.ihm_client AS "); query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application UNION ALL "); query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); 
query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); + query.append(SQL.END_QUERY); + + // pilotage tables query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); @@ -222,15 +295,26 @@ private void initializeTestData() throws SQLException, ArcException { query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; query.append(SQL.END_QUERY); + // norme table used to retrieve family of data query.append("CREATE TABLE arc_bas1.norme AS "); query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); query.append(SQL.END_QUERY); + // data tables containing two files + // one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL "); query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data"); query.append(SQL.END_QUERY); + + // nomenclature tables + query.append("CREATE TABLE arc_bas1.nmcl_table1 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + 
query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL"); + query.append(SQL.END_QUERY); UtilitaireDao.get(0).executeImmediate(c, query); } From bc3a91919e6da7a5b788b309a340a2f97411e111 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Fri, 1 Dec 2023 19:06:28 +0100 Subject: [PATCH 07/19] fix: bug in not scalable case for drop table and export --- .../arc/ws/services/importServlet/dao/ClientDao.java | 11 ++--------- .../arc/ws/services/importServlet/dao/ServiceDao.java | 11 +++++++---- .../ws/services/importServlet/dao/ClientDaoTest.java | 1 - 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index d4a044d85..041016664 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -440,13 +440,12 @@ public void dropTable(String clientTable) { } public void dropTable(int connectionIndex, String clientTable) { - if (StringUtils.isBlank(clientTable)) { - return; - } UtilitaireDao.get(connectionIndex).dropTable(connection, clientTable); } public void dropTable(TableToRetrieve table) { + + dropTable(table.getTableName()); if (table.getNod().equals(ArcDatabase.EXECUTOR)) { int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); @@ -454,8 +453,6 @@ public void dropTable(TableToRetrieve table) { .getIndex() + numberOfExecutorNods; executorConnectionId++) { dropTable(executorConnectionId, table.getTableName()); } - } else { - dropTable(0, table.getTableName()); } } @@ -553,10 +550,6 @@ public void deleteFromTrackTable(String tableName) throws ArcException { UtilitaireDao.get(0).executeImmediate(connection, query); } - public void setConnection(Connection connection) { - this.connection = connection; - } - public long getTimestamp() { return timestamp; } 
diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java index 62ce0bb68..60494f05e 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ServiceDao.java @@ -13,14 +13,16 @@ public class ServiceDao { public static void execQueryExportDataToResponse(OutputStream os, TableToRetrieve table, boolean csvExportFormat) throws ArcException { - + + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); + if (csvExportFormat) { try(GZIPOutputStream goz=new GZIPOutputStream(os);) { - if (table.getNod().equals(ArcDatabase.EXECUTOR)) + + if (table.getNod().equals(ArcDatabase.EXECUTOR) && numberOfExecutorNods>0) { - int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); for (int executorConnectionId = ArcDatabase.EXECUTOR.getIndex(); executorConnectionId < ArcDatabase.EXECUTOR .getIndex() + numberOfExecutorNods; executorConnectionId++) { UtilitaireDao.get(executorConnectionId).exporting(null, table.getTableName(), goz, csvExportFormat); @@ -36,7 +38,8 @@ public static void execQueryExportDataToResponse(OutputStream os, TableToRetriev } else { - if (table.getNod().equals(ArcDatabase.EXECUTOR)) + // binary transfer cannot be scaled + if (numberOfExecutorNods>0) { throw new ArcException(ArcExceptionMessage.WS_RETRIEVE_DATA_SCALABLE_TABLE_MUST_BE_EXPORT_IN_CSV); } diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java index c991ab6be..7aa657830 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java @@ -13,7 +13,6 @@ import fr.insee.arc.core.dataobjects.ArcDatabase; import 
fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.core.dataobjects.ViewEnum; import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; From 3d1098bc17b78012ed14b44cdfda8160247e9e7c Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Fri, 1 Dec 2023 19:29:18 +0100 Subject: [PATCH 08/19] fix: drop tableOfIdSource logic --- .../importServlet/ImportStep2GetTableNameService.java | 5 ++--- .../arc/ws/services/importServlet/dao/ClientDao.java | 10 ++++------ 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java index 8409fd25f..0a84ef8e9 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep2GetTableNameService.java @@ -81,12 +81,11 @@ public void execute(SendResponse resp) throws ArcException { if (!reprise) { this.clientDao.updatePilotage(table.getTableName()); } - - this.clientDao.dropTable(table.getTableName()); + this.clientDao.dropTable(table); } table = this.clientDao.getAClientTableByType(ExportTrackingType.TRACK); - this.clientDao.dropTable(table.getTableName()); + this.clientDao.dropTable(table); resp.send(" "); resp.endSending(); diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 041016664..0e4305945 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -435,20 +435,18 @@ public TableToRetrieve getAClientTableByName(String tableName) throws ArcExcepti : new 
TableToRetrieve(content.get("nod").get(0), content.get("table_to_retrieve").get(0)); } - public void dropTable(String clientTable) { - dropTable(ArcDatabase.COORDINATOR.getIndex(), clientTable); - } - public void dropTable(int connectionIndex, String clientTable) { + private void dropTable(int connectionIndex, String clientTable) { UtilitaireDao.get(connectionIndex).dropTable(connection, clientTable); } public void dropTable(TableToRetrieve table) { + + dropTable(ArcDatabase.COORDINATOR.getIndex(), table.getTableName()); - dropTable(table.getTableName()); + int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); if (table.getNod().equals(ArcDatabase.EXECUTOR)) { - int numberOfExecutorNods = ArcDatabase.numberOfExecutorNods(); for (int executorConnectionId = ArcDatabase.EXECUTOR.getIndex(); executorConnectionId < ArcDatabase.EXECUTOR .getIndex() + numberOfExecutorNods; executorConnectionId++) { dropTable(executorConnectionId, table.getTableName()); From 687d1823791a4066d85c01346f5306130183a99b Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 08:11:23 +0100 Subject: [PATCH 09/19] feat: test on importStep1 --- .../ws/services/importServlet/ServletArc.java | 7 +- .../importServlet/actions/SendResponse.java | 47 +++-- .../services/importServlet/dao/ClientDao.java | 1 - ...tep1InitializeClientTablesServiceTest.java | 193 ++++++++++++++++++ 4 files changed, 220 insertions(+), 28 deletions(-) create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java index 85100d8f1..be752441f 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java @@ -71,7 +71,7 @@ public void doPost(HttpServletRequest request, 
HttpServletResponse response) { if (request.getParameter("requests") != null) { - dsnRequest = buildRequest(request); + dsnRequest = validateRequest(new JSONObject(request.getParameter("requests"))); if (SecurityDao.securityAccessAndTracing(request, response, dsnRequest)) { @@ -97,10 +97,7 @@ public void doPost(HttpServletRequest request, HttpServletResponse response) { * @param request * @return */ - private JSONObject buildRequest(HttpServletRequest request) { - - // get parameters from request - JSONObject returned = new JSONObject(request.getParameter("requests")); + protected JSONObject validateRequest(JSONObject returned) { if (returned.isNull(JsonKeys.FORMAT.getKey())) { returned.put(JsonKeys.FORMAT.getKey(), ExportFormat.BINARY.getFormat()); diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java index 03a11026f..d6319e32e 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/actions/SendResponse.java @@ -1,8 +1,8 @@ package fr.insee.arc.ws.services.importServlet.actions; import java.io.IOException; +import java.io.OutputStream; -import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletResponse; import org.apache.logging.log4j.LogManager; @@ -17,36 +17,39 @@ */ public class SendResponse { - private static final Logger LOGGER = LogManager.getLogger(SendResponse.class); + private static final Logger LOGGER = LogManager.getLogger(SendResponse.class); - private ServletOutputStream wr; + private OutputStream wr; private HttpServletResponse response; - public SendResponse( HttpServletResponse response ){ + public SendResponse(OutputStream os) { + this.wr = os; + } + + public SendResponse(HttpServletResponse response) { this.response = response; try { this.response.setBufferSize(128 * 1024); - 
this.wr=this.response.getOutputStream(); - } - catch (IOException e) { + this.wr = this.response.getOutputStream(); + } catch (IOException e) { StaticLoggerDispatcher.error(LOGGER, "** Error in servlet SendResponse **"); } } - - - /**Ecrit la chaîne de caractères dans le flux de réponse compressé. + /** + * Ecrit la chaîne de caractères dans le flux de réponse compressé. + * * @param string */ - public void send( String string ){ - try { - this.wr.write( string.getBytes() );//"UTF-8" - } catch (IOException ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "send()", LOGGER, ex); - } + public void send(String string) { + try { + this.wr.write(string.getBytes());// "UTF-8" + } catch (IOException ex) { + LoggerHelper.errorGenTextAsComment(getClass(), "send()", LOGGER, ex); + } } - public void sendError(ArcException e){ + public void sendError(ArcException e) { try { this.response.sendError(500, e.getMessage()); } catch (IOException e1) { @@ -54,11 +57,11 @@ public void sendError(ArcException e){ } } - - /**Fermeture du flux. + /** + * Fermeture du flux. 
* */ - public void endSending(){ + public void endSending() { try { this.wr.flush(); this.wr.close(); @@ -67,8 +70,8 @@ public void endSending(){ } } - public ServletOutputStream getWr() { + public OutputStream getWr() { return wr; } - + } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 0e4305945..bf2020b38 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -6,7 +6,6 @@ import java.util.List; import java.util.Map; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONObject; diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java new file mode 100644 index 000000000..eaa2cf35b --- /dev/null +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java @@ -0,0 +1,193 @@ +package fr.insee.arc.ws.services.importServlet; + +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayOutputStream; +import java.sql.SQLException; + +import org.json.JSONObject; +import org.junit.Test; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.query.InitializeQueryTest; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; + +public class ImportStep1InitializeClientTablesServiceTest extends ServletArc { + + /** + * + */ + private static final long serialVersionUID = -7832574224892526397L; + + + + @Test + 
public void testExecute() throws ArcException, SQLException { + + InitializeQueryTest.buildPropertiesWithoutScalability(null); + + destroyTestData(); + initializeTestData(); + + JSONObject jsonDsnStep1 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + + jsonDsnStep1= validateRequest(jsonDsnStep1); + + ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(jsonDsnStep1); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + SendResponse sentResponse = new SendResponse(bos); + + imp.execute(sentResponse); + + testCreateAndDropWsPending(); + + testCreateTableNmcl(); + testCreateTableVarMetier(); + testCreateTableTableMetier(); + testCreateTableTableFamille(); + testCreateTableTablePeriodicite(); + + + + destroyTestData(); + } + + private void testCreateAndDropWsPending() throws ArcException { + + // check that the parallel thread that create tables drop the table ws_pending + + // it should be done in less than 50 iteration, test data is very little + int maxIteration = 50; + int i=0; + + while (i0); + assertTrue(i + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_nmcl_table1")); + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_nmcl_table2")); + } + + private void testCreateTableVarMetier() throws ArcException { + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_mod_variable_metier")); + } + + private void testCreateTableTableMetier() throws ArcException { + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_mod_table_metier")); + } + + private void 
testCreateTableTableFamille() throws ArcException { + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_famille")); + } + + private void testCreateTableTablePeriodicite() throws ArcException { + // table image created should be like arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_periodicite")); + } + + + /** + * initialize data for the tests + * @throws SQLException + * @throws ArcException + */ + private void initializeTestData() throws SQLException, ArcException { + + ArcPreparedStatementBuilder query; + + query = new ArcPreparedStatementBuilder(); + + query.append("CREATE SCHEMA arc;"); + query.append("CREATE SCHEMA arc_bas1;"); + + + // family and client tables + query.append("CREATE TABLE arc.ihm_client AS "); + query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application UNION ALL "); + query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); + query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); + query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); + query.append(SQL.END_QUERY); + + // pilotage tables + query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as 
etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", null::text[] as client, null::timestamp[] as date_client"); + query.append(" UNION ALL "); + // file that mustn't be retrieved when reprise is false and family is DSN + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; + query.append(SQL.END_QUERY); + + // norme table used to retrieve family of data + query.append("CREATE TABLE arc_bas1.norme AS "); + query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); + query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); + query.append(SQL.END_QUERY); + + // data tables containing two files + // one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet + query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL "); + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data"); + query.append(SQL.END_QUERY); + + // nomenclature tables + query.append("CREATE TABLE arc_bas1.nmcl_table1 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL"); + query.append(SQL.END_QUERY); + + UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query); + } + + + + /** + * destroy data for the tests + * @throws SQLException + * @throws ArcException + */ + 
private void destroyTestData() throws SQLException, ArcException { + + ArcPreparedStatementBuilder query; + + query = new ArcPreparedStatementBuilder(); + + query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); + query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;"); + UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query); + } + + + + +} From 2722fe03db5b99451d2fc93609dc9da5b631f1dd Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 08:15:49 +0100 Subject: [PATCH 10/19] fix: test compilation problems --- .../java/fr/insee/arc/utils/query/InitializeQueryTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java b/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java index d2a425971..eee4172e3 100644 --- a/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java +++ b/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java @@ -32,19 +32,19 @@ public void testConnection() assertNotNull(c); } - protected static void buildPropertiesWithoutScalability(String repertoire) throws SQLException + public static void buildPropertiesWithoutScalability(String repertoire) throws SQLException { buildProperties(repertoire, new Connection[] {c}); } - protected static void buildPropertiesWithScalability(String repertoire) throws SQLException + public static void buildPropertiesWithScalability(String repertoire) throws SQLException { e = new TestDatabase().testConnection; buildProperties(repertoire, new Connection[] {c, e}); } - protected static void buildProperties(String repertoire, Connection[] connections) throws SQLException + private static void buildProperties(String repertoire, Connection[] connections) throws SQLException { PropertiesHandler testProperties=PropertiesHandler.getInstance(); From 2b3ce1d3c3e97e67b846f6ec72ad405e1d620845 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 
2023 09:30:56 +0100 Subject: [PATCH 11/19] feat: test wsImport1 KO --- ...ortStep1InitializeClientTablesService.java | 12 +- .../services/importServlet/dao/ClientDao.java | 19 +- ...tep1InitializeClientTablesServiceTest.java | 52 +++-- ...p1InitializeClientTablesServiceTestKO.java | 182 ++++++++++++++++++ 4 files changed, 231 insertions(+), 34 deletions(-) create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java index 614454c2f..80574ef1a 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesService.java @@ -147,20 +147,12 @@ public void run() { executeIf(ExportSource.METADATA, () -> clientDao.createTableFamille()); executeIf(ExportSource.METADATA, () -> clientDao.createTablePeriodicite()); } catch (ArcException e) { - try { - clientDao.createTableWsKO(); - } catch (ArcException e1) { - new ArcException(ArcExceptionMessage.DATABASE_CONNECTION_FAILED).logFullException(); - } + clientDao.registerWsKO(); } finally { try { clientDao.dropTableWsPending(); } catch (ArcException e) { - try { - clientDao.createTableWsKO(); - } catch (ArcException e1) { - new ArcException(ArcExceptionMessage.DATABASE_CONNECTION_FAILED).logFullException(); - } + clientDao.registerWsKO(); } } diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index bf2020b38..2391f2ef8 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ 
b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -52,6 +52,8 @@ public class ClientDao { // the tablename of the table that tracks tables left to retrieved private String tableWsTracking; + + private String tableWsInfo; private Connection connection; @@ -67,7 +69,7 @@ public ClientDao(ArcClientIdentifier arcClientIdentifier) { timestamp); this.tableWsTracking = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_TRACKING, client, timestamp); - + this.tableWsInfo = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_INFO, client, timestamp); } /** @@ -84,7 +86,6 @@ public boolean verificationClientFamille() throws ArcException { .append(" LIMIT 1);"); String bool = UtilitaireDao.get(0).executeRequestWithoutMetadata(connection, request).get(0).get(0); - return bool.equals("t"); } @@ -192,7 +193,7 @@ public void createTableTrackRetrievedTables() throws ArcException { ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); query.build(SQL.DROP, SQL.TABLE, SQL.IF_EXISTS, this.tableWsTracking, SQL.END_QUERY); query.build(SQL.CREATE, SQL.TABLE, this.tableWsTracking, - " (tracking_type text, nod text, table_to_retrieve text) ", SQL.END_QUERY); + " (id serial, tracking_type text, nod text, table_to_retrieve text) ", SQL.END_QUERY); UtilitaireDao.get(0).executeRequest(connection, query); registerTableToBeRetrieved(ExportTrackingType.TRACK, ArcDatabase.COORDINATOR, this.tableWsTracking); @@ -403,6 +404,7 @@ public TableToRetrieve getAClientTableByType(ExportTrackingType type) throws Arc ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); query.build(SQL.SELECT, "nod, table_to_retrieve", SQL.FROM, this.tableWsTracking); query.build(SQL.WHERE, "tracking_type=", query.quoteText(type.toString())); + query.build(SQL.ORDER_BY, "id"); query.build(SQL.LIMIT, "1"); Map> content = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)) @@ -425,6 +427,7 @@ public TableToRetrieve 
getAClientTableByName(String tableName) throws ArcExcepti ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); query.build(SQL.SELECT, "nod, table_to_retrieve", SQL.FROM, this.tableWsTracking); query.build(SQL.WHERE, "table_to_retrieve=", query.quoteText(tableName)); + query.build(SQL.ORDER_BY, "id"); query.build(SQL.LIMIT, "1"); Map> content = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)) @@ -482,8 +485,6 @@ public void dropPendingClientTables(int connectionId) throws ArcException { */ public void createTableWsInfo() throws ArcException { - String tableWsInfo = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_INFO, client, timestamp); - ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder(); requete.append("\n DROP TABLE IF EXISTS " + tableWsInfo + ";"); @@ -500,8 +501,12 @@ public void createTableWsInfo() throws ArcException { } - public void createTableWsKO() throws ArcException { - registerTableToBeRetrieved(ExportTrackingType.KO, ArcDatabase.COORDINATOR, ViewEnum.WS_KO.toString()); + public void registerWsKO() { + try { + registerTableToBeRetrieved(ExportTrackingType.KO, ArcDatabase.COORDINATOR, ViewEnum.WS_KO.toString()); + } catch (ArcException e1) { + new ArcException(ArcExceptionMessage.DATABASE_CONNECTION_FAILED).logFullException(); + } } public void dropTableWsPending() throws ArcException { diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java index eaa2cf35b..a9a5a753e 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java @@ -6,6 +6,8 @@ import java.sql.SQLException; import org.json.JSONObject; +import 
org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; @@ -23,37 +25,53 @@ public class ImportStep1InitializeClientTablesServiceTest extends ServletArc { private static final long serialVersionUID = -7832574224892526397L; - - @Test - public void testExecute() throws ArcException, SQLException { + @BeforeClass + public static void setup() throws SQLException, ArcException { InitializeQueryTest.buildPropertiesWithoutScalability(null); destroyTestData(); initializeTestData(); - - JSONObject jsonDsnStep1 = new JSONObject( - "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + } + + @AfterClass + public static void tearDown() throws SQLException, ArcException { + destroyTestData(); + } - jsonDsnStep1= validateRequest(jsonDsnStep1); - - ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(jsonDsnStep1); + private String executeImportStep1(JSONObject clientJsonInput) throws ArcException + { + JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); + ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(clientJsonInputValidated); ByteArrayOutputStream bos = new ByteArrayOutputStream(); SendResponse sentResponse = new SendResponse(bos); - imp.execute(sentResponse); + return sentResponse.getWr().toString(); + } + + + @Test(expected = ArcException.class) + public void testExecuteFamilyNotValid() throws ArcException { + JSONObject clientJsonInput = new JSONObject( + "{\"familleNorme\":\"RESIL\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + executeImportStep1(clientJsonInput); + } + + + @Test + public void 
testExecute() throws ArcException { + + JSONObject clientJsonInput = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + + executeImportStep1(clientJsonInput); testCreateAndDropWsPending(); - testCreateTableNmcl(); testCreateTableVarMetier(); testCreateTableTableMetier(); testCreateTableTableFamille(); testCreateTableTablePeriodicite(); - - - - destroyTestData(); } private void testCreateAndDropWsPending() throws ArcException { @@ -106,7 +124,7 @@ private void testCreateTableTablePeriodicite() throws ArcException { * @throws SQLException * @throws ArcException */ - private void initializeTestData() throws SQLException, ArcException { + private static void initializeTestData() throws SQLException, ArcException { ArcPreparedStatementBuilder query; @@ -176,7 +194,7 @@ private void initializeTestData() throws SQLException, ArcException { * @throws SQLException * @throws ArcException */ - private void destroyTestData() throws SQLException, ArcException { + private static void destroyTestData() throws SQLException, ArcException { ArcPreparedStatementBuilder query; diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java new file mode 100644 index 000000000..bbb5504ac --- /dev/null +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java @@ -0,0 +1,182 @@ +package fr.insee.arc.ws.services.importServlet; + +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayOutputStream; +import java.sql.SQLException; + +import org.json.JSONObject; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import 
fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.query.InitializeQueryTest; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; + +public class ImportStep1InitializeClientTablesServiceTestKO extends ServletArc { + + /** + * + */ + private static final long serialVersionUID = -7832574224892526397L; + + + @BeforeClass + public static void setup() throws SQLException, ArcException { + + InitializeQueryTest.buildPropertiesWithoutScalability(null); + + destroyTestData(); + initializeTestData(); + } + + @AfterClass + public static void tearDown() throws SQLException, ArcException { + destroyTestData(); + } + + private String executeImportStep1(JSONObject clientJsonInput) throws ArcException + { + JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); + ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(clientJsonInputValidated); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + SendResponse sentResponse = new SendResponse(bos); + imp.execute(sentResponse); + return sentResponse.getWr().toString(); + } + + + @Test + public void testExecute() throws ArcException { + + JSONObject clientJsonInput = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + + String arcResponse = executeImportStep1(clientJsonInput); + + testCreateAndDropWsPending(arcResponse); + + testCreateTableWsKo(arcResponse); + + } + + private void testCreateAndDropWsPending(String arcResponse) throws ArcException { + + // check that the parallel thread that create tables drop the table ws_pending + + // it should be done 
in less than 50 iteration, test data is very little + int maxIteration = 50; + int i=0; + + while (i0); + assertTrue(i Date: Mon, 4 Dec 2023 11:41:09 +0100 Subject: [PATCH 12/19] fix: useless calculation as client token was already set --- .../ws/services/importServlet/dao/ClientDao.java | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 2391f2ef8..327b23bd4 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -163,11 +163,9 @@ private String addImage(String tableMetier, int executorConnectionId) throws Arc public void updatePilotage(String tableSource) throws ArcException { LoggerHelper.debugAsComment(LOGGER, timestamp, ": ClientDaoImpl.updatePilotage()"); - String clientOfTableSource = extractClientFromToken(); - StringBuilder query = new StringBuilder(); query.append("UPDATE " + ViewEnum.PILOTAGE_FICHIER.getFullName(environnement) + " T1 "); - query.append("SET client = array_append(client, '" + clientOfTableSource + "') "); + query.append("SET client = array_append(client, '" + this.client + "') "); query.append(", date_client = array_append( date_client, localtimestamp ) "); query.append("WHERE true "); query.append("AND EXISTS (SELECT 1 FROM " + tableSource + " T2 where T1." 
+ ColumnEnum.ID_SOURCE.getColumnName() @@ -177,18 +175,6 @@ public void updatePilotage(String tableSource) throws ArcException { UtilitaireDao.get(0).executeBlock(connection, query.toString()); } - /** - * extract the client token name from the client - * - * @param client2 - * @return - */ - private String extractClientFromToken() { - return ManipString.substringBeforeFirst( - ManipString.substringAfterFirst(this.client, Delimiters.SQL_SCHEMA_DELIMITER), - Delimiters.SQL_TOKEN_DELIMITER); - } - public void createTableTrackRetrievedTables() throws ArcException { ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); query.build(SQL.DROP, SQL.TABLE, SQL.IF_EXISTS, this.tableWsTracking, SQL.END_QUERY); From 35633e003b0cc55af19d209082d6132117df3212 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 11:49:06 +0100 Subject: [PATCH 13/19] fix: removed test from clientDao --- ...tep1InitializeClientTablesServiceTest.java | 9 +- .../importServlet/dao/ClientDaoTest.java | 337 ------------------ 2 files changed, 5 insertions(+), 341 deletions(-) delete mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java index a9a5a753e..19abc2089 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTest.java @@ -127,7 +127,6 @@ private void testCreateTableTablePeriodicite() throws ArcException { private static void initializeTestData() throws SQLException, ArcException { ArcPreparedStatementBuilder query; - query = new ArcPreparedStatementBuilder(); query.append("CREATE SCHEMA arc;"); @@ -136,7 +135,8 @@ private 
static void initializeTestData() throws SQLException, ArcException { // family and client tables query.append("CREATE TABLE arc.ihm_client AS "); - query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application UNION ALL "); + query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application"); + query.append(SQL.UNION_ALL); query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); query.append(SQL.END_QUERY); @@ -144,7 +144,8 @@ private static void initializeTestData() throws SQLException, ArcException { query.append(SQL.END_QUERY); query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); - query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); + query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier"); + query.append(SQL.UNION_ALL); query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); query.append(SQL.END_QUERY); @@ -156,7 +157,7 @@ private static void initializeTestData() throws SQLException, ArcException { query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); query.append(", null::text[] as client, null::timestamp[] as date_client"); - query.append(" UNION ALL "); + query.append(SQL.UNION_ALL); // file that mustn't be retrieved when reprise is false and family is DSN query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java 
b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java deleted file mode 100644 index 7aa657830..000000000 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/ClientDaoTest.java +++ /dev/null @@ -1,337 +0,0 @@ -package fr.insee.arc.ws.services.importServlet.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.sql.SQLException; -import java.util.List; - -import org.json.JSONObject; -import org.junit.Test; - -import fr.insee.arc.core.dataobjects.ArcDatabase; -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.utils.dao.SQL; -import fr.insee.arc.utils.dao.UtilitaireDao; -import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.query.InitializeQueryTest; -import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; -import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; -import fr.insee.arc.ws.services.importServlet.bo.TableToRetrieve; - -public class ClientDaoTest extends InitializeQueryTest { - - // request for DSN family, ARTEMIS client and reprise = false - JSONObject jsonDsnStep1 = new JSONObject( - "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); - ArcClientIdentifier queryParametersDsnStep1 = new ArcClientIdentifier(jsonDsnStep1, true); - ClientDao clientDaoDsnStep1 = new ClientDao(queryParametersDsnStep1); - - - @Test - public void clientDaoTest() throws ArcException, SQLException { - - InitializeQueryTest.buildPropertiesWithoutScalability(null); - - destroyTestData(); - initializeTestData(); - - // test tracking table creation and registration - testCreateTableTrackRetrievedTables(); - - 
//test return of client table when nothing found - testGetAClientTableByNameNotFound(); - - // test family check - testVerificationFamilleOK(); - testVerificationFamilleKO(); - - // test data tables retrieved according to query - List selectedDataTables = testSelectBusinessDataTables(); - - // test id_source selection table - testCreateTableOfIdSourceRepriseFalse(); - testCreateTableOfIdSourceRepriseTrue(); - - // test data table image creation - // table must had been registered in track table - List dataTableImages = testCreateImages(selectedDataTables); - - // test return table from track table - // the dataTable in dataTableImages must be found the the track data table with type ExportTrackingType.DATA - testGetAClientTableByType(dataTableImages); - // the dataTable in dataTableImages must be found the the track data table by its name - testGetAClientTableByName(dataTableImages); - - // test tables creation for metadata tables - testCreateTableNmcl(); - testCreateTableVarMetier(); - testCreateTableTableMetier(); - testCreateTableTableFamille(); - testCreateTableTablePeriodicite(); - - testDropPendingClientTables(); - - - destroyTestData(); - } - - private void testDropPendingClientTables() throws ArcException { - clientDaoDsnStep1.dropPendingClientTables(ArcDatabase.COORDINATOR.getIndex()); - // all client tables should had been deleted - assertFalse(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS%")); - } - - private void testCreateTableNmcl() throws ArcException { - // TODO Auto-generated method stub - clientDaoDsnStep1.createTableNmcl(); - // table image created should be like arc_bas1.ARTEMIS_timestamp_ - assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_nmcl_table1")); - assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_nmcl_table2")); - } - - private void testCreateTableVarMetier() throws ArcException { - // TODO Auto-generated method stub - clientDaoDsnStep1.createTableVarMetier(); - // table image created 
should be like arc_bas1.ARTEMIS_timestamp_ - assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_mod_variable_metier")); - } - - private void testCreateTableTableMetier() throws ArcException { - // TODO Auto-generated method stub - clientDaoDsnStep1.createTableMetier(); - // table image created should be like arc_bas1.ARTEMIS_timestamp_ - assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_mod_table_metier")); - } - - private void testCreateTableTableFamille() throws ArcException { - // TODO Auto-generated method stub - clientDaoDsnStep1.createTableFamille(); - // table image created should be like arc_bas1.ARTEMIS_timestamp_ - assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_ext_mod_famille")); - } - - private void testCreateTableTablePeriodicite() throws ArcException { - // TODO Auto-generated method stub - clientDaoDsnStep1.createTablePeriodicite(); - // table image created should be like arc_bas1.ARTEMIS_timestamp_ - assertTrue(UtilitaireDao.get(0).isTableExiste(c, "arc_bas1.ARTEMIS_%_ext_mod_periodicite")); - } - - private void testGetAClientTableByNameNotFound() throws ArcException { - TableToRetrieve registeredTable = clientDaoDsnStep1.getAClientTableByName("not_existing_table"); - assertNull(registeredTable.getTableName()); - assertNull(registeredTable.getNod()); - } - - private void testGetAClientTableByType(List dataTableImages) throws ArcException { - TableToRetrieve registeredTable = clientDaoDsnStep1.getAClientTableByType(ExportTrackingType.DATA); - - // now that image had been created we should find it in tracking table - // check the name - assertEquals(dataTableImages.get(0),registeredTable.getTableName()); - // data table are found on executor nod - assertEquals(ArcDatabase.EXECUTOR,registeredTable.getNod()); - } - - private void testGetAClientTableByName(List dataTableImages) throws ArcException { - - TableToRetrieve registeredTable = 
clientDaoDsnStep1.getAClientTableByName(dataTableImages.get(0)); - - // now that image had been created we should find it in tracking table - // check the name - assertEquals(dataTableImages.get(0),registeredTable.getTableName()); - // the test is in non scalable nod so the data table must be on coordinator - assertEquals(ArcDatabase.EXECUTOR,registeredTable.getNod()); - } - - private List testCreateImages(List selectedDataTables) throws ArcException { - List dataTableImages = clientDaoDsnStep1.createImages(selectedDataTables, 0); - - // only 1 table in model and 1 table should had been created - assertEquals(1, dataTableImages.size()); - - ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.append("SELECT distinct id_source FROM "+dataTableImages.get(0)+";"); - List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); - - // only table with 1 id_source must had been retrieved - assertEquals(1, content.size()); - - return dataTableImages; - - } - - /** - * test on retrieving idSource - * request on DSN family, ARTEMIS client and reprise = false - * as reprise = false, only files not already retrieved by client must be selected - * @throws ArcException - */ - private void testCreateTableOfIdSourceRepriseFalse() throws ArcException { - - clientDaoDsnStep1.createTableOfIdSource(jsonDsnStep1); - - ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.append("SELECT id_source FROM "+clientDaoDsnStep1.getTableOfIdSource()+";"); - List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); - - // only 1 file must be selected as reprise = false - // file_not_to_retrieve_when_reprise_false has already been marked as retrieved by 'ARTEMIS' client - assertEquals(1, content.size()); - - } - - /** - * test to select id_source to be retrieved when reprise=true - * @throws ArcException - */ - private void 
testCreateTableOfIdSourceRepriseTrue() throws ArcException { - - JSONObject jsonDsnStep1RepriseTrue = new JSONObject( - "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); - ArcClientIdentifier queryParametersDsnStep1RepriseTrue = new ArcClientIdentifier(jsonDsnStep1RepriseTrue, true); - ClientDao clientDaoDsnStep1RepriseTrue = new ClientDao(queryParametersDsnStep1RepriseTrue); - - // create tracking table - clientDaoDsnStep1RepriseTrue.createTableTrackRetrievedTables(); - - clientDaoDsnStep1RepriseTrue.createTableOfIdSource(jsonDsnStep1RepriseTrue); - ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.append("SELECT id_source FROM "+clientDaoDsnStep1RepriseTrue.getTableOfIdSource()+";"); - - List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("id_source"); - // only 1 file must be selected as reprise = false - // file_not_to_retrieve_when_reprise_false has already been marked as retrieved by 'ARTEMIS' client - assertEquals(2, content.size()); - } - - private void testCreateTableTrackRetrievedTables() throws ArcException { - clientDaoDsnStep1.createTableTrackRetrievedTables(); - - // test - // retrieve table content - ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - query.append("SELECT tracking_type FROM "+clientDaoDsnStep1.getTableWsTracking()+";"); - List content = new GenericBean(UtilitaireDao.get(0).executeRequest(c, query)).getColumnValues("tracking_type"); - - // test that the table had been created and that it had been registered in itself - assertEquals(1, content.size()); - assertEquals(ExportTrackingType.TRACK.toString(), content.get(0)); - - } - - private List testSelectBusinessDataTables() throws ArcException { - - List clientTables = clientDaoDsnStep1.selectBusinessDataTables(); - - 
assertTrue(clientTables.contains("mapping_dsn_test1_ok")); - assertEquals(1,clientTables.size()); - return clientTables; - } - - public void testVerificationFamilleOK() throws ArcException { - assertTrue(clientDaoDsnStep1.verificationClientFamille()); - } - - public void testVerificationFamilleKO() throws ArcException { - // request on BATI family, RESIL client and reprise = true - // BATI family doesn't exists in the test data set - JSONObject jsonBatiStep1 = new JSONObject( - "{\"familleNorme\":\"BATI\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"RESIL\",\"environnement\":\"arc_bas1\"}"); - ArcClientIdentifier queryParametersBatiStep1 = new ArcClientIdentifier(jsonBatiStep1, true); - ClientDao clientDaoBatiStep1 = new ClientDao(queryParametersBatiStep1); - - assertFalse(clientDaoBatiStep1.verificationClientFamille()); - } - - - - /** - * initialize data for the tests - * @throws SQLException - * @throws ArcException - */ - private void initializeTestData() throws SQLException, ArcException { - - ArcPreparedStatementBuilder query; - - query = new ArcPreparedStatementBuilder(); - - query.append("CREATE SCHEMA arc;"); - query.append("CREATE SCHEMA arc_bas1;"); - - - // family and client tables - query.append("CREATE TABLE arc.ihm_client AS "); - query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application UNION ALL "); - query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); - query.append(SQL.END_QUERY); - - query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille"); - query.append(SQL.END_QUERY); - - query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); - query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL "); - query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); - query.append(SQL.END_QUERY); - - query.append("CREATE TABLE 
arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); - query.append(SQL.END_QUERY); - - // pilotage tables - query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); - query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); - query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); - query.append(", null::text[] as client, null::timestamp[] as date_client"); - query.append(" UNION ALL "); - // file that mustn't be retrieved when reprise is false and family is DSN - query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); - query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); - query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; - query.append(SQL.END_QUERY); - - // norme table used to retrieve family of data - query.append("CREATE TABLE arc_bas1.norme AS "); - query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); - query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); - query.append(SQL.END_QUERY); - - // data tables containing two files - // one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet - query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); - query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL "); - query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data"); - query.append(SQL.END_QUERY); - - // nomenclature tables - query.append("CREATE TABLE arc_bas1.nmcl_table1 
AS SELECT 1 as data"); - query.append(SQL.END_QUERY); - query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data"); - query.append(SQL.END_QUERY); - query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL"); - query.append(SQL.END_QUERY); - - UtilitaireDao.get(0).executeImmediate(c, query); - } - - /** - * destroy data for the tests - * @throws SQLException - * @throws ArcException - */ - private void destroyTestData() throws SQLException, ArcException { - - ArcPreparedStatementBuilder query; - - query = new ArcPreparedStatementBuilder(); - - query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); - query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;"); - UtilitaireDao.get(0).executeImmediate(c, query); - } - -} From 863274c8efc1aacd0fe53b31fdf9eced8886e19e Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 12:02:54 +0100 Subject: [PATCH 14/19] fix: test class rename --- ...esServiceTestKO.java => ImportStep1KoTest.java} | 14 ++++++-------- ...blesServiceTest.java => ImportStep1OkTest.java} | 7 ++----- 2 files changed, 8 insertions(+), 13 deletions(-) rename arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/{ImportStep1InitializeClientTablesServiceTestKO.java => ImportStep1KoTest.java} (95%) rename arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/{ImportStep1InitializeClientTablesServiceTest.java => ImportStep1OkTest.java} (98%) diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1KoTest.java similarity index 95% rename from arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java rename to arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1KoTest.java index bbb5504ac..11637554a 100644 --- 
a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1InitializeClientTablesServiceTestKO.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1KoTest.java @@ -18,12 +18,10 @@ import fr.insee.arc.ws.services.importServlet.actions.SendResponse; import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; -public class ImportStep1InitializeClientTablesServiceTestKO extends ServletArc { - - /** - * - */ - private static final long serialVersionUID = -7832574224892526397L; +public class ImportStep1KoTest extends ServletArc { + + private static final long serialVersionUID = 4409305598494746785L; + @BeforeClass @@ -61,7 +59,7 @@ public void testExecute() throws ArcException { testCreateAndDropWsPending(arcResponse); - testCreateTableWsKo(arcResponse); + testRegisterWsKo(arcResponse); } @@ -83,7 +81,7 @@ private void testCreateAndDropWsPending(String arcResponse) throws ArcException assertTrue(i assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_periodicite")); } - - - /** - * initialize data for the tests - * @throws SQLException - * @throws ArcException - */ - private static void initializeTestData() throws SQLException, ArcException { - - ArcPreparedStatementBuilder query; - query = new ArcPreparedStatementBuilder(); - - query.append("CREATE SCHEMA arc;"); - query.append("CREATE SCHEMA arc_bas1;"); - - - // family and client tables - query.append("CREATE TABLE arc.ihm_client AS "); - query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application"); - query.append(SQL.UNION_ALL); - query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); - query.append(SQL.END_QUERY); - - query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille"); - query.append(SQL.END_QUERY); - - query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); - query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier"); - 
query.append(SQL.UNION_ALL); - query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); - query.append(SQL.END_QUERY); - - query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); - query.append(SQL.END_QUERY); - - // pilotage tables - query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); - query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); - query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); - query.append(", null::text[] as client, null::timestamp[] as date_client"); - query.append(SQL.UNION_ALL); - // file that mustn't be retrieved when reprise is false and family is DSN - query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); - query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); - query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; - query.append(SQL.END_QUERY); - - // norme table used to retrieve family of data - query.append("CREATE TABLE arc_bas1.norme AS "); - query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); - query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); - query.append(SQL.END_QUERY); - - // data tables containing two files - // one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet - query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); - query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL "); - query.append("SELECT 
'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data"); - query.append(SQL.END_QUERY); - - // nomenclature tables - query.append("CREATE TABLE arc_bas1.nmcl_table1 AS SELECT 1 as data"); - query.append(SQL.END_QUERY); - query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data"); - query.append(SQL.END_QUERY); - query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL"); - query.append(SQL.END_QUERY); - - UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query); - } - - - - /** - * destroy data for the tests - * @throws SQLException - * @throws ArcException - */ - private static void destroyTestData() throws SQLException, ArcException { - - ArcPreparedStatementBuilder query; - - query = new ArcPreparedStatementBuilder(); - - query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); - query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;"); - UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query); - } - - - } diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java new file mode 100644 index 000000000..113d0ced6 --- /dev/null +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java @@ -0,0 +1,109 @@ +package fr.insee.arc.ws.services.importServlet.dao; + +import java.sql.SQLException; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.query.InitializeQueryTest; + +public class InitializeTestDataNoScalability { + + /** + * initialize the data sample for wsimport test + * @param ok : true mean that data will be valid, false that a table will be lacking + * @throws 
SQLException + * @throws ArcException + */ + public static void initializeTestData(boolean ok) throws SQLException, ArcException { + + InitializeQueryTest.buildPropertiesWithoutScalability(null); + + ArcPreparedStatementBuilder query; + query = new ArcPreparedStatementBuilder(); + query.append("CREATE SCHEMA arc;"); + query.append("CREATE SCHEMA arc_bas1;"); + + + // family and client tables + query.append("CREATE TABLE arc.ihm_client AS "); + query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application"); + query.append(SQL.UNION_ALL); + query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); + query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier"); + query.append(SQL.UNION_ALL); + query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); + query.append(SQL.END_QUERY); + + + if (!ok) + { + query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); + query.append(SQL.END_QUERY); + } + + // pilotage tables + query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", null::text[] as client, null::timestamp[] as date_client"); + query.append(SQL.UNION_ALL); + // file that mustn't be retrieved when reprise is false and family is DSN + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); 
+ query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; + query.append(SQL.END_QUERY); + + // norme table used to retrieve family of data + query.append("CREATE TABLE arc_bas1.norme AS "); + query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); + query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); + query.append(SQL.END_QUERY); + + // data tables containing two files + // one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet + query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL "); + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data"); + query.append(SQL.END_QUERY); + + // nomenclature tables + query.append("CREATE TABLE arc_bas1.nmcl_table1 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL"); + query.append(SQL.END_QUERY); + + UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query); + } + + + + /** + * destroy data for the tests + * @throws SQLException + * @throws ArcException + */ + public static void destroyTestData() throws SQLException, ArcException { + + ArcPreparedStatementBuilder query; + + query = new ArcPreparedStatementBuilder(); + + query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); + query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;"); + UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query); + } + + + + +} From 0de2540deec13c8321e76dd1c7b11986ef492fc0 Mon Sep 17 
00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 12:33:09 +0100 Subject: [PATCH 16/19] fix: wrong boolean ok --- .../{ImportStep1KoTest.java => ImportKOTest.java} | 3 +-- .../{ImportStep1OkTest.java => ImportOKTest.java} | 6 +----- .../importServlet/dao/InitializeTestDataNoScalability.java | 7 +++---- 3 files changed, 5 insertions(+), 11 deletions(-) rename arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/{ImportStep1KoTest.java => ImportKOTest.java} (97%) rename arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/{ImportStep1OkTest.java => ImportOKTest.java} (96%) diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1KoTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportKOTest.java similarity index 97% rename from arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1KoTest.java rename to arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportKOTest.java index 94bc45a86..5b10856d7 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1KoTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportKOTest.java @@ -11,7 +11,6 @@ import org.junit.Test; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; @@ -19,7 +18,7 @@ import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; import fr.insee.arc.ws.services.importServlet.dao.InitializeTestDataNoScalability; -public class ImportStep1KoTest extends ServletArc { +public class ImportKOTest extends ServletArc { private static final long serialVersionUID = 4409305598494746785L; diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1OkTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java 
similarity index 96% rename from arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1OkTest.java rename to arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java index 0b9f0eed5..5d1ac9862 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportStep1OkTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java @@ -10,22 +10,18 @@ import org.junit.BeforeClass; import org.junit.Test; -import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.utils.dao.SQL; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; import fr.insee.arc.ws.services.importServlet.actions.SendResponse; import fr.insee.arc.ws.services.importServlet.dao.InitializeTestDataNoScalability; -public class ImportStep1OkTest extends ServletArc { +public class ImportOKTest extends ServletArc { private static final long serialVersionUID = -7832574224892526397L; - @BeforeClass public static void setup() throws SQLException, ArcException { - InitializeTestDataNoScalability.destroyTestData(); InitializeTestDataNoScalability.initializeTestData(true); } diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java index 113d0ced6..c6548614f 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java @@ -18,8 +18,6 @@ public class InitializeTestDataNoScalability { */ public static void initializeTestData(boolean ok) throws SQLException, ArcException { - InitializeQueryTest.buildPropertiesWithoutScalability(null); - ArcPreparedStatementBuilder query; query = new 
ArcPreparedStatementBuilder(); query.append("CREATE SCHEMA arc;"); @@ -43,7 +41,7 @@ public static void initializeTestData(boolean ok) throws SQLException, ArcExcept query.append(SQL.END_QUERY); - if (!ok) + if (ok) { query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); query.append(SQL.END_QUERY); @@ -94,8 +92,9 @@ public static void initializeTestData(boolean ok) throws SQLException, ArcExcept */ public static void destroyTestData() throws SQLException, ArcException { - ArcPreparedStatementBuilder query; + InitializeQueryTest.buildPropertiesWithoutScalability(null); + ArcPreparedStatementBuilder query; query = new ArcPreparedStatementBuilder(); query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); From eaa78af3799b4a5eea285f5c4afc590a3acdc9a9 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 12:54:06 +0100 Subject: [PATCH 17/19] feat: test wsimport step 2 --- .../services/importServlet/ImportOKTest.java | 35 +++++++++++++++++-- .../dao/InitializeTestDataNoScalability.java | 6 ++-- 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java index 5d1ac9862..f293712e4 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java @@ -1,5 +1,6 @@ package fr.insee.arc.ws.services.importServlet; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; @@ -41,6 +42,16 @@ private String executeImportStep1(JSONObject clientJsonInput) throws ArcExceptio return sentResponse.getWr().toString(); } + private String executeImportStep2(JSONObject clientJsonInput) throws ArcException + { + JSONObject 
clientJsonInputValidated= validateRequest(clientJsonInput); + ImportStep2GetTableNameService imp = new ImportStep2GetTableNameService(clientJsonInputValidated); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + SendResponse sentResponse = new SendResponse(bos); + imp.execute(sentResponse); + return sentResponse.getWr().toString(); + } + @Test(expected = ArcException.class) public void testExecuteFamilyNotValid() throws ArcException { @@ -53,10 +64,12 @@ public void testExecuteFamilyNotValid() throws ArcException { @Test public void testExecute() throws ArcException { - JSONObject clientJsonInput = new JSONObject( + // parameters sent by client for step 1 + JSONObject clientJsonInputStep1 = new JSONObject( "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); - executeImportStep1(clientJsonInput); + // response token will be used by client to invoke step2 + String arcResponseStep1 = executeImportStep1(clientJsonInputStep1); testCreateAndDropWsPending(); testCreateTableNmcl(); @@ -64,6 +77,24 @@ public void testExecute() throws ArcException { testCreateTableTableMetier(); testCreateTableTableFamille(); testCreateTableTablePeriodicite(); + + // parameters sent by client for step 2 + // it use response token provided as response of step1 + JSONObject clientJsonInputStep2 =new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"tableName\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\""+arcResponseStep1+"\",\"environnement\":\"arc_bas1\",\"type\":\"jsonwsp/request\"}"); + + String arcResponseStep2 = executeImportStep2(clientJsonInputStep2); + + // ws info must be the first table to be retrieved + // token must return name of the table and the ddl of the table + assertEquals(arcResponseStep1+"_ws_info client text, timestamp text", 
arcResponseStep2); + + +// JSONObject clientJsonInputStep1 = new JSONObject( +// "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); +// + + } private void testCreateAndDropWsPending() throws ArcException { diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java index c6548614f..be1637e3e 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataNoScalability.java @@ -12,11 +12,11 @@ public class InitializeTestDataNoScalability { /** * initialize the data sample for wsimport test - * @param ok : true mean that data will be valid, false that a table will be lacking + * @param dataSampleOk : true mean that data sample will be valid, false that a table will be lacking * @throws SQLException * @throws ArcException */ - public static void initializeTestData(boolean ok) throws SQLException, ArcException { + public static void initializeTestData(boolean dataSampleOk) throws SQLException, ArcException { ArcPreparedStatementBuilder query; query = new ArcPreparedStatementBuilder(); @@ -41,7 +41,7 @@ public static void initializeTestData(boolean ok) throws SQLException, ArcExcept query.append(SQL.END_QUERY); - if (ok) + if (dataSampleOk) { query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); query.append(SQL.END_QUERY); From 9837eeee9eaeaf7c3b46169d52112f6c73ac51f1 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 15:46:29 +0100 Subject: [PATCH 18/19] feat: test wsimport iteration through 
tables to retrieve --- .../ws/services/importServlet/ServletArc.java | 2 +- .../services/importServlet/ImportOKTest.java | 184 ++++++++++++------ .../importServlet/bo/ExecuteStep.java | 51 +++++ 3 files changed, 172 insertions(+), 65 deletions(-) create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ExecuteStep.java diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java index be752441f..6bcf72a74 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/ServletArc.java @@ -97,7 +97,7 @@ public void doPost(HttpServletRequest request, HttpServletResponse response) { * @param request * @return */ - protected JSONObject validateRequest(JSONObject returned) { + protected static JSONObject validateRequest(JSONObject returned) { if (returned.isNull(JsonKeys.FORMAT.getKey())) { returned.put(JsonKeys.FORMAT.getKey(), ExportFormat.BINARY.getFormat()); diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java index f293712e4..3555cd0a6 100644 --- a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTest.java @@ -3,73 +3,58 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; +import java.util.zip.GZIPInputStream; +import org.apache.commons.io.IOUtils; import org.json.JSONObject; import org.junit.AfterClass; import org.junit.BeforeClass; import 
org.junit.Test; +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.query.InitializeQueryTest; -import fr.insee.arc.ws.services.importServlet.actions.SendResponse; +import fr.insee.arc.utils.utils.ManipString; +import fr.insee.arc.ws.services.importServlet.bo.ExecuteStep; import fr.insee.arc.ws.services.importServlet.dao.InitializeTestDataNoScalability; -public class ImportOKTest extends ServletArc { - - private static final long serialVersionUID = -7832574224892526397L; +public class ImportOKTest { @BeforeClass - public static void setup() throws SQLException, ArcException { + public static void setup() throws SQLException, ArcException { InitializeTestDataNoScalability.destroyTestData(); InitializeTestDataNoScalability.initializeTestData(true); } - + @AfterClass - public static void tearDown() throws SQLException, ArcException { + public static void tearDown() throws SQLException, ArcException { InitializeTestDataNoScalability.destroyTestData(); - } - - private String executeImportStep1(JSONObject clientJsonInput) throws ArcException - { - JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); - ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(clientJsonInputValidated); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - SendResponse sentResponse = new SendResponse(bos); - imp.execute(sentResponse); - return sentResponse.getWr().toString(); - } - - private String executeImportStep2(JSONObject clientJsonInput) throws ArcException - { - JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); - ImportStep2GetTableNameService imp = new ImportStep2GetTableNameService(clientJsonInputValidated); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - SendResponse sentResponse = new SendResponse(bos); - imp.execute(sentResponse); - return 
sentResponse.getWr().toString(); } - - + @Test(expected = ArcException.class) - public void testExecuteFamilyNotValid() throws ArcException { + public void testExecuteFamilyNotValid() throws ArcException, UnsupportedEncodingException { JSONObject clientJsonInput = new JSONObject( "{\"familleNorme\":\"RESIL\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); - executeImportStep1(clientJsonInput); + ExecuteStep.executeImportStep1(clientJsonInput); } - - + @Test - public void testExecute() throws ArcException { + public void testExecute() throws ArcException, IOException { // parameters sent by client for step 1 JSONObject clientJsonInputStep1 = new JSONObject( "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); // response token will be used by client to invoke step2 - String arcResponseStep1 = executeImportStep1(clientJsonInputStep1); + String arcResponseStep1 = ExecuteStep.executeImportStep1(clientJsonInputStep1); testCreateAndDropWsPending(); testCreateTableNmcl(); @@ -77,67 +62,138 @@ public void testExecute() throws ArcException { testCreateTableTableMetier(); testCreateTableTableFamille(); testCreateTableTablePeriodicite(); + + // test ws Iteration + testWsIteration(arcResponseStep1); - // parameters sent by client for step 2 - // it use response token provided as response of step1 - JSONObject clientJsonInputStep2 =new JSONObject( - "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"tableName\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\""+arcResponseStep1+"\",\"environnement\":\"arc_bas1\",\"type\":\"jsonwsp/request\"}"); + // test that client had been marked in pilotage + testClientMarkedInPilotage(); + + } + + private void 
testClientMarkedInPilotage() throws ArcException { + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); - String arcResponseStep2 = executeImportStep2(clientJsonInputStep2); + query.append("SELECT client[1] as client from arc_bas1.pilotage_fichier where id_source = 'file_to_retrieve.xml';"); - // ws info must be the first table to be retrieved - // token must return name of the table and the ddl of the table - assertEquals(arcResponseStep1+"_ws_info client text, timestamp text", arcResponseStep2); + String clientMarkedInPilotage = UtilitaireDao.get(0).getString(null, query); + assertEquals("ARTEMIS", clientMarkedInPilotage); -// JSONObject clientJsonInputStep1 = new JSONObject( -// "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); -// + // "ws_info", "mapping_dsn_test1_ok", "mod_table_metier", "mod_variable_metier", "nmcl_table1", "nmcl_table2", "ext_mod_famille", "ext_mod_periodicite" + } + + /** + * iterate through webservice call until the end + * test the first table to be retrieved + * @param arcResponseStep1 + * @throws IOException + * @throws ArcException + */ + private void testWsIteration(String arcResponseStep1) throws IOException, ArcException { + boolean sortie = false; + boolean first = true; + + do { + String arcResponseStep2 = invokeStep2(arcResponseStep1); + // arcResponse2 return table,ame and DDL of the table + // tablename is the first token from the arcResponse2 + String tableBucket = ManipString.substringBeforeFirst(arcResponseStep2, " "); + + sortie = tableBucket.equals(""); + + if (sortie) + { + break; + } + + ByteArrayOutputStream arcResponseStep3 = invokeStep3(tableBucket); + String outputStep3 = ""; + + try (InputStream is = new ByteArrayInputStream(arcResponseStep3.toByteArray()); + GZIPInputStream zis = new GZIPInputStream(is);) { + outputStep3 = 
IOUtils.toString(zis, StandardCharsets.UTF_8); + } + + if (first) { + // ws info must be the first table to be retrieved + assertEquals(arcResponseStep1 + "_ws_info client text, timestamp text", arcResponseStep2); + // first info in the csv table of ws_info is client name + assertEquals("ARTEMIS", ManipString.substringBeforeFirst(outputStep3, ";")); + } + + first=false; + + } while (!sortie); } - + + private ByteArrayOutputStream invokeStep3(String tableResponseStep2) + throws UnsupportedEncodingException, ArcException { + JSONObject clientJsonInputStep3 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"tableName\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"" + + tableResponseStep2 + + "\",\"environnement\":\"arc_bas1\",\"type\":\"jsonwsp/request\"}"); + return ExecuteStep.executeImportStep3(clientJsonInputStep3); + } + + private String invokeStep2(String arcResponseStep1) throws UnsupportedEncodingException, ArcException { + // parameters sent by client for step 2 + // it use response token provided as response of step1 + JSONObject clientJsonInputStep2 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"tableName\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"" + + arcResponseStep1 + "\",\"environnement\":\"arc_bas1\",\"type\":\"jsonwsp/request\"}"); + + return ExecuteStep.executeImportStep2(clientJsonInputStep2); + } + private void testCreateAndDropWsPending() throws ArcException { - + // check that the parallel thread that create tables drop the table ws_pending // it should be done in less than 50 iteration, test data is very little int maxIteration = 50; - int i=0; - - while (i0); - assertTrue(i 0); + assertTrue(i < maxIteration); } - + private void testCreateTableNmcl() throws ArcException { - // table image created should be like arc_bas1.ARTEMIS_timestamp_ + // table image created should be like + // 
arc_bas1.ARTEMIS_timestamp_ assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_nmcl_table1")); assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_nmcl_table2")); } private void testCreateTableVarMetier() throws ArcException { - // table image created should be like arc_bas1.ARTEMIS_timestamp_ + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_mod_variable_metier")); } - + private void testCreateTableTableMetier() throws ArcException { - // table image created should be like arc_bas1.ARTEMIS_timestamp_ + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_mod_table_metier")); } - + private void testCreateTableTableFamille() throws ArcException { - // table image created should be like arc_bas1.ARTEMIS_timestamp_ + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_famille")); } - + private void testCreateTableTablePeriodicite() throws ArcException { - // table image created should be like arc_bas1.ARTEMIS_timestamp_ + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_periodicite")); } diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ExecuteStep.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ExecuteStep.java new file mode 100644 index 000000000..340ed6a7e --- /dev/null +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/bo/ExecuteStep.java @@ -0,0 +1,51 @@ +package fr.insee.arc.ws.services.importServlet.bo; + +import java.io.ByteArrayOutputStream; +import 
java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; + +import org.json.JSONObject; + +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.ws.services.importServlet.ImportStep1InitializeClientTablesService; +import fr.insee.arc.ws.services.importServlet.ImportStep2GetTableNameService; +import fr.insee.arc.ws.services.importServlet.ImportStep3GetTableDataService; +import fr.insee.arc.ws.services.importServlet.ServletArc; +import fr.insee.arc.ws.services.importServlet.actions.SendResponse; + +public class ExecuteStep extends ServletArc { + + private static final long serialVersionUID = -4856211705461299454L; + + public static String executeImportStep1(JSONObject clientJsonInput) throws ArcException, UnsupportedEncodingException + { + JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); + ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(clientJsonInputValidated); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + SendResponse sentResponse = new SendResponse(bos); + imp.execute(sentResponse); + return bos.toString(StandardCharsets.UTF_8); + } + + public static String executeImportStep2(JSONObject clientJsonInput) throws ArcException, UnsupportedEncodingException + { + JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); + ImportStep2GetTableNameService imp = new ImportStep2GetTableNameService(clientJsonInputValidated); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + SendResponse sentResponse = new SendResponse(bos); + imp.execute(sentResponse); + return bos.toString(StandardCharsets.UTF_8); + } + + public static ByteArrayOutputStream executeImportStep3(JSONObject clientJsonInput) throws ArcException, UnsupportedEncodingException + { + JSONObject clientJsonInputValidated= validateRequest(clientJsonInput); + ImportStep3GetTableDataService imp = new ImportStep3GetTableDataService(clientJsonInputValidated); + 
ByteArrayOutputStream bos = new ByteArrayOutputStream(); + SendResponse sentResponse = new SendResponse(bos); + imp.execute(sentResponse); + return bos; + } + + +} From 50cec34a533db01c30ea800e4d20ef4c3ecc4055 Mon Sep 17 00:00:00 2001 From: Manuel Soulier Date: Mon, 4 Dec 2023 16:17:47 +0100 Subject: [PATCH 19/19] feat: test wsimport with data on 2 executors nodes --- .../SynchronizeUserRulesAndMetadataTest.java | 2 +- .../arc/utils/query/InitializeQueryTest.java | 12 +- .../services/importServlet/dao/ClientDao.java | 2 - .../ImportOKTestScalability.java | 194 ++++++++++++++++++ .../dao/InitializeTestDataScalability.java | 128 ++++++++++++ 5 files changed, 334 insertions(+), 4 deletions(-) create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTestScalability.java create mode 100644 arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataScalability.java diff --git a/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java b/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java index 9661a810c..a498f1cb1 100644 --- a/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java +++ b/arc-core/src/test/java/fr/insee/arc/core/service/p0initialisation/metadata/SynchronizeUserRulesAndMetadataTest.java @@ -26,7 +26,7 @@ public void copyMetadataToExecutorsTestNotScalable() throws SQLException, ArcExc @Test public void copyMetadataToExecutorsTestScalable() throws SQLException, ArcException { - buildPropertiesWithScalability(null); + buildPropertiesWithOneExecutor(null); BddPatcherTest.initializeDatabaseForRetrieveTablesFromSchemaTest(u); diff --git a/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java b/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java index eee4172e3..cfdd7037b 100644 --- 
a/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java +++ b/arc-utils/src/test/java/fr/insee/arc/utils/query/InitializeQueryTest.java @@ -25,6 +25,8 @@ public class InitializeQueryTest { public static Connection e; + public static Connection f; + @Test public void testConnection() @@ -38,12 +40,20 @@ public static void buildPropertiesWithoutScalability(String repertoire) throws S } - public static void buildPropertiesWithScalability(String repertoire) throws SQLException + public static void buildPropertiesWithOneExecutor(String repertoire) throws SQLException { e = new TestDatabase().testConnection; buildProperties(repertoire, new Connection[] {c, e}); } + public static void buildPropertiesWithTwoExecutors(String repertoire) throws SQLException + { + e = new TestDatabase().testConnection; + f = new TestDatabase().testConnection; + buildProperties(repertoire, new Connection[] {c, e, f}); + } + + private static void buildProperties(String repertoire, Connection[] connections) throws SQLException { PropertiesHandler testProperties=PropertiesHandler.getInstance(); diff --git a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java index 327b23bd4..6ff0c7d40 100644 --- a/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java +++ b/arc-ws/src/main/java/fr/insee/arc/ws/services/importServlet/dao/ClientDao.java @@ -14,7 +14,6 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.dataobjects.ViewEnum; -import fr.insee.arc.core.model.Delimiters; import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.dao.TableNaming; @@ -26,7 +25,6 @@ import fr.insee.arc.utils.structure.GenericBean; import fr.insee.arc.utils.utils.FormatSQL; import 
fr.insee.arc.utils.utils.LoggerHelper; -import fr.insee.arc.utils.utils.ManipString; import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier; import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType; import fr.insee.arc.ws.services.importServlet.bo.JsonKeys; diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTestScalability.java b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTestScalability.java new file mode 100644 index 000000000..a7fc1b943 --- /dev/null +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/ImportOKTestScalability.java @@ -0,0 +1,194 @@ +package fr.insee.arc.ws.services.importServlet; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.zip.GZIPInputStream; + +import org.apache.commons.io.IOUtils; +import org.json.JSONObject; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.query.InitializeQueryTest; +import fr.insee.arc.utils.utils.ManipString; +import fr.insee.arc.ws.services.importServlet.bo.ExecuteStep; +import fr.insee.arc.ws.services.importServlet.dao.InitializeTestDataNoScalability; +import fr.insee.arc.ws.services.importServlet.dao.InitializeTestDataScalability; + +public class ImportOKTestScalability { + + @BeforeClass + public static void setup() throws SQLException, ArcException { + InitializeTestDataScalability.destroyTestData(); + InitializeTestDataScalability.initializeTestData(true); + } + + @AfterClass + public 
static void tearDown() throws SQLException, ArcException { + InitializeTestDataScalability.destroyTestData(); + } + + @Test + public void testExecute() throws ArcException, IOException { + + // parameters sent by client for step 1 + JSONObject clientJsonInputStep1 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}"); + + // response token will be used by client to invoke step2 + String arcResponseStep1 = ExecuteStep.executeImportStep1(clientJsonInputStep1); + + testCreateAndDropWsPending(); + testCreateTableNmcl(); + testCreateTableVarMetier(); + testCreateTableTableMetier(); + testCreateTableTableFamille(); + testCreateTableTablePeriodicite(); + + // test ws Iteration + testWsIteration(arcResponseStep1); + + // test that client had been marked in pilotage + testClientMarkedInPilotage(); + + } + + private void testClientMarkedInPilotage() throws ArcException { + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + + query.append("SELECT client[1] as client from arc_bas1.pilotage_fichier where id_source = 'file_to_retrieve.xml';"); + + String clientMarkedInPilotage = UtilitaireDao.get(0).getString(null, query); + + assertEquals("ARTEMIS", clientMarkedInPilotage); + + // "ws_info", "mapping_dsn_test1_ok", "mod_table_metier", "mod_variable_metier", "nmcl_table1", "nmcl_table2", "ext_mod_famille", "ext_mod_periodicite" + + } + + /** + * iterate through webservice call until the end + * test the first table to be retrieved + * @param arcResponseStep1 + * @throws IOException + * @throws ArcException + */ + private void testWsIteration(String arcResponseStep1) throws IOException, ArcException { + boolean sortie = false; + boolean first = true; + + do { + String arcResponseStep2 = invokeStep2(arcResponseStep1); + // arcResponseStep2 returns table name and DDL of the table + // tablename is 
the first token from the arcResponseStep2 + String tableBucket = ManipString.substringBeforeFirst(arcResponseStep2, " "); + + sortie = tableBucket.equals(""); + + if (sortie) + { + break; + } + + ByteArrayOutputStream arcResponseStep3 = invokeStep3(tableBucket); + String outputStep3 = ""; + + try (InputStream is = new ByteArrayInputStream(arcResponseStep3.toByteArray()); + GZIPInputStream zis = new GZIPInputStream(is);) { + outputStep3 = IOUtils.toString(zis, StandardCharsets.UTF_8); + } + + if (first) { + // ws info must be the first table to be retrieved + assertEquals(arcResponseStep1 + "_ws_info client text, timestamp text", arcResponseStep2); + // first info in the csv table of ws_info is client name + assertEquals("ARTEMIS", ManipString.substringBeforeFirst(outputStep3, ";")); + } + + first=false; + + } while (!sortie); + + } + + private ByteArrayOutputStream invokeStep3(String tableResponseStep2) + throws UnsupportedEncodingException, ArcException { + JSONObject clientJsonInputStep3 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"tableName\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"" + + tableResponseStep2 + + "\",\"environnement\":\"arc_bas1\",\"type\":\"jsonwsp/request\"}"); + return ExecuteStep.executeImportStep3(clientJsonInputStep3); + } + + private String invokeStep2(String arcResponseStep1) throws UnsupportedEncodingException, ArcException { + // parameters sent by client for step 2 + // it uses response token provided as response of step1 + JSONObject clientJsonInputStep2 = new JSONObject( + "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"tableName\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"" + + arcResponseStep1 + "\",\"environnement\":\"arc_bas1\",\"type\":\"jsonwsp/request\"}"); + + return ExecuteStep.executeImportStep2(clientJsonInputStep2); + } + + private void testCreateAndDropWsPending() throws ArcException { 
+ // check that the parallel thread that create tables drop the table ws_pending + + // it should be done in less than 50 iteration, test data is very little + int maxIteration = 50; + int i = 0; + + while (i < maxIteration + && UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ws_pending")) { + i++; + UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, "SELECT pg_sleep(1);"); + } + + assertTrue(i > 0); + assertTrue(i < maxIteration); + } + + private void testCreateTableNmcl() throws ArcException { + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_nmcl_table1")); + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_nmcl_table2")); + } + + private void testCreateTableVarMetier() throws ArcException { + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_mod_variable_metier")); + } + + private void testCreateTableTableMetier() throws ArcException { + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_mod_table_metier")); + } + + private void testCreateTableTableFamille() throws ArcException { + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_famille")); + } + + private void testCreateTableTablePeriodicite() throws ArcException { + // table image created should be like + // arc_bas1.ARTEMIS_timestamp_ + assertTrue(UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, "arc_bas1.ARTEMIS_%_ext_mod_periodicite")); + } + +} diff --git a/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataScalability.java 
b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataScalability.java new file mode 100644 index 000000000..1ec900c16 --- /dev/null +++ b/arc-ws/src/test/java/fr/insee/arc/ws/services/importServlet/dao/InitializeTestDataScalability.java @@ -0,0 +1,128 @@ +package fr.insee.arc.ws.services.importServlet.dao; + +import java.sql.SQLException; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.query.InitializeQueryTest; + +public class InitializeTestDataScalability { + + /** + * initialize the data sample for wsimport test + * @param dataSampleOk : true mean that data sample will be valid, false that a table will be lacking + * @throws SQLException + * @throws ArcException + */ + public static void initializeTestData(boolean dataSampleOk) throws SQLException, ArcException { + + ArcPreparedStatementBuilder query; + query = new ArcPreparedStatementBuilder(); + query.append("CREATE SCHEMA arc;"); + query.append("CREATE SCHEMA arc_bas1;"); + + + // family and client tables + query.append("CREATE TABLE arc.ihm_client AS "); + query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application"); + query.append(SQL.UNION_ALL); + query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille"); + query.append(SQL.END_QUERY); + + query.append("CREATE TABLE arc_bas1.mod_table_metier AS "); + query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier"); + query.append(SQL.UNION_ALL); + query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier"); + query.append(SQL.END_QUERY); + + + if (dataSampleOk) + { + query.append("CREATE TABLE arc_bas1.mod_variable_metier AS SELECT 'DSN' as id_famille, 
'mapping_dsn_test1_ok' as nom_table_metier, 'id_source' as nom_variable_metier"); + query.append(SQL.END_QUERY); + } + + // pilotage tables + query.append("CREATE TABLE arc_bas1.pilotage_fichier AS "); + query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", null::text[] as client, null::timestamp[] as date_client"); + query.append(SQL.UNION_ALL); + // file that mustn't be retrieved when reprise is false and family is DSN + query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite"); + query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement"); + query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");; + query.append(SQL.END_QUERY); + + // norme table used to retrieve family of data + query.append("CREATE TABLE arc_bas1.norme AS "); + query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL "); + query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille"); + query.append(SQL.END_QUERY); + + // nomenclature tables + query.append("CREATE TABLE arc_bas1.nmcl_table1 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data"); + query.append(SQL.END_QUERY); + query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL"); + query.append(SQL.END_QUERY); + + UtilitaireDao.get(0).executeImmediate(null, query); + + + // data tables containing two files + // one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet + query = new ArcPreparedStatementBuilder(); + + query.append("CREATE 
SCHEMA arc_bas1;"); + + query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); + query.append("SELECT 'file1_to_retrieve.xml' as id_source, 'data1_of_file_to_retrieve' as data UNION ALL "); + query.append("SELECT 'file1_not_to_retrieve_when_reprise_false.xml' as id_source, 'data1_of_file_not_to_retrieve_when_reprise_false' as data"); + query.append(SQL.END_QUERY); + UtilitaireDao.get(1).executeImmediate(null, query); + + query = new ArcPreparedStatementBuilder(); + + query.append("CREATE SCHEMA arc_bas1;"); + + query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS "); + query.append("SELECT 'file2_to_retrieve.xml' as id_source, 'data2_of_file_to_retrieve' as data UNION ALL "); + query.append("SELECT 'file2_not_to_retrieve_when_reprise_false.xml' as id_source, 'data2_of_file_not_to_retrieve_when_reprise_false' as data"); + query.append(SQL.END_QUERY); + UtilitaireDao.get(2).executeImmediate(null, query); + + + } + + + + /** + * destroy data for the tests + * @throws SQLException + * @throws ArcException + */ + public static void destroyTestData() throws SQLException, ArcException { + + InitializeQueryTest.buildPropertiesWithTwoExecutors(null); + + ArcPreparedStatementBuilder query; + query = new ArcPreparedStatementBuilder(); + + query.append("DROP SCHEMA IF EXISTS arc CASCADE;"); + query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;"); + UtilitaireDao.get(0).executeImmediate(null, query); + UtilitaireDao.get(1).executeImmediate(null, query); + UtilitaireDao.get(2).executeImmediate(null, query); + } + + + + +}