diff --git a/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java b/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java
index ba27477c..15634598 100644
--- a/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java
+++ b/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java
@@ -6,6 +6,7 @@
 import java.net.UnknownHostException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
+import java.sql.Connection;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
@@ -18,7 +19,6 @@
 import org.springframework.beans.factory.annotation.Autowired;
 
 import fr.insee.arc.batch.dao.BatchArcDao;
-import fr.insee.arc.batch.operation.PhaseInitializationOperation;
 import fr.insee.arc.batch.threadrunners.PhaseParameterKeys;
 import fr.insee.arc.batch.threadrunners.PhaseThreadFactory;
 import fr.insee.arc.core.model.TraitementEtat;
@@ -35,6 +35,7 @@
 import fr.insee.arc.core.util.BDParameters;
 import fr.insee.arc.utils.batch.IReturnCode;
 import fr.insee.arc.utils.consumer.ThrowingRunnable;
+import fr.insee.arc.utils.dao.UtilitaireDao;
 import fr.insee.arc.utils.database.ArcDatabase;
 import fr.insee.arc.utils.exception.ArcException;
 import fr.insee.arc.utils.exception.ArcExceptionMessage;
@@ -124,14 +125,19 @@ private static void message(String msg) {
         LoggerHelper.warn(LOGGER, msg);
     }
 
+    private BatchArcDao dao;
+
+
     /**
      * Lanceur MAIN arc
      *
      * @param args
      */
     void execute() {
-
-        try {
+        try (Connection batchConnection = UtilitaireDao.get(0).getDriverConnexion())
+        {
+
+            dao = new BatchArcDao(batchConnection);
 
             batchAvoidDnsSpam();
@@ -235,14 +241,14 @@ private void batchPatchDatabaseAndFileSystem() throws ArcException {
 
         message("Patching database");
 
-        new BddPatcher().bddScript(null);
+        new BddPatcher().bddScript(dao.getBatchConnection());
 
         BDParameters bdParameters = new BDParameters(ArcDatabase.COORDINATOR);
 
         // either we take env and envExecution from database or properties
         // default is from properties
-        if (Boolean.parseBoolean(bdParameters.getString(null, "LanceurARC.envFromDatabase", "false"))) {
-            envExecution = bdParameters.getString(null, "LanceurARC.envExecution", "arc_prod");
+        if (Boolean.parseBoolean(bdParameters.getString(dao.getBatchConnection(), "LanceurARC.envFromDatabase", "false"))) {
+            envExecution = bdParameters.getString(dao.getBatchConnection(), "LanceurARC.envExecution", "arc_prod");
         } else {
             envExecution = properties.getBatchExecutionEnvironment();
         }
@@ -255,11 +261,11 @@ private void batchPatchDatabaseAndFileSystem() throws ArcException {
 
         message("Patching filesytem");
 
         envExecution=SecurityDao.validateEnvironnement(envExecution);
 
-        new BddPatcher().bddScript(null, envExecution);
+        new BddPatcher().bddScript(dao.getBatchConnection(), envExecution);
 
         // build sandbox filesystem
-        new BuildFileSystem(null, new String[] {this.envExecution}).execute();
+        new BuildFileSystem(dao.getBatchConnection(), new String[] {this.envExecution}).execute();
 
     }
@@ -273,40 +279,40 @@ private void batchParametersGet() {
         BDParameters bdParameters = new BDParameters(ArcDatabase.COORDINATOR);
 
         boolean keepInDatabase = Boolean
-                .parseBoolean(bdParameters.getString(null, "LanceurARC.keepInDatabase", "false"));
+                .parseBoolean(bdParameters.getString(dao.getBatchConnection(), "LanceurARC.keepInDatabase", "false"));
 
         // pour le batch en cours, l'ensemble des enveloppes traitées ne peut pas
         // excéder une certaine taille
-        int tailleMaxReceptionEnMb = bdParameters.getInt(null, "LanceurARC.tailleMaxReceptionEnMb", 10);
+        int tailleMaxReceptionEnMb = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.tailleMaxReceptionEnMb", 10);
 
         // Maximum number of files to load
-        int maxFilesToLoad = bdParameters.getInt(null, "LanceurARC.maxFilesToLoad", 101);
+        int maxFilesToLoad = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.maxFilesToLoad", 101);
 
         // Maximum number of files processed in each phase iteration
-        int maxFilesPerPhase = bdParameters.getInt(null, "LanceurARC.maxFilesPerPhase", 1000000);
+        int maxFilesPerPhase = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.maxFilesPerPhase", 1000000);
 
         // fréquence à laquelle les phases sont démarrées
-        this.poolingDelay = bdParameters.getInt(null, "LanceurARC.poolingDelay", 1000);
+        this.poolingDelay = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.poolingDelay", 1000);
 
         // heure d'initalisation en production
-        hourToTriggerInitializationInProduction = bdParameters.getInt(null,
+        hourToTriggerInitializationInProduction = bdParameters.getInt(dao.getBatchConnection(),
                 "ApiService.HEURE_INITIALISATION_PRODUCTION", 22);
 
         // interval entre chaque initialisation en nb de jours
-        intervalForInitializationInDay = bdParameters.getInt(null, "LanceurARC.INTERVAL_JOUR_INITIALISATION", 7);
+        intervalForInitializationInDay = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.INTERVAL_JOUR_INITIALISATION", 7);
 
         // nombre d'iteration de la boucle batch entre chaque routine de maintenance de
         // la base de données
-        numberOfIterationBewteenDatabaseMaintenanceRoutine = bdParameters.getInt(null,
+        numberOfIterationBewteenDatabaseMaintenanceRoutine = bdParameters.getInt(dao.getBatchConnection(),
                 "LanceurARC.DATABASE_MAINTENANCE_ROUTINE_INTERVAL", 500);
 
         // nombre d'iteration de la boucle batch entre chaque routine de vérification du
         // reste à faire
-        numberOfIterationBewteenCheckTodo = bdParameters.getInt(null, "LanceurARC.DATABASE_CHECKTODO_ROUTINE_INTERVAL",
+        numberOfIterationBewteenCheckTodo = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.DATABASE_CHECKTODO_ROUTINE_INTERVAL",
                 10);
 
         // wait executor pods
-        waitExecutorTimerInMS = bdParameters.getInt(null, "LanceurARC.DATABASE_WAIT_FOR_EXECUTORS_IN_MS",
+        waitExecutorTimerInMS = bdParameters.getInt(dao.getBatchConnection(), "LanceurARC.DATABASE_WAIT_FOR_EXECUTORS_IN_MS",
                 30000);
@@ -371,11 +377,11 @@ private void batchEnvironmentPrepare() throws ArcException {
      */
     private void resetPendingFilesFromPilotage() throws ArcException {
 
-        BatchArcDao.execQueryResetPendingFilesInPilotageTable(envExecution);
+        dao.execQueryResetPendingFilesInPilotageTable(envExecution);
 
         // if volatile mode on, put back all the not fully proceeded files in reception
         // phase
-        executeIfVolatile(() -> BatchArcDao.execQueryResetPendingFilesInPilotageTableVolatile(envExecution));
+        executeIfVolatile(() -> dao.execQueryResetPendingFilesInPilotageTableVolatile(envExecution));
     }
 
     private void executorsDatabaseCreate() throws ArcException {
@@ -426,10 +432,10 @@ private void exportToParquet() throws ArcException {
     private void maintenanceTablePilotageBatch() throws ArcException {
 
         // postgres catalog maintenance
-        DatabaseMaintenance.maintenancePgCatalogAllNods(null, FormatSQL.VACUUM_OPTION_FULL);
+        DatabaseMaintenance.maintenancePgCatalogAllNods(dao.getBatchConnection(), FormatSQL.VACUUM_OPTION_FULL);
 
         // arc pilotage table maintenance
-        DatabaseMaintenance.maintenancePilotage(null, envExecution, FormatSQL.VACUUM_OPTION_NONE);
+        DatabaseMaintenance.maintenancePilotage(dao.getBatchConnection(), envExecution, FormatSQL.VACUUM_OPTION_NONE);
 
     }
@@ -530,7 +536,7 @@ private void phaseInitializationExecute() throws ArcException {
         // date programmée d'initialisation (last_init)
         // on ne la lance que s'il n'y a rien en cours (pas essentiel mais plus
         // sécurisé)
-        if ((!dejaEnCours && PhaseInitializationOperation.isInitializationMustTrigger(this.envExecution))) {
+        if ((!dejaEnCours && dao.isInitializationMustTrigger(this.envExecution))) {
             message("Initialisation en cours");
 
             PhaseThreadFactory initialiser = new PhaseThreadFactory(mapParam, TraitementPhase.INITIALISATION);
@@ -539,7 +545,7 @@ private void phaseInitializationExecute() throws ArcException {
 
             message("Initialisation terminée : " + initialiser.getReport().getDuree() + " ms");
 
-            BatchArcDao.execUpdateLastInitialisationTimestamp(envExecution, intervalForInitializationInDay,
+            dao.execUpdateLastInitialisationTimestamp(envExecution, intervalForInitializationInDay,
                     hourToTriggerInitializationInProduction);
 
             return;
@@ -560,7 +566,7 @@ private void synchronizeExecutorsMetadata() throws ArcException {
 
         message("Synchronization vers les executeurs en cours");
 
-        new SynchronizeRulesAndMetadataOperation(new Sandbox(null, this.envExecution))
+        new SynchronizeRulesAndMetadataOperation(new Sandbox(dao.getBatchConnection(), this.envExecution))
                 .synchroniserSchemaExecutionAllNods();
 
         message("Synchronization terminé");
@@ -576,8 +582,8 @@ private void synchronizeExecutorsMetadata() throws ArcException {
     private void deplacerFichiersNonTraites() throws ArcException {
 
         List<String> aBouger = exportOn ? //
-                BatchArcDao.execQuerySelectArchiveNotExported(envExecution) //
-                : BatchArcDao.execQuerySelectArchiveEnCours(envExecution);
+                dao.execQuerySelectArchiveNotExported(envExecution) //
+                : dao.execQuerySelectArchiveEnCours(envExecution);
 
         dejaEnCours = (!aBouger.isEmpty());
@@ -587,7 +593,7 @@ private void deplacerFichiersNonTraites() throws ArcException {
         }
 
         // si le s3 est actif, on sauvegarde les archives pending ou KO vers le s3
-        List<String> aBougerToS3 = ArcS3.INPUT_BUCKET.isS3Off() ? new ArrayList<>():BatchArcDao.execQuerySelectArchivePendingOrKO(envExecution);
+        List<String> aBougerToS3 = ArcS3.INPUT_BUCKET.isS3Off() ? new ArrayList<>():dao.execQuerySelectArchivePendingOrKO(envExecution);
         if (!aBougerToS3.isEmpty()) {
             savePendingOrKOArchivesToS3(envExecution, repertoire, aBougerToS3);
         }
@@ -793,7 +799,7 @@ public void run() {
      */
     private boolean isNothingLeftToDo(String envExecution) {
         boolean isNothingLeftToDo = false;
-        if (BatchArcDao.execQueryAnythingLeftTodo(envExecution) == 0) {
+        if (dao.execQueryAnythingLeftTodo(envExecution) == 0) {
             isNothingLeftToDo = true;
         }
         return isNothingLeftToDo;
@@ -806,7 +812,7 @@ private boolean isNothingLeftToDo(String envExecution) {
      * @throws ArcException
      */
     private boolean isProductionOn() throws ArcException {
-        this.productionOn = BatchArcDao.execQueryIsProductionOn(this.envExecution);
+        this.productionOn = dao.execQueryIsProductionOn(this.envExecution);
         return productionOn;
     }
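Note on the BatchARC change above: every call that previously passed a `null` connection (letting `UtilitaireDao` open one per query) now reuses a single connection pinned for the whole batch run. A minimal sketch of the pattern, assuming only the `UtilitaireDao.get(0).getDriverConnexion()` call and the `BatchArcDao` constructor shown in this diff; the `runBatch` wrapper and exception handling are illustrative:

```java
import java.sql.Connection;

import fr.insee.arc.batch.dao.BatchArcDao;
import fr.insee.arc.utils.dao.UtilitaireDao;

// Sketch of the connection-pinning pattern introduced above: one driver
// connection is opened for the whole batch run, handed to the DAO, and
// released by try-with-resources on exit.
public class BatchConnectionSketch {

	static void runBatch(String envExecution) throws Exception {
		// index 0 targets the coordinator database, as in BatchARC.execute()
		try (Connection batchConnection = UtilitaireDao.get(0).getDriverConnexion()) {
			BatchArcDao dao = new BatchArcDao(batchConnection);

			// every DAO call now reuses batchConnection instead of passing null
			// and letting UtilitaireDao borrow a fresh connection per query
			if (Boolean.TRUE.equals(dao.execQueryIsProductionOn(envExecution))) {
				dao.execQueryResetPendingFilesInPilotageTable(envExecution);
			}
		}
	}
}
```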
diff --git a/arc-batch/src/main/java/fr/insee/arc/batch/dao/BatchArcDao.java b/arc-batch/src/main/java/fr/insee/arc/batch/dao/BatchArcDao.java
index 5a4ee21a..8d993228 100644
--- a/arc-batch/src/main/java/fr/insee/arc/batch/dao/BatchArcDao.java
+++ b/arc-batch/src/main/java/fr/insee/arc/batch/dao/BatchArcDao.java
@@ -1,5 +1,9 @@
 package fr.insee.arc.batch.dao;
 
+import java.sql.Connection;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.List;
 
 import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
@@ -8,15 +12,25 @@
 import fr.insee.arc.core.model.TraitementEtat;
 import fr.insee.arc.core.model.TraitementPhase;
 import fr.insee.arc.core.model.TraitementPhase.ConditionExecution;
+import fr.insee.arc.core.service.global.bo.ArcDateFormat;
 import fr.insee.arc.utils.dao.SQL;
 import fr.insee.arc.utils.dao.UtilitaireDao;
 import fr.insee.arc.utils.database.ArcDatabase;
 import fr.insee.arc.utils.exception.ArcException;
+import fr.insee.arc.utils.exception.ArcExceptionMessage;
 import fr.insee.arc.utils.security.SqlInjectionChecked;
 import fr.insee.arc.utils.structure.GenericBean;
 
 public class BatchArcDao {
 
+    public BatchArcDao(Connection batchConnection) {
+        super();
+        this.batchConnection = batchConnection;
+    }
+
+    private Connection batchConnection;
+
     /**
      * Query to check what archives had not been fully proceeded depending on the phase execution condition
      *
@@ -41,12 +55,12 @@ private static ArcPreparedStatementBuilder queryPipelineNotFinished(String envEx
      * @return
      * @throws ArcException
      */
-    public static List<String> execQuerySelectArchiveEnCours(String envExecution) throws ArcException {
+    public List<String> execQuerySelectArchiveEnCours(String envExecution) throws ArcException {
 
         ArcPreparedStatementBuilder query = queryPipelineNotFinished(envExecution,
                 ConditionExecution.PHASE_PRECEDENTE_TERMINE_PIPELINE_NON_TERMINE);
 
-        return new GenericBean(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(null, query))
+        return new GenericBean(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(batchConnection, query))
                 .getColumnValues(ColumnEnum.CONTAINER.getColumnName());
     }
@@ -57,14 +71,14 @@ public static List<String> execQuerySelectArchiveEnCours(String envExecution) th
      * @return
      * @throws ArcException
      */
-    public static List<String> execQuerySelectArchiveNotExported(String envExecution) throws ArcException {
+    public List<String> execQuerySelectArchiveNotExported(String envExecution) throws ArcException {
 
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.append(queryPipelineNotFinished(envExecution,
                 ConditionExecution.PHASE_PRECEDENTE_TERMINE_PIPELINE_NON_TERMINE));
         query.build(SQL.UNION);
         query.append(queryPipelineNotFinished(envExecution, ConditionExecution.PIPELINE_TERMINE_DONNEES_NON_EXPORTEES));
 
-        return new GenericBean(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(null, query))
+        return new GenericBean(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(batchConnection, query))
                 .getColumnValues(ColumnEnum.CONTAINER.getColumnName());
     }
@@ -75,14 +89,14 @@ public static List<String> execQuerySelectArchiveNotExported(String envExecution
      * @return
      * @throws ArcException
      */
-    public static List<String> execQuerySelectArchivePendingOrKO(String envExecution) throws ArcException {
+    public List<String> execQuerySelectArchivePendingOrKO(String envExecution) throws ArcException {
 
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.append(queryPipelineNotFinished(envExecution,
                 ConditionExecution.PHASE_PRECEDENTE_TERMINE_PIPELINE_NON_TERMINE));
         query.build(SQL.UNION);
         query.append(queryPipelineNotFinished(envExecution, ConditionExecution.PIPELINE_TERMINE_DONNEES_KO));
 
-        return new GenericBean(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(null, query))
+        return new GenericBean(UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(batchConnection, query))
                 .getColumnValues(ColumnEnum.CONTAINER.getColumnName());
     }
@@ -96,7 +110,7 @@ public static List<String> execQuerySelectArchivePendingOrKO(String envExecution
      * @throws ArcException
      */
     @SqlInjectionChecked
-    public static void execQueryResetPendingFilesInPilotageTable(String envExecution) throws ArcException {
+    public void execQueryResetPendingFilesInPilotageTable(String envExecution) throws ArcException {
         // delete files that are en cours
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.append("\n DELETE FROM " + ViewEnum.PILOTAGE_FICHIER.getFullName(envExecution));
@@ -108,7 +122,7 @@ public static void execQueryResetPendingFilesInPilotageTable(String envExecution
         query.append("\n set etape=1 ");
         query.append("\n WHERE etape=3");
         query.append(";");
-        UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeBlock(null, query);
+        UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeBlock(batchConnection, query);
     }
@@ -118,7 +132,7 @@ public static void execQueryResetPendingFilesInPilotageTable(String envExecution
      * @param envExecution
      * @throws ArcException
      */
-    public static void execQueryResetPendingFilesInPilotageTableVolatile(String envExecution) throws ArcException {
+    public void execQueryResetPendingFilesInPilotageTableVolatile(String envExecution) throws ArcException {
 
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.append("WITH tmp_pending_files AS ( ");
@@ -140,7 +154,7 @@ public static void execQueryResetPendingFilesInPilotageTableVolatile(String envE
         query.append("\n AND a.phase_traitement = "+query.quoteText(TraitementPhase.RECEPTION.toString()));
         query.append("\n AND a.etat_traitement = "+query.quoteText(TraitementEtat.OK.getSqlArrayExpression())+"::text[]");
 
-        UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(null, query);
+        UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(batchConnection, query);
     }
@@ -150,10 +164,10 @@ public static void execQueryResetPendingFilesInPilotageTableVolatile(String envE
      * @return
      * @throws ArcException
      */
-    public static String execQueryLastInitialisationTimestamp(String envExecution) throws ArcException {
+    public String execQueryLastInitialisationTimestamp(String envExecution) throws ArcException {
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.append("select last_init from " + ViewEnum.PILOTAGE_BATCH.getFullName(envExecution));
-        return UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getString(null, query);
+        return UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getString(batchConnection, query);
     }
 
     /**
@@ -163,7 +177,7 @@ public static String execQueryLastInitialisationTimestamp(String envExecution) t
      * @param hourToTriggerInitializationInProduction
      * @throws ArcException
      */
-    public static void execUpdateLastInitialisationTimestamp(String envExecution, Integer intervalForInitializationInDay,
+    public void execUpdateLastInitialisationTimestamp(String envExecution, Integer intervalForInitializationInDay,
             Integer hourToTriggerInitializationInProduction) throws ArcException {
 
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
@@ -173,10 +187,10 @@ public static void execUpdateLastInitialisationTimestamp(String envExecution, In
         query.build(",", "operation=case when operation='R' then 'O' else operation end ");
         query.build(SQL.END_QUERY);
 
-        UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(null, query);
+        UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).executeRequest(batchConnection, query);
     }
 
-    public static Integer execQueryAnythingLeftTodo(String envExecution) {
+    public Integer execQueryAnythingLeftTodo(String envExecution) {
 
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.build(SQL.SELECT, "count(*)", SQL.FROM);
@@ -186,14 +200,47 @@ public static Integer execQueryAnythingLeftTodo(String envExecution) {
         query.build(SQL.LIMIT, "1");
         query.build(")", ViewEnum.ALIAS_A);
 
-        return UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getInt(null, query);
+        return UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).getInt(batchConnection, query);
     }
 
-    public static Boolean execQueryIsProductionOn(String envExecution) throws ArcException {
+    public Boolean execQueryIsProductionOn(String envExecution) throws ArcException {
         ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
         query.build(SQL.SELECT, "1", SQL.FROM, ViewEnum.PILOTAGE_BATCH.getFullName(envExecution));
         query.build(SQL.WHERE, ColumnEnum.OPERATION, "=", query.quoteText("O"));
-        return UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).hasResults(null, query);
+        return UtilitaireDao.get(ArcDatabase.COORDINATOR.getIndex()).hasResults(batchConnection, query);
+    }
+
+    /**
+     * The initialization phase must trigger when the current date is past the
+     * initialization date stored in database.
+     *
+     * @return true if the stored initialization date has been reached
+     * @throws ArcException if the stored date cannot be parsed
+     */
+    public boolean isInitializationMustTrigger(String envExecution) throws ArcException
+    {
+        String lastInitialize = execQueryLastInitialisationTimestamp(envExecution);
+
+        Date dNow = new Date();
+        Date dLastInitialize;
+
+        try {
+            dLastInitialize = new SimpleDateFormat(ArcDateFormat.DATE_HOUR_FORMAT_CONVERSION.getApplicationFormat())
+                    .parse(lastInitialize);
+        } catch (ParseException dateParseException) {
+            throw new ArcException(dateParseException, ArcExceptionMessage.BATCH_INITIALIZATION_DATE_PARSE_FAILED);
+        }
+
+        return (dLastInitialize.compareTo(dNow) < 0);
+    }
+
+
+    public Connection getBatchConnection() {
+        return batchConnection;
+    }
+
+    public void setBatchConnection(Connection batchConnection) {
+        this.batchConnection = batchConnection;
+    }
 }
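Note on `isInitializationMustTrigger`, moved into the DAO above: the check parses the `last_init` value from `pilotage_batch` and fires once that date lies in the past. A self-contained sketch of the same comparison, assuming an illustrative `yyyy-MM-dd:HH` pattern in place of `ArcDateFormat.DATE_HOUR_FORMAT_CONVERSION` (whose actual format string lives in arc-core):

```java
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

// Standalone illustration of the trigger test: the stored last_init value is
// parsed and compared to the current date. The "yyyy-MM-dd:HH" pattern is an
// assumption for this demo, not the project's ArcDateFormat constant.
public class InitializationTriggerSketch {

	static boolean mustTrigger(String lastInit) throws ParseException {
		Date now = new Date();
		Date lastInitDate = new SimpleDateFormat("yyyy-MM-dd:HH").parse(lastInit);
		// trigger when the scheduled initialization date is already behind us
		return lastInitDate.compareTo(now) < 0;
	}

	public static void main(String[] args) throws ParseException {
		System.out.println(mustTrigger("2000-01-01:22")); // true: date long past
		System.out.println(mustTrigger("2999-01-01:22")); // false: still scheduled
	}
}
```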
diff --git a/arc-batch/src/main/java/fr/insee/arc/batch/operation/PhaseInitializationOperation.java b/arc-batch/src/main/java/fr/insee/arc/batch/operation/PhaseInitializationOperation.java
deleted file mode 100644
index 746f25bc..00000000
--- a/arc-batch/src/main/java/fr/insee/arc/batch/operation/PhaseInitializationOperation.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package fr.insee.arc.batch.operation;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import fr.insee.arc.batch.dao.BatchArcDao;
-import fr.insee.arc.core.service.global.bo.ArcDateFormat;
-import fr.insee.arc.utils.exception.ArcException;
-import fr.insee.arc.utils.exception.ArcExceptionMessage;
-
-public class PhaseInitializationOperation {
-
-    private PhaseInitializationOperation() {
-        throw new IllegalStateException("Utility class");
-    }
-
-    /**
-     * The initialization phase can trigger when the current date is more than
-     * the initialization date stored in database
-     * true if Initialization date
-     * @return
-     * @throws ArcException
-     */
-    public static boolean isInitializationMustTrigger(String envExecution) throws ArcException
-    {
-        String lastInitialize = BatchArcDao.execQueryLastInitialisationTimestamp(envExecution);
-
-        Date dNow = new Date();
-        Date dLastInitialize;
-
-        try {
-            dLastInitialize = new SimpleDateFormat(ArcDateFormat.DATE_HOUR_FORMAT_CONVERSION.getApplicationFormat())
-                    .parse(lastInitialize);
-        } catch (ParseException dateParseException) {
-            throw new ArcException(dateParseException, ArcExceptionMessage.BATCH_INITIALIZATION_DATE_PARSE_FAILED);
-        }
-
-        return (dLastInitialize.compareTo(dNow) < 0);
-    }
-
-
-}
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java
index b30715ca..14e7f0d8 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/UtilitaireDao.java
@@ -744,9 +744,9 @@ public void maintenancePgCatalog(Connection connexion, String type) {
         executeImmediate(connexion, FormatSQL.setTimeOutMaintenance());
 
         GenericBean gb = new GenericBean(executeRequest(connexion, new GenericPreparedStatementBuilder(
-                "select tablename from pg_tables where schemaname='pg_catalog'")));
+                "select relname from pg_stat_all_tables where schemaname='pg_catalog' and n_dead_tup>"+FormatSQL.NUMBER_OF_DEAD_TUPLES_FOR_VACUUM)));
         StringBuilder requete = new StringBuilder();
-        for (String t : gb.mapContent().get("tablename")) {
+        for (String t : gb.getColumnValues("relname")) {
             requete.append(FormatSQL.vacuumSecured(t, type));
         }
         executeImmediate(connexion, requete.toString());
diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java
index 6020a6f2..2b0c7537 100644
--- a/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java
+++ b/arc-utils/src/main/java/fr/insee/arc/utils/utils/FormatSQL.java
@@ -41,8 +41,10 @@ private FormatSQL() {
     public static final String VACUUM_OPTION_NONE = "";
     public static final String VACUUM_OPTION_FREEZE = "freeze";
     public static final String VACUUM_OPTION_FULL = "full";
-
+    public static final int NUMBER_OF_DEAD_TUPLES_FOR_VACUUM = 100000;
+
     private static final Logger LOGGER = LogManager.getLogger(FormatSQL.class);
+
     /**
      * query to drop a table in database
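Note on the `maintenancePgCatalog` change above: the vacuum now targets only `pg_catalog` tables whose dead-tuple count in `pg_stat_all_tables` exceeds `FormatSQL.NUMBER_OF_DEAD_TUPLES_FOR_VACUUM`, instead of every table listed in `pg_tables`. A plain-JDBC sketch of the same selection, assuming a reachable PostgreSQL `DataSource`; the simple identifier check stands in for the project's `FormatSQL.vacuumSecured` quoting helper:

```java
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import javax.sql.DataSource;

// Plain-JDBC version of the targeted catalog vacuum: rather than vacuuming
// every pg_catalog table (the old query on pg_tables), only tables whose
// n_dead_tup statistic exceeds the threshold are vacuumed.
public class CatalogVacuumSketch {

	static final int DEAD_TUPLE_THRESHOLD = 100000; // mirrors FormatSQL.NUMBER_OF_DEAD_TUPLES_FOR_VACUUM

	static void vacuumBloatedCatalogTables(DataSource ds) throws SQLException {
		List<String> bloated = new ArrayList<>();
		try (Connection c = ds.getConnection(); Statement st = c.createStatement()) {
			try (ResultSet rs = st.executeQuery(
					"select relname from pg_stat_all_tables"
							+ " where schemaname='pg_catalog' and n_dead_tup > " + DEAD_TUPLE_THRESHOLD)) {
				while (rs.next()) {
					bloated.add(rs.getString("relname"));
				}
			}
			for (String table : bloated) {
				// catalog relnames come from the server, but guard against quoting issues anyway
				if (table.matches("[a-z_][a-z0-9_]*")) {
					// autocommit must stay on: VACUUM cannot run inside a transaction block
					st.execute("vacuum full pg_catalog." + table);
				}
			}
		}
	}
}
```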