diff --git a/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java b/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java index 2c10c642e..93f7ae0b0 100644 --- a/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java +++ b/arc-batch/src/main/java/fr/insee/arc/batch/BatchARC.java @@ -22,7 +22,7 @@ import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.dao.DatabaseMaintenance; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.core.util.BDParameters; import fr.insee.arc.utils.batch.IReturnCode; import fr.insee.arc.utils.dao.UtilitaireDao; @@ -291,7 +291,7 @@ private void effacerRepertoireChargement(String directory, String envExecution) private static void cleanDirectory(String directory, String envExecution, String envDirectory, TraitementEtat etat) throws ArcException { - File f = Paths.get(ApiReceptionService.directoryReceptionEtat(directory, envDirectory, etat)).toFile(); + File f = Paths.get(DirectoryPath.directoryReceptionEtat(directory, envDirectory, etat)).toFile(); if (!f.exists()) { return; } @@ -322,7 +322,7 @@ private static void deleteIfArchived(String repertoire, String envExecution, Fil // ajout d'un garde fou : si le fichier n'est pas archivé : pas touche File fCheck = Paths - .get(ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, envExecution, entrepot), + .get(DirectoryPath.directoryReceptionEntrepotArchive(repertoire, envExecution, entrepot), filename) .toFile(); @@ -435,10 +435,10 @@ private void copyFileFromArchiveDirectoryToOK(String envExecution, String repert String originalContainer = ManipString.substringAfterFirst(container, "_"); File fIn = Paths.get( - ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, envExecution, entrepotContainer), + DirectoryPath.directoryReceptionEntrepotArchive(repertoire, envExecution, 
entrepotContainer), originalContainer).toFile(); - File fOut = Paths.get(ApiReceptionService.directoryReceptionEtatOK(repertoire, envExecution), container) + File fOut = Paths.get(DirectoryPath.directoryReceptionEtatOK(repertoire, envExecution), container) .toFile(); Files.copy(fIn.toPath(), fOut.toPath()); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java index d29eb992e..7372ad16c 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/ResetEnvironmentService.java @@ -1,29 +1,13 @@ package fr.insee.arc.core.service.p0initialisation; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Arrays; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; -import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; -import fr.insee.arc.core.service.global.bo.Sandbox; -import fr.insee.arc.core.service.global.dao.DatabaseMaintenance; -import fr.insee.arc.core.service.global.dao.FileSystemManagement; -import fr.insee.arc.core.service.global.dao.TableNaming; import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; import fr.insee.arc.core.service.p0initialisation.useroperation.ResetEnvironmentOperation; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; -import fr.insee.arc.core.util.StaticLoggerDispatcher; -import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; -import fr.insee.arc.utils.files.FileUtilsArc; -import fr.insee.arc.utils.structure.GenericBean; -import fr.insee.arc.utils.utils.FormatSQL; -import fr.insee.arc.utils.utils.LoggerHelper; public class 
ResetEnvironmentService { diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java index cedfdd2ca..1796a8fad 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/BuildFileSystem.java @@ -5,7 +5,7 @@ import java.util.List; import fr.insee.arc.core.service.global.dao.DataStorage; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.files.FileUtilsArc; import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; @@ -45,17 +45,17 @@ public void execute() { for (String envExecution : Arrays.asList(envExecutions)) { for (String d : listEntrepot) { - FileUtilsArc.createDirIfNotexist(ApiReceptionService + FileUtilsArc.createDirIfNotexist(DirectoryPath .directoryReceptionEntrepot(properties.getBatchParametersDirectory(), envExecution, d)); - FileUtilsArc.createDirIfNotexist(ApiReceptionService.directoryReceptionEntrepotArchive( + FileUtilsArc.createDirIfNotexist(DirectoryPath.directoryReceptionEntrepotArchive( properties.getBatchParametersDirectory(), envExecution, d)); } - FileUtilsArc.createDirIfNotexist(ApiReceptionService + FileUtilsArc.createDirIfNotexist(DirectoryPath .directoryReceptionEtatEnCours(properties.getBatchParametersDirectory(), envExecution)); - FileUtilsArc.createDirIfNotexist(ApiReceptionService + FileUtilsArc.createDirIfNotexist(DirectoryPath .directoryReceptionEtatOK(properties.getBatchParametersDirectory(), envExecution)); - FileUtilsArc.createDirIfNotexist(ApiReceptionService + FileUtilsArc.createDirIfNotexist(DirectoryPath .directoryReceptionEtatKO(properties.getBatchParametersDirectory(), 
envExecution)); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java index 095b149e6..6fd88aad5 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/filesystem/RestoreFileSystem.java @@ -11,7 +11,7 @@ import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.global.dao.DataStorage; import fr.insee.arc.core.service.global.dao.FileSystemManagement; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.files.FileUtilsArc; import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; @@ -61,9 +61,9 @@ public void execute() throws ArcException { private void rebuildFileSystemInEntrepot(String rootDirectory, String entrepot) throws ArcException { - String dirEntrepotArchive = ApiReceptionService.directoryReceptionEntrepotArchive(rootDirectory, envExecution, + String dirEntrepotArchive = DirectoryPath.directoryReceptionEntrepotArchive(rootDirectory, envExecution, entrepot); - String dirEntrepot = ApiReceptionService.directoryReceptionEntrepot(rootDirectory, envExecution, entrepot); + String dirEntrepot = DirectoryPath.directoryReceptionEntrepot(rootDirectory, envExecution, entrepot); FileUtilsArc.createDirIfNotexist(dirEntrepotArchive); FileUtilsArc.createDirIfNotexist(dirEntrepot); @@ -79,7 +79,7 @@ private void rebuildFileSystemInEntrepot(String rootDirectory, String entrepot) List fileToBeMoved = DataStorage.execQuerySelectFilesNotInRegisteredArchives(connection, envExecution); for (String fname : fileToBeMoved) { - ApiReceptionService.deplacerFichier(dirEntrepotArchive, dirEntrepot, fname, fname); 
+ FileUtilsArc.deplacerFichier(dirEntrepotArchive, dirEntrepot, fname, fname); } moveBackNotRegisteredFilesFromEntrepotArchiveToEntrepot(dirEntrepot, dirEntrepotArchive); @@ -108,7 +108,7 @@ private void moveBackNotRegisteredFilesFromEntrepotArchiveToEntrepot(String dirE List fileToBeMoved = DataStorage.execQuerySelectFilesNotInRegisteredArchives(connection, envExecution); for (String fname : fileToBeMoved) { - ApiReceptionService.deplacerFichier(dirEntrepotArchive, dirEntrepot, fname, fname); + FileUtilsArc.deplacerFichier(dirEntrepotArchive, dirEntrepot, fname, fname); } } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java index 807ba5807..448224cc1 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/pilotage/CleanPilotage.java @@ -14,7 +14,7 @@ import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.global.dao.TableNaming; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.core.util.BDParameters; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; @@ -185,9 +185,9 @@ public void removeDeprecatedFiles() throws ArcException { for (int i = 0; i < m.get("entrepot").size(); i++) { String entrepot = m.get("entrepot").get(i); String archive = m.get("nom_archive").get(i); - String dirIn = ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, this.sandbox.getSchema(), + String dirIn = DirectoryPath.directoryReceptionEntrepotArchive(repertoire, this.sandbox.getSchema(), entrepot); - String dirOut = 
ApiReceptionService.directoryReceptionEntrepotArchiveOldYearStamped(repertoire, + String dirOut = DirectoryPath.directoryReceptionEntrepotArchiveOldYearStamped(repertoire, this.sandbox.getSchema(), entrepot); // création du répertoire "OLD" s'il n'existe pas @@ -198,7 +198,7 @@ public void removeDeprecatedFiles() throws ArcException { } // déplacement de l'archive de dirIn vers dirOut - ApiReceptionService.deplacerFichier(dirIn, dirOut, archive, archive); + FileUtilsArc.deplacerFichier(dirIn, dirOut, archive, archive); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ReplayOrDeleteFiles.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ReplayOrDeleteFiles.java index 6d0722267..1f29fa386 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ReplayOrDeleteFiles.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ReplayOrDeleteFiles.java @@ -8,8 +8,9 @@ import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.p0initialisation.useroperation.dao.ReplayOrDeleteFilesDao; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.files.FileUtilsArc; import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; import fr.insee.arc.utils.utils.LoggerHelper; import fr.insee.arc.utils.utils.ManipString; @@ -62,10 +63,10 @@ public void replayMarkedFiles() throws ArcException { String entrepot = ManipString.substringBeforeFirst(s, "_"); String archive = ManipString.substringAfterFirst(s, "_"); - String dirIn = ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, envDir, entrepot); - String dirOut = ApiReceptionService.directoryReceptionEntrepot(repertoire, envDir, entrepot); + String dirIn = 
DirectoryPath.directoryReceptionEntrepotArchive(repertoire, envDir, entrepot); + String dirOut = DirectoryPath.directoryReceptionEntrepot(repertoire, envDir, entrepot); - ApiReceptionService.deplacerFichier(dirIn, dirOut, archive, archive); + FileUtilsArc.deplacerFichier(dirIn, dirOut, archive, archive); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java index e811fe537..ea1f4e042 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/ResetEnvironmentOperation.java @@ -3,7 +3,6 @@ import java.nio.file.Paths; import java.sql.Connection; import java.util.ArrayList; -import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -11,14 +10,13 @@ import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; import fr.insee.arc.core.dataobjects.ColumnEnum; import fr.insee.arc.core.dataobjects.ViewEnum; -import fr.insee.arc.core.model.TraitementEtat; import fr.insee.arc.core.model.TraitementPhase; import fr.insee.arc.core.service.global.bo.Sandbox; import fr.insee.arc.core.service.global.dao.DatabaseMaintenance; import fr.insee.arc.core.service.global.dao.FileSystemManagement; import fr.insee.arc.core.service.global.dao.PilotageOperations; import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; import fr.insee.arc.utils.files.FileUtilsArc; @@ -144,18 +142,18 @@ public void clearPilotageAndDirectories(String repertoire) throws 
ArcException { if (entrepotList != null) { for (String s : entrepotList) { FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEntrepot(repertoire, envExecution, s)).toFile()); + Paths.get(DirectoryPath.directoryReceptionEntrepot(repertoire, envExecution, s)).toFile()); FileUtilsArc.deleteAndRecreateDirectory(Paths - .get(ApiReceptionService.directoryReceptionEntrepotArchive(repertoire, envExecution, s)).toFile()); + .get(DirectoryPath.directoryReceptionEntrepotArchive(repertoire, envExecution, s)).toFile()); } } } FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEtatEnCours(repertoire, envExecution)).toFile()); + Paths.get(DirectoryPath.directoryReceptionEtatEnCours(repertoire, envExecution)).toFile()); FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEtatOK(repertoire, envExecution)).toFile()); + Paths.get(DirectoryPath.directoryReceptionEtatOK(repertoire, envExecution)).toFile()); FileUtilsArc.deleteAndRecreateDirectory( - Paths.get(ApiReceptionService.directoryReceptionEtatKO(repertoire, envExecution)).toFile()); + Paths.get(DirectoryPath.directoryReceptionEtatKO(repertoire, envExecution)).toFile()); FileUtilsArc.deleteAndRecreateDirectory( Paths.get(FileSystemManagement.directoryEnvExport(repertoire, envExecution)).toFile()); } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/dao/ReplayOrDeleteFilesDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/dao/ReplayOrDeleteFilesDao.java index 5b9c55c17..b271ea62d 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/dao/ReplayOrDeleteFilesDao.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p0initialisation/useroperation/dao/ReplayOrDeleteFilesDao.java @@ -33,7 +33,7 @@ public static List execQuerySelectArchiveToReplay(Connection connection, query.build(SQL.WHERE, 
ColumnEnum.TO_DELETE, SQL.IN); - query.build(query.tuple(TraitementOperationFichier.R.getDbValue(), TraitementOperationFichier.RA.getDbValue())); + query.build(query.tupleOfValues(TraitementOperationFichier.R.getDbValue(), TraitementOperationFichier.RA.getDbValue())); return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)) .getColumnValues(ColumnEnum.CONTAINER.getColumnName()); diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java index 01b62e812..6319f1e5a 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/ApiReceptionService.java @@ -41,6 +41,10 @@ import fr.insee.arc.core.service.p0initialisation.ApiInitialisationService; import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; import fr.insee.arc.core.service.p0initialisation.pilotage.bo.ListIdSourceInPilotage; +import fr.insee.arc.core.service.p1reception.registerarchive.ArchiveRegistration; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; +import fr.insee.arc.core.service.p1reception.registerfiles.FileRegistration; +import fr.insee.arc.core.service.p1reception.registerfiles.dao.FileRegistrationDao; import fr.insee.arc.core.util.BDParameters; import fr.insee.arc.core.util.StaticLoggerDispatcher; import fr.insee.arc.utils.consumer.ThrowingConsumer; @@ -94,9 +98,9 @@ public ApiReceptionService(String aCurrentPhase, String aEnvExecution, String aDirectoryRoot, Integer aNbEnr, String paramBatch) { super(aCurrentPhase, aEnvExecution, aDirectoryRoot, aNbEnr, paramBatch); } - + @Override - public void executer() { + public void executer() throws ArcException { // Déplacement et archivage des fichiers int maxNumberOfFiles; @@ -110,826 +114,15 @@ public void executer() { } // Enregistrement 
des fichiers - GenericBean archiveContent = moveAndCheckClientFiles(this.nbEnr, maxNumberOfFiles); + ArchiveRegistration archiveRegistration = new ArchiveRegistration(coordinatorSandbox, maxNumberOfFiles, maxNumberOfFiles); - if (archiveContent != null) { - registerAndDispatchFiles(this.connexion.getCoordinatorConnection(), archiveContent); - } - } - - /** - * Initialize the application directories if needed List the the files received - * and start to move to the processing directory - * - * @param fileSizeLimit - * @param maxNumberOfFiles - * @return - */ - private GenericBean moveAndCheckClientFiles(int fileSizeLimit, int maxNumberOfFiles) { - - GenericBean archivesContent = null; - - StaticLoggerDispatcher.info(LOGGER, "moveAndCheckClientFiles"); - - try { - // Create target directories if they don't exist - FileUtilsArc.createDirIfNotexist( - ApiReceptionService.directoryReceptionEtatEnCours(this.directoryRoot, this.envExecution)); - FileUtilsArc.createDirIfNotexist( - ApiReceptionService.directoryReceptionEtatOK(this.directoryRoot, this.envExecution)); - FileUtilsArc.createDirIfNotexist( - ApiReceptionService.directoryReceptionEtatKO(this.directoryRoot, this.envExecution)); - HashMap> entrepotList = new GenericBean( - UtilitaireDao.get(0).executeRequest(this.connexion.getCoordinatorConnection(), - new ArcPreparedStatementBuilder("select id_entrepot from arc.ihm_entrepot"))) - .mapContent(); - - if (!entrepotList.isEmpty()) { - archivesContent = moveAndCheckUntilLimit(fileSizeLimit, maxNumberOfFiles, - entrepotList.get("id_entrepot")); - } - } catch (Exception ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "moveClientFiles()", LOGGER, ex); - } - - return archivesContent; - } - - /** - * Moves files into RECEPTION_ENCOURS directory, check if the archives are - * readable and returns a description of the content of all treated files. 
- */ - private GenericBean moveAndCheckUntilLimit(int fileSizeLimit, int maxNumberOfFiles, - ArrayList entrepotIdList) throws ArcException { - String dirOut = directoryReceptionEtatEnCours(this.directoryRoot, this.envExecution); - GenericBean archivesContent = null; - int fileSize = 0; - int fileNb = 0; - - StaticLoggerDispatcher.info(LOGGER, "Taille limite de fichiers à charger : " + fileSizeLimit); - - for (String d : entrepotIdList) { - - if (fileSize > fileSizeLimit || fileNb > maxNumberOfFiles) { - setReportNumberOfObject(fileNb); - break; - } - - String dirIn = ApiReceptionService.directoryReceptionEntrepot(this.directoryRoot, this.envExecution, d); - String dirArchive = ApiReceptionService.directoryReceptionEntrepotArchive(this.directoryRoot, - this.envExecution, d); - - File fDirIn = new File(dirIn); - // créer le répertoire de l'entrepot et son repertoire archive - FileUtilsArc.createDirIfNotexist(dirArchive); - FileUtilsArc.createDirIfNotexist(fDirIn); - // vérifier le type (répertoire) - if (fDirIn.isDirectory()) { - - File[] filesDirIn = fDirIn.listFiles(); - - // trier par nom - Arrays.sort(filesDirIn, (f1, f2) -> f1.getName().compareTo(f2.getName())); - - for (File f : filesDirIn) { - - // traiter le fichier - // s'il n'est pas en cours d'ecriture - if (FileUtilsArc.isCompletelyWritten(f)) { - if (fileSize > fileSizeLimit || fileNb > maxNumberOfFiles) { - setReportNumberOfObject(fileNb); - break; - } - - // Archiver le fichier - // on regarde si le fichier existe déjà dans le repertoire archive; si c'est le - // cas, on va renommer - String fname; - - for (int i = 1; i < Integer.MAX_VALUE; i++) { - - // on reprend le nom du fichier - fname = f.getName(); - boolean isArchive = true; - - // les fichiers non archive sont archivés - if (CompressedUtils.isNotArchive(fname)) { - fname = fname + ".tar.gz"; - isArchive = false; - } - - // on ajoute un index au nom du fichier toto.tar.gz devient toto#1.tar.gz - if (i > 1) { - fname = 
ManipString.substringBeforeFirst(fname, ".") + "#" + i + "." - + ManipString.substringAfterFirst(fname, "."); - } - - File fileOutArchive = new File(dirArchive + File.separator + fname); - - // si le fichier n'existe pas dans le repertoire d'archive - // on le copie dans archive avec son nouveau nom - // on change le nom du fichier initial avec son nouveau nom indexé - // on le déplace dans encours - // on enregistre le fichier dans la table d'archive - // on sort de la boucle d'indexation - if (!fileOutArchive.exists()) { - - if (isArchive) { - // copie dans archive avec le nouveau nom - try { - Files.copy(Paths.get(f.getAbsolutePath()), - Paths.get(fileOutArchive.getAbsolutePath())); - } catch (IOException exception) { - throw new ArcException(exception, ArcExceptionMessage.FILE_COPY_FAILED, f, - fileOutArchive); - } - // déplacer le fichier dans encours - deplacerFichier(dirIn, dirOut, f.getName(), d + "_" + fname); - } else { - // on génére le tar.gz dans archive - CompressedUtils.generateTarGzFromFile(f, fileOutArchive, f.getName()); - // on copie le tar.gz dans encours - File fOut = new File(dirOut + File.separator + d + "_" + fname); - try { - Files.copy(Paths.get(fileOutArchive.getAbsolutePath()), - Paths.get(fOut.getAbsolutePath())); - } catch (IOException exception) { - throw new ArcException(exception, ArcExceptionMessage.FILE_COPY_FAILED, - fileOutArchive, fOut); - } - // on efface le fichier source - FileUtilsArc.delete(f); - } - - fileSize = fileSize + (int) (fileOutArchive.length() / 1024 / 1024); - - // - GenericBean archiveContentTemp = checkArchiveFiles( - new File[] { new File(dirOut + File.separator + d + "_" + fname) }); - - if (archivesContent == null) { - archivesContent = archiveContentTemp; - } else { - archivesContent.content.addAll(archiveContentTemp.content); - } - - fileNb = fileNb + archiveContentTemp.content.size(); - - // enregistrer le fichier - UtilitaireDao.get(0).executeBlock(this.connexion.getCoordinatorConnection(), - "INSERT 
INTO " + TableNaming.dbEnv(this.envExecution) - + "pilotage_archive (entrepot,nom_archive) values ('" + d + "','" - + fname + "'); "); - break; - } - - } - - } - } - } - } - return archivesContent; - } - - /** - * Checks the content of the archive and returns it. - * - * @param filesIn the archives - * @return a GenericBean describing the archive - */ - private GenericBean checkArchiveFiles(File[] filesIn) { - ArrayList> content = new ArrayList<>(); - ArrayList l; - for (File f : filesIn) { - String entrepot = ManipString.substringBeforeFirst(f.getName(), "_") + "_"; - if (f.getName().endsWith(".tar.gz") || f.getName().endsWith(".tgz")) { - content.addAll(checkTgzArchive(f, entrepot)); - } else if (f.getName().endsWith(".zip")) { - content.addAll(checkZipArchive(f, entrepot)); - } else if (f.getName().endsWith(".gz")) { - content.addAll(checkGzArchive(f)); - } else {// cas rebus, hors tar.gz, zip et gz - l = new ArrayList<>(); - l.add(f.getName() + ".tar.gz"); - l.add(f.getName()); - l.add(TraitementTypeFichier.D.toString()); - l.add(TraitementEtat.OK.toString()); - l.add(null); - l.add(null); - StaticLoggerDispatcher.info(LOGGER, "Insertion du cas rebus : " + l.toString()); - content.add(l); - } - } - - return new GenericBean(GENERIC_BEAN_HEADERS, GENERIC_BEAN_TYPES, content); - } - - /** - * Check every file in the tgz archive and returns the archive content. 
- */ - private ArrayList> checkTgzArchive(File f, String entrepot) { - ArrayList l; - // Inscription des fichiers au contenu de l'archive - ArrayList> contentTemp = new ArrayList<>(); - int erreur = 0; - String rapport = null; - String etat = null; - // Check if the archive is fully readable - try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(f), READ_BUFFER_SIZE); - GZIPInputStream gzis = new GZIPInputStream(fis); - TarInputStream tarInput = new TarInputStream(gzis);) { - erreur = 1; - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString(); - TarEntry currentEntry = tarInput.getNextEntry(); - // Check every entry - while (currentEntry != null) { - if (currentEntry.isDirectory()) { - currentEntry = tarInput.getNextEntry(); - } else { - l = new ArrayList<>(); - l.add(f.getName()); - l.add(entrepot + currentEntry.getName()); - l.add(TraitementTypeFichier.DA.toString()); - // Check if the entry is readable (by calling nextEntry) - erreur = 0; - etat = TraitementEtat.OK.toString(); - rapport = null; - try { - currentEntry = tarInput.getNextEntry(); - } catch (IOException e) { - erreur = 2; - etat = TraitementEtat.KO.toString(); - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ENTRY.toString(); - currentEntry = null; - } - l.add(etat); - l.add(rapport); - l.add(null); - contentTemp.add(l); - rapport = null; - } - } - } catch (IOException e1) { - erreur = 1; - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString(); - } - - // Inscription de l'archive - l = new ArrayList<>(); - l.add(f.getName()); - l.add(null); - if (erreur == 1) { - l.add(TraitementTypeFichier.AC.toString()); - } else { - l.add(TraitementTypeFichier.A.toString()); - } - if (erreur > 0) { - l.add(TraitementEtat.KO.toString()); - } else { - l.add(TraitementEtat.OK.toString()); - } - l.add(rapport); - l.add(null); - contentTemp.add(l); - propagateErrorToAllFiles(contentTemp, erreur); - return contentTemp; - } - - /** - * Check every file in 
the zip archive and returns the archive content. - */ - private ArrayList> checkZipArchive(File f, String entrepot) { - // Inscription des fichiers au contenu de l'archive - ArrayList> contentTemp = new ArrayList<>(); - ArrayList l; - int erreur = 0; - String rapport = null; - String etat = null; - // Check if the archive is fully readable - try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(f), READ_BUFFER_SIZE); - ZipArchiveInputStream tarInput = new ZipArchiveInputStream(fis);) { - // Check every entry - erreur = 1; - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString(); - ZipArchiveEntry currentEntry = tarInput.getNextZipEntry(); - while (currentEntry != null) { - l = new ArrayList<>(); - l.add(f.getName()); - l.add(entrepot + currentEntry.getName()); - l.add(TraitementTypeFichier.DA.toString()); - erreur = 0; - etat = TraitementEtat.OK.toString(); - rapport = null; - // Check if the entry is readable (by calling nextEntry) - // If not, currentEntry = null to stop the loop - try { - currentEntry = tarInput.getNextZipEntry(); - } catch (IOException e) { - erreur = 2; - etat = TraitementEtat.KO.toString(); - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ENTRY.toString(); - currentEntry = null; - } - l.add(etat); - l.add(rapport); - l.add(null); - contentTemp.add(l); - rapport = null; - } - } catch (IOException e1) { - erreur = 1; - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString(); - } - // Inscription de l'archive - l = new ArrayList<>(); - l.add(f.getName()); - l.add(null); - if (erreur == 1) { - l.add(TraitementTypeFichier.AC.toString()); - } else { - l.add(TraitementTypeFichier.A.toString()); - } - if (erreur > 0) { - l.add(TraitementEtat.KO.toString()); - } else { - l.add(TraitementEtat.OK.toString()); - } - l.add(rapport); - l.add(null); - contentTemp.add(l); - propagateErrorToAllFiles(contentTemp, erreur); - return contentTemp; - } - - /** - * Check every file in the gzip archive and 
returns the archive content. - */ - private ArrayList> checkGzArchive(File f) { - // Inscription des fichier au contenu de l'archive - ArrayList> contentTemp = new ArrayList<>(); - ArrayList l; - int erreur = 0; - String rapport = null; - String etat = null; - // Check if the archive is fully readable - try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(f), READ_BUFFER_SIZE); - GZIPInputStream tarInput = new GZIPInputStream(fis);) { - - l = new ArrayList<>(); - l.add(f.getName()); - l.add(ManipString.substringBeforeLast(f.getName(), ".gz")); - l.add(TraitementTypeFichier.DA.toString()); - // Check every entry - try { - tarInput.read(); - erreur = 0; - etat = TraitementEtat.OK.toString(); - rapport = null; - } catch (IOException e) { - erreur = 2; - etat = TraitementEtat.KO.toString(); - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ENTRY.toString(); - } - l.add(etat); - l.add(rapport); - l.add(null); - contentTemp.add(l); - rapport = null; - } catch (IOException e1) { - erreur = 1; - rapport = TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString(); - } - // Inscription de l'archive - l = new ArrayList<>(); - l.add(f.getName()); - l.add(null); - if (erreur == 1) { - l.add(TraitementTypeFichier.AC.toString()); - } else { - l.add(TraitementTypeFichier.A.toString()); - } - if (erreur > 0) { - l.add(TraitementEtat.KO.toString()); - } else { - l.add(TraitementEtat.OK.toString()); - } - l.add(rapport); - l.add(null); - contentTemp.add(l); - propagateErrorToAllFiles(contentTemp, erreur); - return contentTemp; - } - - /** - * If there is any error, all files are marked KO. Otherwise, all files are - * marked OK. 
- */ - private void propagateErrorToAllFiles(ArrayList> archiveContent, int erreur) { - for (ArrayList fileInfo : archiveContent) { - if (erreur > 0) { - fileInfo.set(GENERIC_BEAN_HEADERS.indexOf(GB_STATE), TraitementEtat.KO.toString()); - if (fileInfo.get(GENERIC_BEAN_HEADERS.indexOf(GB_REPORT)) == null) { - fileInfo.set(GENERIC_BEAN_HEADERS.indexOf(GB_REPORT), - TraitementRapport.INITIALISATION_FICHIER_OK_ARCHIVE_KO.toString()); - } - } else { - fileInfo.set(GENERIC_BEAN_HEADERS.indexOf(GB_STATE), TraitementEtat.OK.toString()); - } - } - } - - /** - * Enregistrer les fichiers en entrée Déplacer les fichier reçus dans les - * repertoires OK ou pas OK selon le bordereau Supprimer les fichiers déjà - * existants de la table de pilotage Marquer les fichiers dans la table de - * pilotage - */ - private void registerAndDispatchFiles(Connection connexion, GenericBean archiveContent) { - StaticLoggerDispatcher.info(LOGGER, "registerAndDispatchFiles"); - // la bean (fileName,type, etat) contient pour chaque fichier, le type - // du fichier et l'action à réaliser - GenericBean g = findDuplicates(archiveContent); - - try { - - StringBuilder requete = new StringBuilder(); - requete.append(TableOperations.creationTableResultat(this.tablePil, this.tablePilTemp)); - soumettreRequete(requete); - - if (!g.content.isEmpty()) { - String dirIn = ApiReceptionService.directoryReceptionEtatEnCours(this.directoryRoot, this.envExecution); - for (int i = 0; i < g.content.size(); i++) { - String container = g.content.get(i).get(g.getHeaders().indexOf(GB_CONTAINER)); - String v_container = g.content.get(i).get(g.getHeaders().indexOf(GB_VCONTAINER)); - String fileName = g.content.get(i).get(g.getHeaders().indexOf(GB_FILENAME)); - String type = g.content.get(i).get(g.getHeaders().indexOf(GB_TYPE)); - String etat = g.content.get(i).get(g.getHeaders().indexOf(GB_STATE)); - String rapport = g.content.get(i).get(g.getHeaders().indexOf(GB_REPORT)); - String containerNewName = 
buildContainerName(container); - if (type.equals(TraitementTypeFichier.DA.toString())) { - insertPilotage(requete, this.tablePilTemp, container, containerNewName, v_container, fileName, - etat, rapport); - } - if (type.equals(TraitementTypeFichier.A.toString())) { - String dirOut = ApiReceptionService.directoryReceptionEtat(this.directoryRoot, - this.envExecution, TraitementEtat.valueOf(etat)); - deplacerFichier(dirIn, dirOut, container, containerNewName); - } - if (type.equals(TraitementTypeFichier.AC.toString())) { - String dirOut = ApiReceptionService.directoryReceptionEtat(this.directoryRoot, - this.envExecution, TraitementEtat.valueOf(etat)); - deplacerFichier(dirIn, dirOut, container, containerNewName); - insertPilotage(requete, this.tablePilTemp, container, containerNewName, v_container, fileName, - etat, rapport); - } - // pour les fichier seul, on en fait une archive - if (type.equals(TraitementTypeFichier.D.toString())) { - // en termes de destination, les fichiers seuls vont tout le temps dans - // RECEPTION_OK, même s'ils sont KO pour la table - // de pilotage - String dirOut = ApiReceptionService.directoryReceptionEtatOK(this.directoryRoot, - this.envExecution); - File fileIn = new File(dirIn + File.separator + fileName); - File fileOut = new File(dirOut + File.separator + containerNewName); - - if (fileOut.exists()) { - FileUtilsArc.delete(fileOut); - } - CompressedUtils.generateTarGzFromFile(fileIn, fileOut, - ManipString.substringAfterFirst(fileIn.getName(), "_")); - FileUtilsArc.delete(fileIn); - insertPilotage(requete, this.tablePilTemp, container, containerNewName, v_container, fileName, - etat, rapport); - - } - } - requete.append(";"); - soumettreRequete(requete); - - StringBuilder query= new StringBuilder("select distinct " + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePil - + " a where to_delete='R' and exists (select 1 from " + this.tablePilTemp + " b where a." - + ColumnEnum.ID_SOURCE.getColumnName() + "=b." 
+ ColumnEnum.ID_SOURCE.getColumnName() - + ")"); - - List idSourceToBeDeleted = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, new ArcPreparedStatementBuilder(query))).mapContent().get(ColumnEnum.ID_SOURCE.getColumnName()); - - if (idSourceToBeDeleted!=null) { - // marque les fichiers à effacer (ils vont etre rechargés) - requete.append("CREATE TEMPORARY TABLE a_rejouer " + FormatSQL.WITH_NO_VACUUM +" AS "); - requete.append(query); - requete.append(";"); - - // effacer de la table pilotage des to_delete à R - requete.append("DELETE FROM " + this.tablePil + " a using a_rejouer b where a." - + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() - + "; "); - } - - // pb des archives sans nom de fichier - requete.append("UPDATE " + this.tablePilTemp + " set " + ColumnEnum.ID_SOURCE.getColumnName() - + "='' where " + ColumnEnum.ID_SOURCE.getColumnName() + " is null; "); - requete.append("INSERT INTO " + this.tablePil + " select * from " + this.tablePilTemp + "; \n"); - requete.append("DISCARD TEMP; \n"); - soumettreRequete(requete); - - if (idSourceToBeDeleted!=null) { - SynchronizeDataByPilotage synchronizationInstance = new SynchronizeDataByPilotage(this.coordinatorSandbox); - synchronizationInstance.dropUnusedDataTablesAllNods(idSourceToBeDeleted); - synchronizationInstance.deleteUnusedDataRecordsAllNods(idSourceToBeDeleted); - } - - } - } catch (Exception ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "registerFiles()", LOGGER, ex); - } - } - - - private String buildContainerName(String container) { - String newContainerName = ""; - newContainerName = ""; - if (container.endsWith(".tar.gz")) { - newContainerName = normalizeContainerName(container, ".tar.gz"); - } else if (container.endsWith(".tgz")) { - newContainerName = normalizeContainerName(container, ".tgz"); - } else if (container.endsWith(".zip")) { - newContainerName = normalizeContainerName(container, ".zip"); - } else if (container.endsWith(".gz")) { 
- newContainerName = normalizeContainerName(container, ".gz"); - } else if (container.endsWith(".tar")) { - newContainerName = normalizeContainerName(container, ".tar"); - } - return newContainerName; - } - - private String normalizeContainerName(String container, String extension) { - return ManipString.substringBeforeLast(container, extension) + extension; - } - - private void soumettreRequete(StringBuilder requete) { - try { - UtilitaireDao.get(0).executeImmediate(this.connexion.getCoordinatorConnection(), requete); - } catch (ArcException ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "soumettreRequete()", LOGGER, ex); - } - requete.setLength(0); - } - - private void insertPilotage(StringBuilder requete, String tablePilotage, String originalContainer, - String newContainer, String v_container, String fileName, String etat, String rapport) { - Date d = new Date(); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd:HH"); - SimpleDateFormat formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss"); - - // si ko, etape vaut 2 - String etape = etat.equals(TraitementEtat.KO.toString()) ? 
"2" : "1"; - - if (requete.length() == 0) { - requete.append("INSERT INTO " + tablePilotage + " "); - requete.append("(o_container, container, v_container, " + ColumnEnum.ID_SOURCE.getColumnName() - + ", date_entree,phase_traitement,etat_traitement,date_traitement, rapport, nb_enr, etape) VALUES "); - } else { - requete.append("\n,"); - } - requete.append(" (" + FormatSQL.cast(originalContainer) + "," + FormatSQL.cast(newContainer) + "," - + FormatSQL.cast(v_container) + ", " + FormatSQL.cast(fileName) + "," - + FormatSQL.cast(dateFormat.format(d)) + "," + FormatSQL.cast(TraitementPhase.RECEPTION.toString()) - + "," + FormatSQL.cast("{" + etat + "}") + "," + "to_timestamp(" + FormatSQL.cast(formatter.format(d)) - + ",'" + ApiService.DATABASE_DATE_FORMAT + "')" + "," + FormatSQL.cast(rapport) + ",1," + etape + ") "); - } - - /** - * Deplacer un fichier d'un repertoire source vers répertoire cible (pas de - * slash en fin du nom de repertoire) Si le fichier existe déjà, il est écrasé - * - * @param dirIn , répertoire en entrée, pas de slash à la fin - * @param dirOut , répertoire en sortie, pas de slash à la fin - * @param FileName , nom du fichier - * @throws ArcException - */ - public static void deplacerFichier(String dirIn, String dirOut, String fileNameIn, String fileNameOut) - throws ArcException { - if (!dirIn.equals(dirOut)) { - File fileIn = new File(dirIn + File.separator + fileNameIn); - File fileOut = new File(dirOut + File.separator + fileNameOut); - if (fileOut.exists()) { - FileUtilsArc.delete(fileOut); - } - FileUtilsArc.renameTo(fileIn, fileOut); - } - } - - /** - * Find the duplicates files in the database - * - * @param fileList - * @return - */ - private GenericBean findDuplicates(GenericBean fileList) { - ArrayList headers = fileList.getHeaders(); - ArrayList types = fileList.getTypes(); - ArrayList> content = fileList.content; - - // Localiser les doublons - // Note : l'insertion est redondante mais au niveau métier, c'est - // beaucoup plus 
logique - StaticLoggerDispatcher.info(LOGGER, "Recherche de doublons de fichiers"); - - StringBuilder requete = new StringBuilder(); - requete.append(TableOperations.creationTableResultat(this.tablePil, this.tablePilTemp)); - String fileName; - String container; - String type; - // insertion des fichiers dans la table tablePilTemp - for (int i = 0; i < content.size(); i++) { - container = content.get(i).get(headers.indexOf(GB_CONTAINER)); - fileName = content.get(i).get(headers.indexOf(GB_FILENAME)); - if (fileName != null) { - requete.append( - "insert into " + this.tablePilTemp + " (container, " + ColumnEnum.ID_SOURCE.getColumnName() - + ") values (" + FormatSQL.cast(container) + "," + FormatSQL.cast(fileName) + "); \n"); - } - } - soumettreRequete(requete); - // detection des doublons de fichiers sur les id_source juste insérés - // faut comparer les id_sources en retirant le #nnn représentant le numéro de - // l'archive (on utilise le regexp_replace pour retirer le #nnn) - - requete.append("select container, " + ColumnEnum.ID_SOURCE.getColumnName() + " FROM " + this.tablePilTemp - + " where " + ColumnEnum.ID_SOURCE.getColumnName() + " in ( "); - requete.append("select distinct " + ColumnEnum.ID_SOURCE.getColumnName() + " from ( "); - requete.append("select " + ColumnEnum.ID_SOURCE.getColumnName() + ", count(1) over (partition by " - + ColumnEnum.ID_SOURCE.getColumnName() + ") as n from " + this.tablePilTemp + " "); - requete.append(") ww where n>1 "); - requete.append(") "); - // detection des doublons de fichiers dans la table de pilotage - requete.append("UNION "); - requete.append( - "SELECT container, " + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePilTemp + " a "); - requete.append("where exists (select 1 from " + this.tablePil + " b where a." - + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() + ") \n"); - requete.append("and a." 
+ ColumnEnum.ID_SOURCE.getColumnName() + " not in (select distinct " - + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePil + " b where b.to_delete='R') ;\n"); - - // récupérer les doublons pour mettre à jour le dispatcher - try { - ArrayList listIdsourceDoublons = new GenericBean(UtilitaireDao.get(0).executeRequest( - this.connexion.getCoordinatorConnection(), new ArcPreparedStatementBuilder(requete))).mapContent() - .get(ColumnEnum.ID_SOURCE.getColumnName()); - - // on va parcourir la liste des fichiers - // si on retrouve l'id_source dans la liste, on le marque en erreur - if (listIdsourceDoublons != null) { - for (ArrayList z : content) { - // si le nom de fichier est renseigné et retrouvé dans la liste - // on passe l'état à KO et on marque l'anomalie - if (z.get(headers.indexOf(GB_FILENAME)) != null) { - if (listIdsourceDoublons.contains(z.get(headers.indexOf(GB_FILENAME)))) { - z.set(headers.indexOf(GB_STATE), TraitementEtat.KO.toString()); - z.set(headers.indexOf(GB_REPORT), TraitementRapport.INITIALISATION_DUPLICATE.toString()); - } - } - } - } - } catch (ArcException ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "dispatchFiles()", LOGGER, ex); - } - - // on ignore les doublons de l'archive pour les fichiers à rejouer - // on recrée un nouvelle liste en ne lui ajoutant pas ces doublons à ignorer - requete = new StringBuilder(); - requete.append("SELECT container, container||'>'||" + ColumnEnum.ID_SOURCE.getColumnName() + " as " - + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePilTemp + " a "); - requete.append("where exists (select 1 from " + this.tablePil + " b where to_delete='R' and a." - + ColumnEnum.ID_SOURCE.getColumnName() + "=b." 
+ ColumnEnum.ID_SOURCE.getColumnName() + ") ;\n"); - - ArrayList> content2 = new ArrayList<>(); - try { - HashMap> m = new GenericBean(UtilitaireDao.get(0).executeRequest( - this.connexion.getCoordinatorConnection(), new ArcPreparedStatementBuilder(requete))).mapContent(); - ArrayList listContainerARejouer = m.get(GB_CONTAINER); - ArrayList listIdsourceARejouer = m.get(ColumnEnum.ID_SOURCE.getColumnName()); - - if (listIdsourceARejouer == null) { - content2 = content; - } else { - for (ArrayList z : content) { - // si le fichier est dans la liste des doublons à ignorer, on le l'ajoute pas à - // la nouvelle liste - if (z.get(headers.indexOf(GB_FILENAME)) != null) { - if (listContainerARejouer.contains(z.get(headers.indexOf(GB_CONTAINER)))) { - // si on trouve le fichier à rejouer, on l'ajoute; on ignore les autres - if (listIdsourceARejouer.contains( - z.get(headers.indexOf(GB_CONTAINER)) + ">" + z.get(headers.indexOf(GB_FILENAME)))) { - content2.add(z); - } - } else { - content2.add(z); - } - } else { - // bien ajouter les caracteriqtique de l'archive à la nouvelle liste - content2.add(z); - } - } - } - - } catch (ArcException ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "dispatchFiles()", LOGGER, ex); - } - content = content2; - - // detection des doublons d'archive. 
Génération d'un numéro pour - // l'archive en cas de doublon - - requete = new StringBuilder(); - // insertion des fichiers d'archive corrompue dans la table - // tablePilTemp - // on doit aussi leur donner un numéro - for (int i = 0; i < content.size(); i++) { - container = content.get(i).get(headers.indexOf(GB_CONTAINER)); - fileName = content.get(i).get(headers.indexOf(GB_FILENAME)); - type = content.get(i).get(headers.indexOf(GB_TYPE)); - if (type.equals(TraitementTypeFichier.AC.toString())) { - requete.append( - "insert into " + this.tablePilTemp + " (container, " + ColumnEnum.ID_SOURCE.getColumnName() - + ") values (" + FormatSQL.cast(container) + "," + FormatSQL.cast(fileName) + "); \n"); - } - } - soumettreRequete(requete); - - requete.append("select container "); - requete.append(" , coalesce((select max(v_container::integer)+1 from " + this.tablePil - + " b where a.container=b.o_container),1)::text as v_container "); - requete.append( - "from (select distinct container from " + this.tablePilTemp + " where container is not null) a "); - try { - HashMap> m = new GenericBean(UtilitaireDao.get(0).executeRequest( - this.connexion.getCoordinatorConnection(), new ArcPreparedStatementBuilder(requete))).mapContent(); - ArrayList listContainerDoublons = m.get(GB_CONTAINER); - ArrayList listVersionContainerDoublons = m.get(GB_VCONTAINER); - if (listContainerDoublons != null) { - for (ArrayList z : content) { - container = z.get(headers.indexOf(GB_CONTAINER)); - if (container != null) { - z.set(headers.indexOf(GB_VCONTAINER), - listVersionContainerDoublons.get(listContainerDoublons.indexOf(container))); - } - } - } - } catch (ArcException ex) { - LoggerHelper.errorGenTextAsComment(getClass(), "dispatchFiles()", LOGGER, ex); + FilesDescriber archiveContent = archiveRegistration.moveAndCheckClientFiles(); + this.setReportNumberOfObject(archiveRegistration.getFileNb()); + + if (! 
archiveContent.getFilesAttribute().isEmpty()) { + new FileRegistration(this.coordinatorSandbox, this.tablePilTemp) + .registerAndDispatchFiles(archiveContent); } - requete.setLength(0); - requete.append(FormatSQL.dropTable(this.tablePilTemp)); - soumettreRequete(requete); - return new GenericBean(headers, types, content); - } - - /** - * Methods to provide directories paths - * - * @param rootDirectory - * @param env - * @return - */ - public static String directoryReceptionRoot(String rootDirectory, String env) { - return FileSystemManagement.directoryPhaseRoot(rootDirectory, env, TraitementPhase.RECEPTION); - } - - public static String directoryReceptionEntrepot(String rootDirectory, String env, String entrepot) { - return FileSystemManagement.directoryPhaseEntrepot(rootDirectory, env, TraitementPhase.RECEPTION, - entrepot); - } - - public static String directoryReceptionEntrepotArchive(String rootDirectory, String env, String entrepot) { - return FileSystemManagement.directoryPhaseEntrepotArchive(rootDirectory, env, TraitementPhase.RECEPTION, - entrepot); - } - - public static String directoryReceptionEntrepotArchiveOld(String rootDirectory, String env, String entrepot) { - return FileSystemManagement.directoryPhaseEntrepotArchiveOld(rootDirectory, env, - TraitementPhase.RECEPTION, entrepot); - } - - public static String directoryReceptionEntrepotArchiveOldYearStamped(String rootDirectory, String env, String entrepot) { - return directoryReceptionEntrepotArchiveOld(rootDirectory, env, entrepot) + File.separator + Year.now().getValue(); - } - - public static String directoryReceptionEtat(String rootDirectory, String env, TraitementEtat e) { - return FileSystemManagement.directoryPhaseEtat(rootDirectory, env, TraitementPhase.RECEPTION, e); - } - - public static String directoryReceptionEtatOK(String rootDirectory, String env) { - return FileSystemManagement.directoryPhaseEtatOK(rootDirectory, env, TraitementPhase.RECEPTION); - } - - public static String 
directoryReceptionEtatKO(String rootDirectory, String env) { - return FileSystemManagement.directoryPhaseEtatKO(rootDirectory, env, TraitementPhase.RECEPTION); - } - - public static String directoryReceptionEtatEnCours(String rootDirectory, String env) { - return FileSystemManagement.directoryPhaseEtatEnCours(rootDirectory, env, TraitementPhase.RECEPTION); } } diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java new file mode 100644 index 000000000..d9f0d1ae7 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/provider/DirectoryPath.java @@ -0,0 +1,59 @@ +package fr.insee.arc.core.service.p1reception.provider; + +import java.io.File; +import java.time.Year; + +import fr.insee.arc.core.model.TraitementEtat; +import fr.insee.arc.core.model.TraitementPhase; +import fr.insee.arc.core.service.global.dao.FileSystemManagement; + +public class DirectoryPath { + + + /** + * Methods to provide directories paths + * + * @param rootDirectory + * @param env + * @return + */ + public static String directoryReceptionRoot(String rootDirectory, String env) { + return FileSystemManagement.directoryPhaseRoot(rootDirectory, env, TraitementPhase.RECEPTION); + } + + public static String directoryReceptionEntrepot(String rootDirectory, String env, String entrepot) { + return FileSystemManagement.directoryPhaseEntrepot(rootDirectory, env, TraitementPhase.RECEPTION, + entrepot); + } + + public static String directoryReceptionEntrepotArchive(String rootDirectory, String env, String entrepot) { + return FileSystemManagement.directoryPhaseEntrepotArchive(rootDirectory, env, TraitementPhase.RECEPTION, + entrepot); + } + + public static String directoryReceptionEntrepotArchiveOld(String rootDirectory, String env, String entrepot) { + return FileSystemManagement.directoryPhaseEntrepotArchiveOld(rootDirectory, env, + 
TraitementPhase.RECEPTION, entrepot); + } + + public static String directoryReceptionEntrepotArchiveOldYearStamped(String rootDirectory, String env, String entrepot) { + return directoryReceptionEntrepotArchiveOld(rootDirectory, env, entrepot) + File.separator + Year.now().getValue(); + } + + public static String directoryReceptionEtat(String rootDirectory, String env, TraitementEtat e) { + return FileSystemManagement.directoryPhaseEtat(rootDirectory, env, TraitementPhase.RECEPTION, e); + } + + public static String directoryReceptionEtatOK(String rootDirectory, String env) { + return FileSystemManagement.directoryPhaseEtatOK(rootDirectory, env, TraitementPhase.RECEPTION); + } + + public static String directoryReceptionEtatKO(String rootDirectory, String env) { + return FileSystemManagement.directoryPhaseEtatKO(rootDirectory, env, TraitementPhase.RECEPTION); + } + + public static String directoryReceptionEtatEnCours(String rootDirectory, String env) { + return FileSystemManagement.directoryPhaseEtatEnCours(rootDirectory, env, TraitementPhase.RECEPTION); + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java new file mode 100644 index 000000000..3c858f7ce --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/ArchiveRegistration.java @@ -0,0 +1,189 @@ +package fr.insee.arc.core.service.p1reception.registerarchive; + +import java.io.File; +import java.util.Arrays; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.insee.arc.core.service.global.bo.Sandbox; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.GzReader; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.TgzReader; 
+import fr.insee.arc.core.service.p1reception.registerarchive.bo.ZipReader; +import fr.insee.arc.core.service.p1reception.registerarchive.dao.DirectoriesDao; +import fr.insee.arc.core.service.p1reception.registerarchive.dao.MoveFilesToRegisterDao; +import fr.insee.arc.core.service.p1reception.registerarchive.operation.ArchiveCheckOperation; +import fr.insee.arc.core.service.p1reception.registerarchive.operation.ReworkArchiveOperation; +import fr.insee.arc.core.util.StaticLoggerDispatcher; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.exception.ArcExceptionMessage; +import fr.insee.arc.utils.files.CompressionExtension; +import fr.insee.arc.utils.files.FileUtilsArc; +import fr.insee.arc.utils.utils.ManipString; + +public class ArchiveRegistration { + + private static final Logger LOGGER = LogManager.getLogger(ArchiveRegistration.class); + + private Sandbox sandbox; + + private DirectoriesDao directories; + + private FilesDescriber selectedArchives; + + // max total size of selected files + private int fileSizeLimit; + // max total number of selected files + private int maxNumberOfFiles; + + public ArchiveRegistration(Sandbox sandbox, int fileSizeLimit, int maxNumberOfFiles) { + super(); + this.sandbox = sandbox; + this.fileSizeLimit = fileSizeLimit; + this.maxNumberOfFiles = maxNumberOfFiles; + this.directories = new DirectoriesDao(sandbox); + } + + // current size of files registered + private int fileSize = 0; + // current number of files registered + private int fileNb = 0; + + /** + * Initialize the application directories if needed List the the files received + * and start to move to the processing directory + * + * @param fileSizeLimit + * @param maxNumberOfFiles + * @return + * @throws ArcException + */ + public FilesDescriber moveAndCheckClientFiles() throws ArcException { + StaticLoggerDispatcher.info(LOGGER, "moveAndCheckClientFiles"); + + // create and register sandbox directories + directories.createSandboxDirectories(); + 
+ this.selectedArchives = new FilesDescriber(); + + List entrepotList = MoveFilesToRegisterDao.execQuerySelectDatawarehouses(sandbox.getConnection()); + + if (!entrepotList.isEmpty()) { + moveAndCheckUntilLimit(entrepotList); + } + + return selectedArchives; + } + + /** + * Moves files into RECEPTION_ENCOURS directory, check if the archives are + * readable and returns a description of the content of all treated files. + */ + private void moveAndCheckUntilLimit(List entrepotIdList) throws ArcException { + + StaticLoggerDispatcher.info(LOGGER, "Taille limite de fichiers à charger : " + fileSizeLimit); + + for (String entrepot : entrepotIdList) { + + if (isFileRegisteringFinished()) { + break; + } + + // create and register datawarehouse sandbox directories (entrepot) + directories.createSandboxDatawarehouseDirectories(entrepot); + + selectFilesInDatawarehouse(entrepot); + } + + } + + /** + * Check condition to end the file registering Condition is checked when the + * size of the files selected or the number of files selected exceed the given + * limits + * + * @return + */ + private boolean isFileRegisteringFinished() { + return (fileSize > fileSizeLimit || fileNb > maxNumberOfFiles); + } + + private void selectFilesInDatawarehouse(String entrepot) throws ArcException { + + File fDirIn = new File(directories.getDirEntrepotIn()); + // vérifier le type (répertoire) + if (fDirIn.isDirectory()) { + + File[] filesDirIn = fDirIn.listFiles(); + + // trier par nom + Arrays.sort(filesDirIn, (f1, f2) -> f1.getName().compareTo(f2.getName())); + + for (File f : filesDirIn) { + + // ignorer le fichier s'il est en cours d'ecriture + if (!FileUtilsArc.isCompletelyWritten(f)) { + continue; + } + + if (isFileRegisteringFinished()) { + break; + } + + ReworkArchiveOperation reworkInstance = new ReworkArchiveOperation(directories, entrepot, f); + + reworkInstance.qualifyAndRename(); + + reworkInstance.reworkArchive(); + + this.fileSize += reworkInstance.getReworkedArchiveSize(); + + 
FilesDescriber selectedArchive = checkArchiveFiles(reworkInstance.getReworkedArchiveFile()); + + selectedArchives.addAll(selectedArchive); + + fileNb = fileNb + selectedArchive.getFilesAttribute().size(); + + // enregistrer le fichier + + MoveFilesToRegisterDao.registerArchive(sandbox, entrepot, reworkInstance.getReworkedArchiveName()); + + } + } + } + + /** + * Select some archive, check them and return a report as FilesDescribers + * + * @param filesIn the archives + * @return a GenericBean describing the archive + * @throws ArcException + */ + private FilesDescriber checkArchiveFiles(File f) throws ArcException { + + FilesDescriber content = new FilesDescriber(); + + String entrepot = ManipString.substringBeforeFirst(f.getName(), "_") + "_"; + + if (f.getName().endsWith(CompressionExtension.TAR_GZ.getFileExtension()) + || f.getName().endsWith(CompressionExtension.TGZ.getFileExtension())) { + content.addAll(new ArchiveCheckOperation(new TgzReader()).checkArchive(f, entrepot)); + } else if (f.getName().endsWith(CompressionExtension.ZIP.getFileExtension())) { + content.addAll(new ArchiveCheckOperation(new ZipReader()).checkArchive(f, entrepot)); + } else if (f.getName().endsWith(CompressionExtension.GZ.getFileExtension())) { + content.addAll(new ArchiveCheckOperation(new GzReader()).checkArchive(f, entrepot)); + } else { + throw new ArcException(ArcExceptionMessage.INVALID_FILE_FORMAT); + } + + return content; + } + + public int getFileNb() { + return fileNb; + } + + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/Entry.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/Entry.java new file mode 100644 index 000000000..e25900727 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/Entry.java @@ -0,0 +1,31 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.bo; + +public class Entry { + + private boolean isDirectory; + + 
private String name; + + public Entry(boolean isDirectory, String name) { + super(); + this.isDirectory = isDirectory; + this.name = name; + } + + public boolean isDirectory() { + return isDirectory; + } + + public void setDirectory(boolean isDirectory) { + this.isDirectory = isDirectory; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/FileDescriber.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/FileDescriber.java new file mode 100644 index 000000000..c823a9f9a --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/FileDescriber.java @@ -0,0 +1,90 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.bo; + +import java.util.Arrays; +import java.util.List; + +import fr.insee.arc.core.model.TraitementEtat; +import fr.insee.arc.core.model.TraitementTypeFichier; + +public class FileDescriber { + + private String containerName; + private String fileName; + private TraitementTypeFichier typeOfFile; + private TraitementEtat etat; + private String report; + private String virtualContainer; + + public FileDescriber(String containerName, String fileName, TraitementTypeFichier typeOfFile, TraitementEtat etat, String report, + String virtualContainer) { + super(); + + this.containerName = containerName; + this.fileName = fileName; + this.typeOfFile = typeOfFile; + this.etat = etat; + this.report = report; + this.virtualContainer = virtualContainer; + } + + public List fileAttributes() { + return Arrays + .asList(containerName, fileName, typeOfFile.toString(), etat.toString(), report, virtualContainer); + } + + public String getContainerName() { + return containerName; + } + + public void setContainerName(String containerName) { + this.containerName = containerName; + } + + public String getFileName() { + return 
fileName; + } + + public void setFileName(String fileName) { + this.fileName = fileName; + } + + public TraitementTypeFichier getTypeOfFile() { + return typeOfFile; + } + + public void setTypeOfFile(TraitementTypeFichier typeOfFile) { + this.typeOfFile = typeOfFile; + } + + public TraitementEtat getEtat() { + return etat; + } + + public void setEtat(TraitementEtat etat) { + this.etat = etat; + } + + public String getReport() { + return report; + } + + public void setReport(String report) { + this.report = report; + } + + public String getVirtualContainer() { + return virtualContainer; + } + + public void setVirtualContainer(String virtualContainer) { + this.virtualContainer = virtualContainer; + } + + @Override + public String toString() + { + return fileAttributes().toString(); + } + + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/FilesDescriber.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/FilesDescriber.java new file mode 100644 index 000000000..0169463d7 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/FilesDescriber.java @@ -0,0 +1,41 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.bo; + +import java.util.ArrayList; +import java.util.List; + +public class FilesDescriber { + + private List filesAttribute; + + public FilesDescriber() { + super(); + filesAttribute = new ArrayList<>(); + } + + // add a file + public void add(FileDescriber fileAttribute) + { + filesAttribute.add(fileAttribute); + } + + // add a list of file + public void addAll(FilesDescriber filesAttribute) + { + this.filesAttribute.addAll(filesAttribute.getFilesAttribute()); + } + + public List getFilesAttribute() { + return filesAttribute; + } + + public void setFilesAttribute(List filesAttribute) { + this.filesAttribute = filesAttribute; + } + + @Override + public String toString() + { + return getFilesAttribute().toString(); + } + +} 
diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/GzReader.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/GzReader.java new file mode 100644 index 000000000..5ee685b4b --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/GzReader.java @@ -0,0 +1,53 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.bo; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.zip.GZIPInputStream; + +import fr.insee.arc.utils.files.CompressedUtils; + +public class GzReader implements IArchiveStream { + + public GzReader() { + super(); + } + + private GZIPInputStream inputStream; + boolean readTest=false; + + private String fileName; + + @Override + public void startInputStream(File f) throws IOException { + this.inputStream = new GZIPInputStream(new BufferedInputStream(new FileInputStream(f), CompressedUtils.READ_BUFFER_SIZE)); + fileName=f.getName(); + } + + @Override + public Entry getEntry() throws IOException { + + if (!readTest) + { + this.inputStream.read(); + readTest=true; + } + else + { + return null; + } + + return new Entry(false, fileName); + } + + @Override + public void close() { + try { + inputStream.close(); + } catch (IOException e) { + + } + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/IArchiveStream.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/IArchiveStream.java new file mode 100644 index 000000000..2c2255ca9 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/IArchiveStream.java @@ -0,0 +1,14 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.bo; + +import java.io.File; +import java.io.IOException; + +public interface IArchiveStream { + + public void startInputStream(File f) throws 
IOException; + + public Entry getEntry() throws IOException; + + public void close(); + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/TgzReader.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/TgzReader.java new file mode 100644 index 000000000..391f694e1 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/TgzReader.java @@ -0,0 +1,47 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.bo; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.zip.GZIPInputStream; + +import org.apache.tools.tar.TarEntry; +import org.apache.tools.tar.TarInputStream; + +import fr.insee.arc.utils.files.CompressedUtils; + +public class TgzReader implements IArchiveStream { + + public TgzReader() { + super(); + } + + private TarInputStream tarInputStream; + + @Override + public void startInputStream(File f) throws IOException { + tarInputStream = new TarInputStream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(f), CompressedUtils.READ_BUFFER_SIZE))); + } + + @Override + public Entry getEntry() throws IOException { + TarEntry currentEntry = tarInputStream.getNextEntry(); + + if (currentEntry == null) { + return null; + } + + return new Entry(currentEntry.isDirectory(), currentEntry.getName()); + } + + @Override + public void close() { + try { + tarInputStream.close(); + } catch (IOException e) { + + } + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/ZipReader.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/ZipReader.java new file mode 100644 index 000000000..e3739361f --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/bo/ZipReader.java @@ -0,0 +1,46 @@ +package 
fr.insee.arc.core.service.p1reception.registerarchive.bo; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; + +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; + +import fr.insee.arc.utils.files.CompressedUtils; + +public class ZipReader implements IArchiveStream { + + public ZipReader() { + super(); + } + + private ZipArchiveInputStream zipInputStream; + + @Override + public void startInputStream(File f) throws IOException { + zipInputStream = new ZipArchiveInputStream(new BufferedInputStream(new FileInputStream(f), CompressedUtils.READ_BUFFER_SIZE)); + } + + @Override + public Entry getEntry() throws IOException { + ZipArchiveEntry currentEntry = zipInputStream.getNextZipEntry(); + + if (currentEntry == null) { + return null; + } + + return new Entry(currentEntry.isDirectory(), currentEntry.getName()); + } + + @Override + public void close() { + try { + zipInputStream.close(); + } catch (IOException e) { + + } + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/DirectoriesDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/DirectoriesDao.java new file mode 100644 index 000000000..7289cd09c --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/DirectoriesDao.java @@ -0,0 +1,100 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.dao; + +import fr.insee.arc.core.service.global.bo.Sandbox; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; +import fr.insee.arc.utils.files.FileUtilsArc; +import fr.insee.arc.utils.ressourceUtils.PropertiesHandler; + +public class DirectoriesDao { + + public DirectoriesDao(Sandbox sandbox) { + this.sandbox = sandbox; + this.directoryRoot = PropertiesHandler.getInstance().getBatchParametersDirectory(); + 
this.dirEnCours = DirectoryPath.directoryReceptionEtatEnCours(directoryRoot, sandbox.getSchema()); + this.dirOK = DirectoryPath.directoryReceptionEtatOK(directoryRoot, sandbox.getSchema()); + this.dirKO = DirectoryPath.directoryReceptionEtatKO(directoryRoot, sandbox.getSchema()); + } + + private String directoryRoot; + private String dirEnCours; + private String dirOK; + private String dirKO; + private String dirEntrepotIn; + private String dirEntrepotArchive; + private Sandbox sandbox; + + /** + * create global sandbox directories if not exist and register their paths in class + */ + public void createSandboxDirectories() { + // Create target directories if they don't exist + FileUtilsArc.createDirIfNotexist(this.dirEnCours); + FileUtilsArc.createDirIfNotexist(this.dirOK); + FileUtilsArc.createDirIfNotexist(this.dirKO); + } + + /** + * create datawarehouse sandbox directories if not exist and register their paths in class + */ + public void createSandboxDatawarehouseDirectories(String entrepot) { + + this.dirEntrepotIn = DirectoryPath.directoryReceptionEntrepot(directoryRoot, sandbox.getSchema(), + entrepot); + this.dirEntrepotArchive = DirectoryPath.directoryReceptionEntrepotArchive(directoryRoot, + sandbox.getSchema(), entrepot); + + // créer le répertoire de l'entrepot et son repertoire archive + FileUtilsArc.createDirIfNotexist(dirEntrepotArchive); + FileUtilsArc.createDirIfNotexist(dirEntrepotIn); + } + + public String getDirectoryRoot() { + return directoryRoot; + } + + public void setDirectoryRoot(String directoryRoot) { + this.directoryRoot = directoryRoot; + } + + public String getDirEnCours() { + return dirEnCours; + } + + public void setDirEnCours(String dirEnCours) { + this.dirEnCours = dirEnCours; + } + + public String getDirOK() { + return dirOK; + } + + public void setDirOK(String dirOK) { + this.dirOK = dirOK; + } + + public String getDirKO() { + return dirKO; + } + + public void setDirKO(String dirKO) { + this.dirKO = dirKO; + } + + public String 
getDirEntrepotIn() { + return dirEntrepotIn; + } + + public void setDirEntrepotIn(String dirEntrepotIn) { + this.dirEntrepotIn = dirEntrepotIn; + } + + public String getDirEntrepotArchive() { + return dirEntrepotArchive; + } + + public void setDirEntrepotArchive(String dirEntrepotArchive) { + this.dirEntrepotArchive = dirEntrepotArchive; + } + + +} \ No newline at end of file diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java new file mode 100644 index 000000000..d7fbf6d2b --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/dao/MoveFilesToRegisterDao.java @@ -0,0 +1,53 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.dao; + +import java.sql.Connection; +import java.util.List; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; +import fr.insee.arc.core.service.global.bo.Sandbox; +import fr.insee.arc.utils.dao.SQL; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.structure.GenericBean; + +public class MoveFilesToRegisterDao { + + /** + * select archive container that had been flagged as to be replayed + * + * @param connection + * @param schema + * @throws ArcException + */ + public static List execQuerySelectDatawarehouses(Connection connection) throws ArcException { + + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.SELECT, ColumnEnum.ID_ENTREPOT, SQL.FROM, ViewEnum.IHM_ENTREPOT.getFullName()); + + return new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query)) + .getColumnValues(ColumnEnum.ID_ENTREPOT.getColumnName()); + + } + + + public static void registerArchive(Sandbox sandbox, 
String entrepot, String archiveName) throws ArcException { + + ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder(); + query.build(SQL.INSERT_INTO, ViewEnum.PILOTAGE_ARCHIVE.getFullName(sandbox.getSchema())); + query.build(query.tupleOfColumn(ColumnEnum.ENTREPOT.getColumnName(), ColumnEnum.NOM_ARCHIVE.getColumnName())); + query.build(SQL.VALUES); + query.build(query.tupleOfValues(entrepot, archiveName)); + query.build(SQL.END_QUERY); + + UtilitaireDao.get(0).executeRequest(sandbox.getConnection(), query); +// + +// UtilitaireDao.get(0).executeBlock(sandbox.getConnection(), +// "INSERT INTO " + TableNaming.dbEnv(sandbox.getSchema()) +// + "pilotage_archive (entrepot,nom_archive) values ('" + entrepot + "','" + reworkInstance.getReworkedArchiveName() +// + "'); "); + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ArchiveCheckOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ArchiveCheckOperation.java new file mode 100644 index 000000000..9b0e99bb1 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ArchiveCheckOperation.java @@ -0,0 +1,119 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.operation; + +import java.io.File; +import java.io.IOException; + +import fr.insee.arc.core.model.TraitementEtat; +import fr.insee.arc.core.model.TraitementRapport; +import fr.insee.arc.core.model.TraitementTypeFichier; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.Entry; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.FileDescriber; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.IArchiveStream; + +/** + * Class to check archive + * @author FY2QEQ + * + */ +public class ArchiveCheckOperation { + + private int erreur; + private TraitementEtat etat; + 
private String rapport; + + IArchiveStream archiveStream; + Entry currentEntry; + + + public ArchiveCheckOperation(IArchiveStream archiveStream) { + super(); + this.archiveStream = archiveStream; + } + + /** + * Check every file in the tgz archive and returns the archive content. + */ + public FilesDescriber checkArchive(File f, String entrepot) { + // Inscription des fichiers au contenu de l'archive + + FilesDescriber contentTemp = new FilesDescriber(); + + setStatus(0, null, null); + + // Check if the archive is fully readable + try + { + archiveStream.startInputStream(f); + // default case if archive is empty of real files + setStatus(1, TraitementEtat.KO, TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString()); + + this.currentEntry = archiveStream.getEntry(); + + // Check every entry + while (currentEntry != null) { + + if (currentEntry.isDirectory()) { + currentEntry = archiveStream.getEntry(); + } else { + setStatus(0, TraitementEtat.OK, null); + + String name = currentEntry.getName(); + + validateEntry(); + + contentTemp.add(new FileDescriber(f.getName(), entrepot + name, + TraitementTypeFichier.DA, etat, rapport, null)); + + rapport = null; + } + } + } catch (IOException e1) { + erreur = 1; + rapport = TraitementRapport.INITIALISATION_CORRUPTED_ARCHIVE.toString(); + } + finally + { + archiveStream.close(); + } + + // Inscription de l'archive + contentTemp.add(new FileDescriber(f.getName(), null, + erreur == 1 ? TraitementTypeFichier.AC : TraitementTypeFichier.A, + erreur > 0 ? 
TraitementEtat.KO : TraitementEtat.OK, rapport, null)); + + // If there is any error, all files are marked KO with a special report + if (erreur > 0) { + for (FileDescriber fileInfo : contentTemp.getFilesAttribute()) { + fileInfo.setEtat(TraitementEtat.KO); + if (fileInfo.getReport() == null) { + fileInfo.setReport(TraitementRapport.INITIALISATION_FICHIER_OK_ARCHIVE_KO.toString()); + } + } + } + + return contentTemp; + } + + private void setStatus(int erreur, TraitementEtat etat, String rapport) { + this.erreur = erreur; + this.etat = etat; + this.rapport = rapport; + } + + /** + * validate the entry integrity if entry is invalid, mark error and break the + * loop over entry + * + * @param tarInput + */ + private void validateEntry() { + try { + currentEntry = archiveStream.getEntry(); + } catch (IOException e) { + setStatus(2, TraitementEtat.KO, TraitementRapport.INITIALISATION_CORRUPTED_ENTRY.toString()); + currentEntry = null; + } + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java new file mode 100644 index 000000000..a32bd6ba5 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerarchive/operation/ReworkArchiveOperation.java @@ -0,0 +1,139 @@ +package fr.insee.arc.core.service.p1reception.registerarchive.operation; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +import fr.insee.arc.core.service.p1reception.registerarchive.dao.DirectoriesDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.exception.ArcExceptionMessage; +import fr.insee.arc.utils.files.CompressedUtils; +import fr.insee.arc.utils.files.CompressionExtension; +import fr.insee.arc.utils.files.FileUtilsArc; +import fr.insee.arc.utils.utils.ManipString; + +/** + * This 
class rework the archive that had been received It renames it so that + * old archive with the same name are not overwritten and securely kept It + * convert simple file into archived file + * + * @author FY2QEQ + * + */ +public class ReworkArchiveOperation { + + public ReworkArchiveOperation(DirectoriesDao directories, String entrepot, File inputFile) { + super(); + this.directories = directories; + this.entrepot = entrepot; + this.inputFile = inputFile; + } + + private String entrepot; + private File inputFile; + private DirectoriesDao directories; + + private boolean isArchive; + private File fileOutArchive; + + private int reworkedArchiveSize; + private String reworkedArchiveName; + private File reworkedArchiveFile; + + + /** + * Update that the file is an archive or not Rework the file name not to overlap + * old files + */ + public void qualifyAndRename() { + // Archiver le fichier + // on regarde si le fichier existe déjà dans le repertoire archive; si c'est le + // cas, on va renommer + + for (int i = 1; i < Integer.MAX_VALUE; i++) { + + // on reprend le nom du fichier + reworkedArchiveName = inputFile.getName(); + isArchive = true; + + // les fichiers non archivés sont archivés + if (CompressedUtils.isNotArchive(reworkedArchiveName)) { + reworkedArchiveName = new StringBuilder(reworkedArchiveName).append(CompressionExtension.TAR_GZ.getFileExtension()).toString(); + isArchive = false; + } + + // on ajoute un index au nom du fichier toto.tar.gz devient toto#1.tar.gz + if (i > 1) { + reworkedArchiveName = ManipString.substringBeforeFirst(reworkedArchiveName, ".") + "#" + i + "." 
+ + ManipString.substringAfterFirst(reworkedArchiveName, "."); + } + + fileOutArchive = new File(directories.getDirEntrepotArchive() + File.separator + reworkedArchiveName); + + if (!fileOutArchive.exists()) { + break; + } + } + } + + /** + * rework archive file, save it in the archive directory and move it to the encours directory + * plain not compressed file are compressed before + * @return fileSize : the file size of the reworked archive file + * @throws ArcException + */ + public void reworkArchive() throws ArcException { + // si le fichier n'existe pas dans le repertoire d'archive + // on le copie dans archive avec son nouveau nom + // on change le nom du fichier initial avec son nouveau nom indexé + // on le déplace dans encours + // on enregistre le fichier dans la table d'archive + // on sort de la boucle d'indexation + if (isArchive) { + // copie dans archive avec le nouveau nom + try { + Files.copy(Paths.get(inputFile.getAbsolutePath()), Paths.get(fileOutArchive.getAbsolutePath())); + } catch (IOException exception) { + throw new ArcException(exception, ArcExceptionMessage.FILE_COPY_FAILED, inputFile, fileOutArchive); + } + // déplacer le fichier dans encours + FileUtilsArc.deplacerFichier(directories.getDirEntrepotIn(), directories.getDirEnCours(), + inputFile.getName(), entrepot + "_" + reworkedArchiveName); + } else { + // on génére le tar.gz dans archive + CompressedUtils.generateTarGzFromFile(inputFile, fileOutArchive, inputFile.getName()); + // on copie le tar.gz dans encours + File fOut = new File(directories.getDirEnCours() + File.separator + entrepot + "_" + reworkedArchiveName); + try { + Files.copy(Paths.get(fileOutArchive.getAbsolutePath()), Paths.get(fOut.getAbsolutePath())); + } catch (IOException exception) { + throw new ArcException(exception, ArcExceptionMessage.FILE_COPY_FAILED, fileOutArchive, fOut); + } + // on efface le fichier source + FileUtilsArc.delete(inputFile); + } + + this.reworkedArchiveFile = new 
File(directories.getDirEnCours() + File.separator + entrepot + "_" + reworkedArchiveName); + this.reworkedArchiveSize = (int) (fileOutArchive.length() / 1024 / 1024); + } + + public int getReworkedArchiveSize() { + return reworkedArchiveSize; + } + + public String getReworkedArchiveName() { + return reworkedArchiveName; + } + + public File getReworkedArchiveFile() { + return reworkedArchiveFile; + } + + public void setReworkedArchiveFile(File reworkedArchiveFile) { + this.reworkedArchiveFile = reworkedArchiveFile; + } + + + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java new file mode 100644 index 000000000..103867cf2 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/FileRegistration.java @@ -0,0 +1,351 @@ +package fr.insee.arc.core.service.p1reception.registerfiles; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.Connection; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder; +import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.dataobjects.ViewEnum; +import fr.insee.arc.core.model.TraitementEtat; +import fr.insee.arc.core.model.TraitementRapport; +import fr.insee.arc.core.model.TraitementTypeFichier; +import fr.insee.arc.core.service.global.bo.Sandbox; +import fr.insee.arc.core.service.global.dao.TableNaming; +import fr.insee.arc.core.service.global.dao.TableOperations; +import fr.insee.arc.core.service.p0initialisation.pilotage.SynchronizeDataByPilotage; +import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import 
fr.insee.arc.core.service.p1reception.provider.DirectoryPath; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.FileDescriber; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.FilesDescriber; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.GzReader; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.TgzReader; +import fr.insee.arc.core.service.p1reception.registerarchive.bo.ZipReader; +import fr.insee.arc.core.service.p1reception.registerarchive.dao.DirectoriesDao; +import fr.insee.arc.core.service.p1reception.registerarchive.dao.MoveFilesToRegisterDao; +import fr.insee.arc.core.service.p1reception.registerarchive.operation.ArchiveCheckOperation; +import fr.insee.arc.core.service.p1reception.registerarchive.operation.ReworkArchiveOperation; +import fr.insee.arc.core.service.p1reception.registerfiles.dao.FileRegistrationDao; +import fr.insee.arc.core.service.p1reception.registerfiles.provider.ContainerName; +import fr.insee.arc.core.util.StaticLoggerDispatcher; +import fr.insee.arc.utils.dao.UtilitaireDao; +import fr.insee.arc.utils.exception.ArcException; +import fr.insee.arc.utils.exception.ArcExceptionMessage; +import fr.insee.arc.utils.files.CompressedUtils; +import fr.insee.arc.utils.files.CompressionExtension; +import fr.insee.arc.utils.files.FileUtilsArc; +import fr.insee.arc.utils.structure.GenericBean; +import fr.insee.arc.utils.utils.FormatSQL; +import fr.insee.arc.utils.utils.LoggerHelper; +import fr.insee.arc.utils.utils.ManipString; + +public class FileRegistration { + + private static final Logger LOGGER = LogManager.getLogger(FileRegistration.class); + + public FileRegistration(Sandbox sandbox, String tablePilTemp) { + super(); + this.sandbox = sandbox; + this.tablePilTemp = tablePilTemp; + this.tablePil = ViewEnum.PILOTAGE_FICHIER.getFullName(sandbox.getSchema()); + directories = new DirectoriesDao(sandbox); + } + + private Sandbox sandbox; + private DirectoriesDao directories; + private 
String tablePilTemp; + private String tablePil; + + /** + * Enregistrer les fichiers en entrée Déplacer les fichier reçus dans les + * repertoires OK ou pas OK selon le bordereau Supprimer les fichiers déjà + * existants de la table de pilotage Marquer les fichiers dans la table de + * pilotage + * @throws ArcException + */ + public void registerAndDispatchFiles(FilesDescriber providedArchiveContent) throws ArcException { + StaticLoggerDispatcher.info(LOGGER, "registerAndDispatchFiles"); + + // la bean (fileName,type, etat) contient pour chaque fichier, le type + // du fichier et l'action à réaliser + + FilesDescriber archiveContent = findDuplicates(providedArchiveContent); + + StringBuilder requete = new StringBuilder(); + requete.append(TableOperations.creationTableResultat(this.tablePil, this.tablePilTemp)); + soumettreRequete(requete); + + if (archiveContent.getFilesAttribute().isEmpty()) { + return; + } + String dirIn = directories.getDirEnCours(); + + + for (FileDescriber f : archiveContent.getFilesAttribute()) { + + String containerNewName = ContainerName.buildContainerName(f.getContainerName()); + + if (f.getTypeOfFile().equals(TraitementTypeFichier.DA)) { + FileRegistrationDao.insertPilotage(requete, this.tablePilTemp, f.getContainerName() + , containerNewName, f.getVirtualContainer(), f.getFileName(), + f.getEtat(), f.getReport()); + } + if (f.getTypeOfFile().equals(TraitementTypeFichier.A)) { + String dirOut = DirectoryPath.directoryReceptionEtat(directories.getDirectoryRoot(), + sandbox.getSchema(), f.getEtat()); + FileUtilsArc.deplacerFichier(dirIn, dirOut, f.getContainerName(), containerNewName); + } + if (f.getTypeOfFile().equals(TraitementTypeFichier.AC)) { + String dirOut = DirectoryPath.directoryReceptionEtat(this.directories.getDirectoryRoot(), + sandbox.getSchema(), f.getEtat()); + FileUtilsArc.deplacerFichier(dirIn, dirOut, f.getContainerName(), containerNewName); + FileRegistrationDao.insertPilotage(requete, this.tablePilTemp, 
f.getContainerName(), containerNewName, f.getVirtualContainer(), f.getFileName(), + f.getEtat(), f.getReport()); + } + // pour les fichier seul, on en fait une archive + if (f.getTypeOfFile().equals(TraitementTypeFichier.D)) { + // en termes de destination, les fichiers seuls vont tout le temps dans + // RECEPTION_OK, même s'ils sont KO pour la table + // de pilotage + String dirOut = DirectoryPath.directoryReceptionEtatOK(this.directories.getDirectoryRoot(), + sandbox.getSchema()); + File fileIn = new File(dirIn + File.separator + f.getFileName()); + File fileOut = new File(dirOut + File.separator + containerNewName); + + if (fileOut.exists()) { + FileUtilsArc.delete(fileOut); + } + CompressedUtils.generateTarGzFromFile(fileIn, fileOut, + ManipString.substringAfterFirst(fileIn.getName(), "_")); + FileUtilsArc.delete(fileIn); + FileRegistrationDao.insertPilotage(requete, this.tablePilTemp, f.getContainerName(), containerNewName, f.getVirtualContainer() + , f.getFileName(), f.getEtat(), f.getReport()); + + } + } + requete.append(";"); + soumettreRequete(requete); + + StringBuilder query= new StringBuilder("select distinct " + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePil + + " a where to_delete='R' and exists (select 1 from " + this.tablePilTemp + " b where a." + + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() + + ")"); + + List idSourceToBeDeleted = new GenericBean(UtilitaireDao.get(0).executeRequest(sandbox.getConnection(), new ArcPreparedStatementBuilder(query))).mapContent().get(ColumnEnum.ID_SOURCE.getColumnName()); + + if (idSourceToBeDeleted!=null) { + // marque les fichiers à effacer (ils vont etre rechargés) + requete.append("CREATE TEMPORARY TABLE a_rejouer " + FormatSQL.WITH_NO_VACUUM +" AS "); + requete.append(query); + requete.append(";"); + + // effacer de la table pilotage des to_delete à R + requete.append("DELETE FROM " + this.tablePil + " a using a_rejouer b where a." 
+ + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() + + "; "); + } + + // pb des archives sans nom de fichier + requete.append("UPDATE " + this.tablePilTemp + " set " + ColumnEnum.ID_SOURCE.getColumnName() + + "='' where " + ColumnEnum.ID_SOURCE.getColumnName() + " is null; "); + requete.append("INSERT INTO " + this.tablePil + " select * from " + this.tablePilTemp + "; \n"); + requete.append("DISCARD TEMP; \n"); + soumettreRequete(requete); + + if (idSourceToBeDeleted!=null) { + SynchronizeDataByPilotage synchronizationInstance = new SynchronizeDataByPilotage(this.sandbox); + synchronizationInstance.dropUnusedDataTablesAllNods(idSourceToBeDeleted); + synchronizationInstance.deleteUnusedDataRecordsAllNods(idSourceToBeDeleted); + } + } + + + /** + * Find the duplicates files in the database + * + * @param fileList + * @return + */ + private FilesDescriber findDuplicates(FilesDescriber fileList) { + + + FilesDescriber content = new FilesDescriber(); + content.addAll(fileList); + + // Localiser les doublons + // Note : l'insertion est redondante mais au niveau métier, c'est + // beaucoup plus logique + StaticLoggerDispatcher.info(LOGGER, "Recherche de doublons de fichiers"); + + StringBuilder requete = new StringBuilder(); + requete.append(TableOperations.creationTableResultat(this.tablePil, this.tablePilTemp)); + String fileName; + String container; + + // insertion des fichiers dans la table tablePilTemp + for (FileDescriber f:content.getFilesAttribute()) { + container = f.getContainerName(); + fileName = f.getFileName(); + + + if (fileName != null) { + requete.append( + "insert into " + this.tablePilTemp + " (container, " + ColumnEnum.ID_SOURCE.getColumnName() + + ") values (" + FormatSQL.cast(container) + "," + FormatSQL.cast(fileName) + "); \n"); + } + } + soumettreRequete(requete); + // detection des doublons de fichiers sur les id_source juste insérés + // faut comparer les id_sources en retirant le #nnn représentant le numéro 
de + // l'archive (on utilise le regexp_replace pour retirer le #nnn) + + requete.append("select container, " + ColumnEnum.ID_SOURCE.getColumnName() + " FROM " + this.tablePilTemp + + " where " + ColumnEnum.ID_SOURCE.getColumnName() + " in ( "); + requete.append("select distinct " + ColumnEnum.ID_SOURCE.getColumnName() + " from ( "); + requete.append("select " + ColumnEnum.ID_SOURCE.getColumnName() + ", count(1) over (partition by " + + ColumnEnum.ID_SOURCE.getColumnName() + ") as n from " + this.tablePilTemp + " "); + requete.append(") ww where n>1 "); + requete.append(") "); + // detection des doublons de fichiers dans la table de pilotage + requete.append("UNION "); + requete.append( + "SELECT container, " + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePilTemp + " a "); + requete.append("where exists (select 1 from " + this.tablePil + " b where a." + + ColumnEnum.ID_SOURCE.getColumnName() + "=b." + ColumnEnum.ID_SOURCE.getColumnName() + ") \n"); + requete.append("and a." 
+ ColumnEnum.ID_SOURCE.getColumnName() + " not in (select distinct " + + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePil + " b where b.to_delete='R') ;\n"); + + // récupérer les doublons pour mettre à jour le dispatcher + try { + ArrayList listIdsourceDoublons = new GenericBean(UtilitaireDao.get(0).executeRequest( + sandbox.getConnection(), new ArcPreparedStatementBuilder(requete))).mapContent() + .get(ColumnEnum.ID_SOURCE.getColumnName()); + + // on va parcourir la liste des fichiers + // si on retrouve l'id_source dans la liste, on le marque en erreur + if (listIdsourceDoublons != null) { + for (FileDescriber f: content.getFilesAttribute()) { + // si le nom de fichier est renseigné et retrouvé dans la liste + // on passe l'état à KO et on marque l'anomalie + if (f.getFileName() != null) { + if (listIdsourceDoublons.contains(f.getFileName())) { + f.setEtat(TraitementEtat.KO); + f.setReport(TraitementRapport.INITIALISATION_DUPLICATE.toString()); + } + } + } + } + } catch (ArcException ex) { + LoggerHelper.errorGenTextAsComment(getClass(), "dispatchFiles()", LOGGER, ex); + } + + // on ignore les doublons de l'archive pour les fichiers à rejouer + // on recrée un nouvelle liste en ne lui ajoutant pas ces doublons à ignorer + requete = new StringBuilder(); + requete.append("SELECT container, container||'>'||" + ColumnEnum.ID_SOURCE.getColumnName() + " as " + + ColumnEnum.ID_SOURCE.getColumnName() + " from " + this.tablePilTemp + " a "); + requete.append("where exists (select 1 from " + this.tablePil + " b where to_delete='R' and a." + + ColumnEnum.ID_SOURCE.getColumnName() + "=b." 
+ ColumnEnum.ID_SOURCE.getColumnName() + ") ;\n"); + + FilesDescriber content2 = new FilesDescriber(); + try { + HashMap> m = new GenericBean(UtilitaireDao.get(0).executeRequest( + this.sandbox.getConnection(), new ArcPreparedStatementBuilder(requete))).mapContent(); + ArrayList listContainerARejouer = m.get(ColumnEnum.CONTAINER.getColumnName()); + ArrayList listIdsourceARejouer = m.get(ColumnEnum.ID_SOURCE.getColumnName()); + + if (listIdsourceARejouer == null) { + content2 = content; + } else { + for (FileDescriber z : content.getFilesAttribute()) { + // si le fichier est dans la liste des doublons à ignorer, on le l'ajoute pas à + // la nouvelle liste + if (z.getFileName() != null) { + if (listContainerARejouer.contains(z.getContainerName())) { + // si on trouve le fichier à rejouer, on l'ajoute; on ignore les autres + if (listIdsourceARejouer.contains( + z.getContainerName() + ">" + z.getFileName())) { + content2.add(z); + } + } else { + content2.add(z); + } + } else { + // bien ajouter les caracteriqtique de l'archive à la nouvelle liste + content2.add(z); + } + } + } + + } catch (ArcException ex) { + LoggerHelper.errorGenTextAsComment(getClass(), "dispatchFiles()", LOGGER, ex); + } + content = content2; + + // detection des doublons d'archive. 
Génération d'un numéro pour + // l'archive en cas de doublon + + requete = new StringBuilder(); + // insertion des fichiers d'archive corrompue dans la table + // tablePilTemp + // on doit aussi leur donner un numéro + for (FileDescriber z : content.getFilesAttribute()) { + container = z.getContainerName(); + fileName = z.getFileName(); + + if (z.getTypeOfFile().equals(TraitementTypeFichier.AC)) { + requete.append( + "insert into " + this.tablePilTemp + " (container, " + ColumnEnum.ID_SOURCE.getColumnName() + + ") values (" + FormatSQL.cast(container) + "," + FormatSQL.cast(fileName) + "); \n"); + } + } + soumettreRequete(requete); + + requete.append("select container "); + requete.append(" , coalesce((select max(v_container::integer)+1 from " + this.tablePil + + " b where a.container=b.o_container),1)::text as v_container "); + requete.append( + "from (select distinct container from " + this.tablePilTemp + " where container is not null) a "); + + + try { + HashMap> m = new GenericBean(UtilitaireDao.get(0).executeRequest( + sandbox.getConnection(), new ArcPreparedStatementBuilder(requete))).mapContent(); + ArrayList listContainerDoublons = m.get(ColumnEnum.CONTAINER.getColumnName()); + ArrayList listVersionContainerDoublons = m.get(ColumnEnum.V_CONTAINER.getColumnName()); + if (listContainerDoublons != null) { + for (FileDescriber z : content.getFilesAttribute()) { + container = z.getContainerName(); + if (container != null) { + z.setVirtualContainer(listVersionContainerDoublons.get(listContainerDoublons.indexOf(container))); + } + } + } + } catch (ArcException ex) { + LoggerHelper.errorGenTextAsComment(getClass(), "dispatchFiles()", LOGGER, ex); + } + requete.setLength(0); + requete.append(FormatSQL.dropTable(this.tablePilTemp)); + soumettreRequete(requete); + return content; + } + + private void soumettreRequete(StringBuilder requete) { + try { + UtilitaireDao.get(0).executeImmediate(this.sandbox.getConnection(), requete); + } catch (ArcException ex) { 
+ LoggerHelper.errorGenTextAsComment(getClass(), "soumettreRequete()", LOGGER, ex); + } + requete.setLength(0); + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/dao/FileRegistrationDao.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/dao/FileRegistrationDao.java new file mode 100644 index 000000000..55f528eac --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/dao/FileRegistrationDao.java @@ -0,0 +1,41 @@ +package fr.insee.arc.core.service.p1reception.registerfiles.dao; + +import java.text.SimpleDateFormat; +import java.util.Date; + +import fr.insee.arc.core.dataobjects.ColumnEnum; +import fr.insee.arc.core.model.TraitementEtat; +import fr.insee.arc.core.model.TraitementPhase; +import fr.insee.arc.core.service.global.ApiService; +import fr.insee.arc.utils.utils.FormatSQL; + +public class FileRegistrationDao { + + private FileRegistrationDao() { + throw new IllegalStateException("dao class"); + } + + public static void insertPilotage(StringBuilder requete, String tablePilotage, String originalContainer, + String newContainer, String v_container, String fileName, TraitementEtat etat, String rapport) { + Date d = new Date(); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd:HH"); + SimpleDateFormat formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss"); + + // si ko, etape vaut 2 + String etape = etat.equals(TraitementEtat.KO) ? 
"2" : "1"; + + if (requete.length() == 0) { + requete.append("INSERT INTO " + tablePilotage + " "); + requete.append("(o_container, container, v_container, " + ColumnEnum.ID_SOURCE.getColumnName() + + ", date_entree,phase_traitement,etat_traitement,date_traitement, rapport, nb_enr, etape) VALUES "); + } else { + requete.append("\n,"); + } + requete.append(" (" + FormatSQL.cast(originalContainer) + "," + FormatSQL.cast(newContainer) + "," + + FormatSQL.cast(v_container) + ", " + FormatSQL.cast(fileName) + "," + + FormatSQL.cast(dateFormat.format(d)) + "," + FormatSQL.cast(TraitementPhase.RECEPTION.toString()) + + "," + FormatSQL.cast("{" + etat + "}") + "," + "to_timestamp(" + FormatSQL.cast(formatter.format(d)) + + ",'" + ApiService.DATABASE_DATE_FORMAT + "')" + "," + FormatSQL.cast(rapport) + ",1," + etape + ") "); + } + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/provider/ContainerName.java b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/provider/ContainerName.java new file mode 100644 index 000000000..c766b8830 --- /dev/null +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p1reception/registerfiles/provider/ContainerName.java @@ -0,0 +1,30 @@ +package fr.insee.arc.core.service.p1reception.registerfiles.provider; + +import fr.insee.arc.utils.files.CompressionExtension; +import fr.insee.arc.utils.utils.ManipString; + +public class ContainerName { + + + + public static String buildContainerName(String container) { + String newContainerName = ""; + newContainerName = ""; + if (container.endsWith(CompressionExtension.TAR_GZ.getFileExtension())) { + newContainerName = normalizeContainerName(container, CompressionExtension.TAR_GZ.getFileExtension()); + } else if (container.endsWith(CompressionExtension.TGZ.getFileExtension())) { + newContainerName = normalizeContainerName(container, CompressionExtension.TGZ.getFileExtension()); + } else if 
(container.endsWith(CompressionExtension.ZIP.getFileExtension())) { + newContainerName = normalizeContainerName(container, CompressionExtension.ZIP.getFileExtension()); + } else if (container.endsWith(CompressionExtension.GZ.getFileExtension())) { + newContainerName = normalizeContainerName(container, CompressionExtension.GZ.getFileExtension()); + } + return newContainerName; + } + + private static String normalizeContainerName(String container, String extension) { + return ManipString.substringBeforeLast(container, extension) + extension; + } + + +} diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ArchiveChargerFactory.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ArchiveChargerFactory.java index 56be8eab2..b09c63905 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ArchiveChargerFactory.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/archiveloader/ArchiveChargerFactory.java @@ -9,6 +9,7 @@ import fr.insee.arc.core.model.TypeArchive; import fr.insee.arc.core.util.StaticLoggerDispatcher; +import fr.insee.arc.utils.files.CompressionExtension; /** @@ -38,13 +39,13 @@ private IArchiveFileLoader getChargeur(TypeArchive typeArchive){ public IArchiveFileLoader getChargeur(String container){ StaticLoggerDispatcher.info(LOGGER, "** getChargeur from container**"); IArchiveFileLoader returned = null; - if (container.endsWith(".tar.gz") || container.endsWith(".tgz")) { + if (container.endsWith(CompressionExtension.TAR_GZ.getFileExtension()) || container.endsWith(CompressionExtension.TGZ.getFileExtension())) { returned = getChargeur(TypeArchive.TARGZ); - } else if (container.endsWith(".gz")) { + } else if (container.endsWith(CompressionExtension.GZ.getFileExtension())) { returned = getChargeur(TypeArchive.GZ); - } else if (container.endsWith(".zip")) { + } else if (container.endsWith(CompressionExtension.ZIP.getFileExtension())) { 
returned = getChargeur(TypeArchive.ZIP); } return returned; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java index 270afd6c2..6c4fa4be1 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/engine/ChargeurXmlComplexe.java @@ -194,7 +194,6 @@ public void execution() throws ArcException { handler.connexion = connexion; handler.tempTableA = this.tableTempA; handler.start = 0; - handler.sizeLimit=0; handler.normeCourante = norme; handler.validite = validite; handler.tempTableAColumnsLongName=this.tempTableAColumnsLongName; diff --git a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java index 1d84a1278..066358f27 100644 --- a/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java +++ b/arc-core/src/main/java/fr/insee/arc/core/service/p2chargement/xmlhandler/XMLComplexeHandlerCharger.java @@ -65,9 +65,9 @@ public XMLComplexeHandlerCharger() { // this handler will keep the father reference to handle elements which have the // same name but not the same parent - private String root_father = "*"; + private String rootFather = "*"; - private String father = root_father; + private String father = rootFather; private StringBuilder currentData = new StringBuilder(); /* @@ -98,7 +98,6 @@ public XMLComplexeHandlerCharger() { // indique que la balise courante a des données private boolean hasData = false; - public int sizeLimit; public Norme normeCourante; public String validite; @@ -386,8 +385,8 @@ public void startElement(String uri, String localName, String qName, Attributes } // enregistrement de la 
structure - structure.append(("," + (this.father.equals(root_father) ? ApiService.ROOT : "i_" + this.father)) + " " - + (this.father.equals(root_father) ? "1" : this.col.get(this.father)) + " " + "i_" + this.currentTag); + structure.append(("," + (this.father.equals(rootFather) ? ApiService.ROOT : "i_" + this.father)) + " " + + (this.father.equals(rootFather) ? "1" : this.col.get(this.father)) + " " + "i_" + this.currentTag); if (this.tree.get(this.allCols.indexOf(this.currentTag)).equals(this.allCols.indexOf(this.father)) // cas des bloc multiples @@ -540,13 +539,6 @@ private void insertQueryBuilder(StringBuilder aRequete, String tempTableI, Strin aRequete.append(req); aRequete.append(req2); } - // en production, on peut vouloir limiter la taille du fichier et le faire - // passer en KO - if (sizeLimit > 0) { - if (this.idLigne > sizeLimit) { - throw new SAXParseException("Fichier trop volumineux", "", "", sizeLimit, sizeLimit); - } - } } /** diff --git a/arc-utils/src/main/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilder.java b/arc-utils/src/main/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilder.java index 50fd17cf7..ccaab163b 100644 --- a/arc-utils/src/main/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilder.java +++ b/arc-utils/src/main/java/fr/insee/arc/utils/dao/GenericPreparedStatementBuilder.java @@ -167,7 +167,7 @@ public String quoteNumberWithoutBinding(String p) { // return a tuple of values (val1, val2, ... 
	/**
	 * Returns a parenthesized tuple of column identifiers, e.g. {@code (col1,col2,col3)}.
	 *
	 * Column names are concatenated verbatim (no quoting, no bind parameters),
	 * so callers must only pass trusted SQL identifiers — never user input.
	 *
	 * @param liste the column names to join
	 * @return the tuple expression as a StringBuilder
	 */
	public StringBuilder tupleOfColumn(String...liste) {
		StringBuilder requete = new StringBuilder();
		requete.append("(").append(sqlListeOfColumns(Arrays.asList(liste))).append(")");
		return requete;
	}
	/**
	 * Moves a file from a source directory to a target directory (directory
	 * names without a trailing slash), optionally renaming it. If the target
	 * file already exists, it is deleted first and therefore overwritten.
	 *
	 * NOTE(review): when dirIn equals dirOut nothing happens at all, even if
	 * fileNameIn and fileNameOut differ — confirm callers never rely on a
	 * same-directory rename.
	 *
	 * @param dirIn       source directory, no trailing slash
	 * @param dirOut      target directory, no trailing slash
	 * @param fileNameIn  name of the file in the source directory
	 * @param fileNameOut name of the file in the target directory
	 * @throws ArcException if the existing target cannot be deleted or the move fails
	 */
	public static void deplacerFichier(String dirIn, String dirOut, String fileNameIn, String fileNameOut)
			throws ArcException {
		if (!dirIn.equals(dirOut)) {
			File fileIn = new File(dirIn + File.separator + fileNameIn);
			File fileOut = new File(dirOut + File.separator + fileNameOut);
			if (fileOut.exists()) {
				FileUtilsArc.delete(fileOut);
			}
			FileUtilsArc.renameTo(fileIn, fileOut);
		}
	}
b/arc-ws/src/main/java/fr/insee/arc/ws/services/restServices/execute/ExecuteServiceController.java @@ -27,7 +27,7 @@ import fr.insee.arc.core.service.p0initialisation.dbmaintenance.BddPatcher; import fr.insee.arc.core.service.p0initialisation.filesystem.BuildFileSystem; import fr.insee.arc.core.service.p0initialisation.metadata.SynchronizeUserRulesAndMetadata; -import fr.insee.arc.core.service.p1reception.ApiReceptionService; +import fr.insee.arc.core.service.p1reception.provider.DirectoryPath; import fr.insee.arc.core.util.LoggerDispatcher; import fr.insee.arc.utils.dao.UtilitaireDao; import fr.insee.arc.utils.exception.ArcException; @@ -71,7 +71,7 @@ public ResponseEntity executeServiceClient( if (TraitementPhase.getPhase(bodyPojo.targetPhase).equals(TraitementPhase.RECEPTION)) { - try(FileOutputStream fos=new FileOutputStream(ApiReceptionService.directoryReceptionEntrepot(repertoire, env, warehouse) + File.separator + bodyPojo.fileName)) + try(FileOutputStream fos=new FileOutputStream(DirectoryPath.directoryReceptionEntrepot(repertoire, env, warehouse) + File.separator + bodyPojo.fileName)) { IOUtils.write(bodyPojo.fileContent, fos, StandardCharsets.UTF_8); } @@ -148,7 +148,7 @@ public ResponseEntity executeServiceClient( if (TraitementPhase.getPhase(bodyPojo.targetPhase).equals(TraitementPhase.RECEPTION)) { - try(FileOutputStream fos=new FileOutputStream(ApiReceptionService.directoryReceptionEntrepot(repertoire, env, warehouse) + File.separator + bodyPojo.fileName)) + try(FileOutputStream fos=new FileOutputStream(DirectoryPath.directoryReceptionEntrepot(repertoire, env, warehouse) + File.separator + bodyPojo.fileName)) { IOUtils.write(bodyPojo.fileContent, fos, StandardCharsets.UTF_8); }