Commit
fix: List and Map types
Nolife999 committed Oct 6, 2023
1 parent 16aeabf commit d16359d
Showing 93 changed files with 844 additions and 925 deletions.
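
The pattern repeated across these files is to declare fields, parameters, and return values against the java.util.Map and java.util.List interfaces instead of the concrete HashMap and ArrayList classes, so callers no longer depend on a particular implementation. A minimal sketch of the idea follows; the class and method names are illustrative only and not part of the ARC codebase.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative example of the "program to the interface" refactor applied in this commit.
public class IdSourceRegistry {

    // The field is typed against the interfaces; the concrete implementation is
    // chosen only at construction time and can change without touching callers.
    private final Map<String, List<String>> tabIdSource = new HashMap<>();

    public Map<String, List<String>> getTabIdSource() {
        return tabIdSource;
    }

    public void add(String table, String idSource) {
        // Create the per-table list lazily, then append the id.
        tabIdSource.computeIfAbsent(table, k -> new ArrayList<>()).add(idSource);
    }
}

The same motivation drives the widened return types (ArrayList<String> to List<String>) and the Map<String, List<String>> results received from GenericBean.mapContent() throughout the diff below.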
@@ -2,8 +2,9 @@

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -63,7 +64,7 @@ public abstract class ApiService implements IConstanteNumerique {

protected Boolean todo = false;

protected HashMap<String, ArrayList<String>> tabIdSource;
protected Map<String, List<String>> tabIdSource;

public ApiService() {
super();
@@ -264,7 +265,7 @@ public void finaliser() {
* @param etat
* @return
*/
public HashMap<String, ArrayList<String>> pilotageListIdsource(String tablePilotage, String aCurrentPhase,
public Map<String, List<String>> pilotageListIdsource(String tablePilotage, String aCurrentPhase,
String etat) {
LoggerHelper.info(LOGGER_APISERVICE, "pilotageListIdsource");
ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder();
@@ -425,15 +426,15 @@ private void repriseSurErreur(Connection connexion, String phase, String tablePi
* @return
* @throws ArcException
*/
protected HashMap<String, ArrayList<String>> recuperationIdSource() throws ArcException {
protected Map<String, List<String>> recuperationIdSource() throws ArcException {

ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
query.append("SELECT p." + ColumnEnum.ID_SOURCE.getColumnName() + " ");
query.append("FROM " + this.getTablePilTemp() + " p ");
query.append("ORDER BY " + ColumnEnum.ID_SOURCE.getColumnName());
query.append(";");

HashMap<String, ArrayList<String>> pil = new GenericBean(
Map<String, List<String>> pil = new GenericBean(
UtilitaireDao.get(0).executeRequest(this.connexion.getCoordinatorConnection(), query)).mapContent();

return (pil);
@@ -444,7 +445,7 @@ public String getEnvExecution() {
return envExecution;
}

public HashMap<String, ArrayList<String>> getTabIdSource() {
public Map<String, List<String>> getTabIdSource() {
return tabIdSource;
}

@@ -5,6 +5,8 @@
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -13,6 +15,7 @@
import fr.insee.arc.core.util.StaticLoggerDispatcher;
import fr.insee.arc.utils.dao.UtilitaireDao;
import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.utils.exception.ArcExceptionMessage;
import fr.insee.arc.utils.structure.GenericBean;
import fr.insee.arc.utils.utils.LoggerHelper;
public class JeuDeRegleDao {
@@ -39,7 +42,7 @@ public static ArrayList<JeuDeRegle> recupJeuDeRegle(Connection connexion, String
requete.append("SELECT a.id_norme, a.periodicite, a.validite_inf, a.validite_sup, a.version");
requete.append("\n FROM " + tableJeuDeRegle + " a ");

HashMap<String,ArrayList<String>> g=new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, new ArcPreparedStatementBuilder(requete))).mapContent();
Map<String,List<String>> g=new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, new ArcPreparedStatementBuilder(requete))).mapContent();
return extractRuleSetObjects(g);
}

@@ -67,15 +70,15 @@ public static ArrayList<JeuDeRegle> recupJeuDeRegle(Connection connexion, String
requete.append("\n AND to_date(b.validite,'"+ArcDateFormat.DATE_FORMAT_CONVERSION.getDatastoreFormat()+"')<=a.validite_sup); ");


HashMap<String,ArrayList<String>> g=new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, new ArcPreparedStatementBuilder(requete))).mapContent();
Map<String,List<String>> g=new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, new ArcPreparedStatementBuilder(requete))).mapContent();

ArrayList<JeuDeRegle> listJdr = extractRuleSetObjects(g);

StaticLoggerDispatcher.info(LOGGER, "J'ai trouvé " + listJdr.size() + " jeux de règle, utiles pour controler");
return listJdr;
}

private static ArrayList<JeuDeRegle> extractRuleSetObjects(HashMap<String, ArrayList<String>> g) {
private static ArrayList<JeuDeRegle> extractRuleSetObjects(Map<String, List<String>> g) throws ArcException {
SimpleDateFormat formatDate = new SimpleDateFormat(ArcDateFormat.DATE_FORMAT_CONVERSION.getApplicationFormat());
ArrayList<JeuDeRegle> listJdr = new ArrayList<>();
if (!g.isEmpty())
@@ -89,9 +92,17 @@ private static ArrayList<JeuDeRegle> extractRuleSetObjects(HashMap<String, Array
jdr.setPeriodicite(g.get("periodicite").get(i));
try {
jdr.setValiditeInf(formatDate.parse(g.get("validite_inf").get(i)));
} catch (ParseException ex) {
ArcException e = new ArcException(ArcExceptionMessage.DATE_PARSE_FAILED_VALIDITE_INF, g.get("validite_inf").get(i));
e.logFullException();
throw e;
}
try {
jdr.setValiditeSup(formatDate.parse(g.get("validite_sup").get(i)));
} catch (ParseException ex) {
LoggerHelper.errorGenTextAsComment(JeuDeRegleDao.class, "recupJeuDeRegle()", LOGGER, ex);
ArcException e = new ArcException(ArcExceptionMessage.DATE_PARSE_FAILED_VALIDITE_SUP, g.get("validite_sup").get(i));
e.logFullException();
throw e;
}
jdr.setVersion(g.get("version").get(i));
// Ajout à la liste de résultat
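
Besides the type change, the hunk above also tightens error handling: a ParseException on validite_inf or validite_sup is now wrapped in an ArcException, logged, and rethrown instead of only being logged. A sketch of that wrap-and-rethrow pattern is below, assuming the ArcException(ArcExceptionMessage, ...) constructor and logFullException() shown in the diff; the helper method itself is illustrative, since the diff writes the try/catch inline.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.utils.exception.ArcExceptionMessage;

// Illustrative helper only; mirrors the pattern applied above to both dates.
public class DateParsingSketch {

    static Date parseOrThrow(SimpleDateFormat formatDate, String raw,
            ArcExceptionMessage message) throws ArcException {
        try {
            return formatDate.parse(raw);
        } catch (ParseException ex) {
            // Wrap the low-level ParseException into the application-level
            // ArcException, log it fully, then propagate to the caller.
            ArcException e = new ArcException(message, raw);
            e.logFullException();
            throw e;
        }
    }
}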
@@ -2,7 +2,8 @@

import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -39,7 +40,7 @@ public static ArrayList<RegleControleEntity> getRegle(Connection connexion, Stri
sb.append(" AND to_date(b.validite,'"+ArcDateFormat.DATE_FORMAT_CONVERSION.getDatastoreFormat()+"')<=a.validite_sup) ");
sb.append("; ");

HashMap<String, ArrayList<String>> g = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, sb))
Map<String, List<String>> g = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion, sb))
.mapContent();

if (!g.isEmpty()) {
@@ -1,8 +1,8 @@
package fr.insee.arc.core.service.global.dao;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
import fr.insee.arc.core.dataobjects.ColumnEnum;
@@ -145,7 +145,7 @@ public static String getNormeAttributes(String idSource, String tablePilotage) {
* @return
* @throws ArcException
*/
public static HashMap<String, ArrayList<String>> getBean(Connection c, String req) throws ArcException {
public static Map<String, List<String>> getBean(Connection c, String req) throws ArcException {
GenericBean gb = new GenericBean(
UtilitaireDao.get(0).executeRequest(c, new ArcPreparedStatementBuilder(req)));
return gb.mapContent(true);
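
As the calls elsewhere in this diff suggest (for example g.get("periodicite").get(i)), the Map<String, List<String>> returned here is column-oriented: keyed by column name, with each list holding that column's values in row order. A small sketch of how such a result is typically read; the column names are assumptions for illustration.

import java.util.List;
import java.util.Map;

// Illustrative consumer of a Map<String, List<String>> result such as the one
// returned by getBean(...) above. Column names are assumed.
public class MapContentUsageSketch {

    static void printRules(Map<String, List<String>> bean) {
        List<String> idNorme = bean.get("id_norme");
        List<String> periodicite = bean.get("periodicite");
        for (int i = 0; i < idNorme.size(); i++) {
            // Row i is reassembled by reading index i of each column list.
            System.out.println(idNorme.get(i) + " / " + periodicite.get(i));
        }
    }
}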
@@ -5,6 +5,8 @@
import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
@@ -273,7 +275,7 @@ public PropertiesHandler getProperties() {
* @return
* @throws ArcException
*/
private static ArrayList<String> retrieveTablesFromSchema(Connection connexion, String envExecution, Function<String, ArcPreparedStatementBuilder> condition)
private static List<String> retrieveTablesFromSchema(Connection connexion, String envExecution, Function<String, ArcPreparedStatementBuilder> condition)
throws ArcException {

ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
@@ -349,7 +351,7 @@ private static ArcPreparedStatementBuilder conditionToRetrieveRulesTablesInSchem
}

// return external tables used in rules
public static ArrayList<String> retrieveExternalTablesUsedInRules(Connection connexion, String envExecution)
public static List<String> retrieveExternalTablesUsedInRules(Connection connexion, String envExecution)
throws ArcException {

// generate a sql expression with relevant the columns concatenation of rules table
@@ -371,7 +373,7 @@ public static ArrayList<String> retrieveExternalTablesUsedInRules(Connection con
query.build(SQL.AND, ColumnEnum.COLUMN_NAME, "!=", query.quoteText(ColumnEnum.VERSION));
query.build(SQL.GROUP_BY, ColumnEnum.TABLE_SCHEMA, "||'.'||", ColumnEnum.TABLE_NAME);

HashMap<String, ArrayList<String>> result = new GenericBean(
Map<String, List<String>> result = new GenericBean(
UtilitaireDao.get(0).executeRequest(connexion, query)).mapContent();

// search if a nomenclature table is quoted in the columns concatenation of rules tables
@@ -404,12 +406,12 @@ public static ArrayList<String> retrieveExternalTablesUsedInRules(Connection con
* @param envExecution
* @throws ArcException
*/
public static ArrayList<String> retrieveRulesTablesFromSchema(Connection connexion, String envExecution)
public static List<String> retrieveRulesTablesFromSchema(Connection connexion, String envExecution)
throws ArcException {
return retrieveTablesFromSchema(connexion, envExecution, BddPatcher::conditionToRetrieveRulesTablesInSchema );
}

public static ArrayList<String> retrieveModelTablesFromSchema(Connection connexion, String envExecution)
public static List<String> retrieveModelTablesFromSchema(Connection connexion, String envExecution)
throws ArcException {
return retrieveTablesFromSchema(connexion, envExecution, BddPatcher::conditionToRetrieveModelTablesInSchema );
}
@@ -3,6 +3,7 @@
import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;

import org.apache.logging.log4j.LogManager;
@@ -18,7 +19,6 @@
import fr.insee.arc.core.service.p5mapping.engine.ExpressionService;
import fr.insee.arc.utils.consumer.ThrowingConsumer;
import fr.insee.arc.utils.dao.CopyObjectsToDatabase;
import fr.insee.arc.utils.dao.UtilitaireDao;
import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.utils.ressourceUtils.PropertiesHandler;
import fr.insee.arc.utils.structure.GenericBean;
@@ -132,7 +132,7 @@ private static void copyMetaDataToExecutors(Connection coordinatorConnexion, Con

// copy tables

ArrayList<String> tablesToCopyIntoExecutor = BddPatcher.retrieveRulesTablesFromSchema(coordinatorConnexion,
List<String> tablesToCopyIntoExecutor = BddPatcher.retrieveRulesTablesFromSchema(coordinatorConnexion,
envExecution);
tablesToCopyIntoExecutor
.addAll(BddPatcher.retrieveExternalTablesUsedInRules(coordinatorConnexion, envExecution));
@@ -120,7 +120,7 @@ public void copyRulesTablesToExecution() throws ArcException {
+ requeteSelectDrop.quoteText(anExecutionEnvironment.toLowerCase()) + " ");
requeteSelectDrop.append(" AND tablename SIMILAR TO '%nmcl%|%ext%'");

ArrayList<String> requetesDeSuppressionTablesNmcl = new GenericBean(
List<String> requetesDeSuppressionTablesNmcl = new GenericBean(
UtilitaireDao.get(0).executeRequest(coordinatorConnexion, requeteSelectDrop)).mapContent()
.get("requete_drop");

@@ -131,7 +131,7 @@ public void copyRulesTablesToExecution() throws ArcException {
}

// 2.Préparation des requêtes de création des tables
ArrayList<String> requetesDeCreationTablesNmcl = new GenericBean(UtilitaireDao.get(0)
List<String> requetesDeCreationTablesNmcl = new GenericBean(UtilitaireDao.get(0)
.executeRequest(coordinatorConnexion, new ArcPreparedStatementBuilder(
"select tablename from pg_tables where (tablename like 'nmcl\\_%' OR tablename like 'ext\\_%') and schemaname='arc'")))
.mapContent().get("tablename");
@@ -174,8 +174,8 @@ public static void mettreAJourSchemaTableMetier(Connection coordinatorOrExecutor
+ "'||nom_table_metier), lower(nom_variable_metier), lower(type_variable_metier) FROM "
+ ViewEnum.IHM_MOD_VARIABLE_METIER.getFullName());

List<List<String>> relationalViewRef = Format
.patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(coordinatorOrExecutorConnexion, requeteRef));
List<List<String>> relationalViewRef = UtilitaireDao.get(0).executeRequestWithoutMetadata(coordinatorOrExecutorConnexion, requeteRef);

HierarchicalView familleToTableToVariableToTypeRef = HierarchicalView.asRelationalToHierarchical(
"(Réf) Famille -> Table -> Variable -> Type",
Arrays.asList("id_famille", "nom_table_metier", "variable_metier", "type_variable_metier"),
@@ -205,8 +205,7 @@ public static void mettreAJourSchemaTableMetier(Connection coordinatorOrExecutor
+ ManipString.substringAfterFirst(TableNaming.dbEnv(envExecution), ".").toLowerCase()
+ "mapping\\_'||lower(id_famille)||'\\_%';");

List<List<String>> relationalView = Format
.patch(UtilitaireDao.get(0).executeRequestWithoutMetadata(coordinatorOrExecutorConnexion, requete));
List<List<String>> relationalView = UtilitaireDao.get(0).executeRequestWithoutMetadata(coordinatorOrExecutorConnexion, requete);

HierarchicalView familleToTableToVariableToType = HierarchicalView.asRelationalToHierarchical(
"(Phy) Famille -> Table -> Variable -> Type",
@@ -3,6 +3,8 @@
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -67,11 +69,11 @@ public void removeDeprecatedFiles() throws ArcException {
cleanPilotageDao.execQueryMaterializeFilesToDelete(numberOfDaysToKeepFiles);

// initialisation de la liste contenant les archives à déplacer
HashMap<String, ArrayList<String>> recordedArchives = new HashMap<>();
Map<String, List<String>> recordedArchives = new HashMap<>();
recordedArchives.put(ColumnEnum.ENTREPOT.getColumnName(), new ArrayList<>());
recordedArchives.put(ColumnEnum.NOM_ARCHIVE.getColumnName(), new ArrayList<>());

HashMap<String, ArrayList<String>> listOfDeletedArchives;
Map<String, List<String>> listOfDeletedArchives;

// on selectionne les fichiers éligibles et on limite le nombre de retour
// pour que l'update ne soit pas trop massif (perf)
@@ -96,7 +98,7 @@ public void removeDeprecatedFiles() throws ArcException {
}


private void keepTrackOfDeletedArchives(HashMap<String, ArrayList<String>> listOfDeletedArchives, HashMap<String, ArrayList<String>> recordedArchives)
private void keepTrackOfDeletedArchives(Map<String, List<String>> listOfDeletedArchives, Map<String, List<String>> recordedArchives)
{
if (listOfDeletedArchives.isEmpty()) {
return;
@@ -129,7 +131,7 @@ private void keepTrackOfDeletedArchives(HashMap<String, ArrayList<String>> listO
* @param recordedArchives
* @throws ArcException
*/
private void moveDeletedArchivesToArchivageDirectory(HashMap<String, ArrayList<String>> recordedArchives) throws ArcException
private void moveDeletedArchivesToArchivageDirectory(Map<String, List<String>> recordedArchives) throws ArcException
{
if (recordedArchives.get(ColumnEnum.ENTREPOT.getColumnName()).isEmpty()) {
return;
@@ -1,7 +1,7 @@
package fr.insee.arc.core.service.p0initialisation.pilotage.dao;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
import fr.insee.arc.core.dataobjects.ColumnEnum;
@@ -68,7 +68,7 @@ public void execQueryMaterializeFilesToDelete(int numberOfDaysToKeepFiles) throw
* @return
* @throws ArcException
*/
public HashMap<String, ArrayList<String>> execQueryDeleteDeprecatedFilesAndSelectArchives(int numberOfFilesToProceed) throws ArcException {
public Map<String, List<String>> execQueryDeleteDeprecatedFilesAndSelectArchives(int numberOfFilesToProceed) throws ArcException {
// requete sur laquelle on va itérer : on selectionne un certain nombre de
// fichier et on itere
ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
@@ -163,7 +163,7 @@ public static void dropUnusedTemporaryTablesOnConnection(Connection targetConnex
GenericBean g = new GenericBean(
UtilitaireDao.get(0).executeRequest(targetConnexion, SynchronizeDataByPilotageDao.requeteListAllTemporaryTablesInEnv(envExecution)));
if (!g.mapContent().isEmpty()) {
ArrayList<String> envTables = g.mapContent().get("table_name");
List<String> envTables = g.mapContent().get("table_name");
for (String nomTable : envTables) {
UtilitaireDao.get(0).executeBlock(targetConnexion, FormatSQL.dropTable(nomTable));
}
@@ -84,7 +84,7 @@ public static List<Norme> getNormesBase(Connection connexion, String envExecutio

List<Norme> output = new ArrayList<Norme>() ;
// Récupérer les régles de définition de normes
ArrayList<ArrayList<String>> normes = new ArrayList<ArrayList<String>>();
List<List<String>> normes = new ArrayList<>();
try {
normes = new GenericBean(UtilitaireDao.get(0).executeRequest(connexion,
new ArcPreparedStatementBuilder( "select id_norme, periodicite, def_norme, def_validite from " + ViewEnum.NORME.getFullName(envExecution) + ";"))).content;
@@ -143,11 +143,11 @@ private void calculerNormeAndValidite(Norme[] normeOk, String[] validiteOk, Stri
query.append("\n ) vv ");
query.append("\n where norme is not null ");

ArrayList<ArrayList<String>> result =UtilitaireDao.get(0).executeRequestWithoutMetadata(this.connexion, new ArcPreparedStatementBuilder(query));
List<List<String>> result =UtilitaireDao.get(0).executeRequestWithoutMetadata(this.connexion, new ArcPreparedStatementBuilder(query));
if (result.size()>1)
{
StringBuilder normsFound = new StringBuilder();
for (ArrayList<String> resultLine : result)
for (List<String> resultLine : result)
{
int index = Integer.parseInt(resultLine.get(0));
normsFound.append("{");