Skip to content

Commit

Permalink
Merge branch 'scalable_data_retrieval_webservice'
Browse files Browse the repository at this point in the history
  • Loading branch information
Nolife999 committed Dec 1, 2023
1 parent cfd1167 commit c6ed374
Show file tree
Hide file tree
Showing 3 changed files with 144 additions and 36 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ public enum JsonKeys {
,VALINF( "validiteInf" )
,VALSUP( "validiteSup" )
,PERIODICITE( "periodicite" )
,NBFICHIERS("nbfichiers")

//Réponse
,ID( "id" ) //Aussi utilisé dans les réponses quelque soit le service
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -215,9 +215,7 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException {
? requeteJSON.getString(JsonKeys.VALINF.getKey())
: null;
String validiteSup = requeteJSON.getString(JsonKeys.VALSUP.getKey());
int nbFichiers = requeteJSON.keySet().contains(JsonKeys.NBFICHIERS.getKey())
? requeteJSON.getInt(JsonKeys.NBFICHIERS.getKey())
: 0;

boolean reprise = requeteJSON.getBoolean(JsonKeys.REPRISE.getKey());

StringBuilder query = new StringBuilder();
Expand All @@ -228,8 +226,7 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException {

query.append("(");
query.append("SELECT " + ColumnEnum.ID_SOURCE.getColumnName()
+ (nbFichiers > 0 ? ", substr(date_entree,1,10)::date as date_entree " : " ") //
+ "FROM " + ViewEnum.PILOTAGE_FICHIER.getFullName(this.environnement) + " T1 ");
+ " FROM " + ViewEnum.PILOTAGE_FICHIER.getFullName(this.environnement) + " T1 ");
query.append(
"WHERE '" + TraitementEtat.OK + "'=ANY(T1.etat_traitement) AND T1.periodicite='" + periodicite + "' ");

Expand All @@ -250,13 +247,7 @@ public void createTableOfIdSource(JSONObject requeteJSON) throws ArcException {
LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.getIdSrcTableMetier() : Reprise = true");
}

query.append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName() + (nbFichiers > 0 ? ", date_entree " : " ")); // )

// on trie par ordre decroissant de date d'entree
if (nbFichiers > 0) {
query.append("ORDER BY date_entree DESC LIMIT ");
query.append(nbFichiers);
}
query.append("GROUP BY " + ColumnEnum.ID_SOURCE.getColumnName()); // )
query.append(") as foo; ");

UtilitaireDao.get(0).executeBlock(connection, query);
Expand Down Expand Up @@ -348,7 +339,7 @@ public void createTableFamille() throws ArcException {
+ requete.quoteText(client) + ");");
UtilitaireDao.get(0).executeRequest(connection, requete);

registerTableToBeRetrieved(ExportTrackingType.ID_SOURCE, ArcDatabase.COORDINATOR, nomTableImage);
registerTableToBeRetrieved(ExportTrackingType.DATA, ArcDatabase.COORDINATOR, nomTableImage);

}

Expand All @@ -361,7 +352,7 @@ public void createTableFamille() throws ArcException {
public void createTablePeriodicite() throws ArcException {
LoggerHelper.debugAsComment(LOGGER, "ClientDaoImpl.createTablePeriodicite()");

String nomTableImage = ViewEnum.getFullName(environnement,
String nomTableImage = ViewEnum.getFullNameNotNormalized(environnement,
client + "_" + timestamp + "_" + ViewEnum.EXT_MOD_PERIODICITE.getTableName());

UtilitaireDao.get(0).executeImmediate(connection, "CREATE TABLE " + nomTableImage + FormatSQL.WITH_NO_VACUUM
Expand Down Expand Up @@ -560,4 +551,38 @@ public void setConnection(Connection connection) {
this.connection = connection;
}

/** @return the timestamp attached to this DAO instance (used to suffix the client's table names). */
public long getTimestamp() {
    return this.timestamp;
}

/** @return the environnement identifier this DAO works in. */
public String getEnvironnement() {
    return this.environnement;
}

/** @return the client identifier this DAO serves. */
public String getClient() {
    return this.client;
}

/** @return the famille (norm family) associated with the client request. */
public String getFamille() {
    return this.famille;
}

/** @return the name of the table holding the id_source values selected for the client. */
public String getTableOfIdSource() {
    return this.tableOfIdSource;
}

/** @return the name of the web-service pending table. */
public String getTableWsPending() {
    return this.tableWsPending;
}

/** @return the name of the table tracking the tables retrieved by the client. */
public String getTableWsTracking() {
    return this.tableWsTracking;
}

/** @return the JDBC connection used by this DAO. */
public Connection getConnection() {
    return this.connection;
}



}
Original file line number Diff line number Diff line change
Expand Up @@ -15,58 +15,121 @@
import fr.insee.arc.utils.dao.UtilitaireDao;
import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.utils.query.InitializeQueryTest;
import fr.insee.arc.utils.structure.GenericBean;
import fr.insee.arc.ws.services.importServlet.bo.ArcClientIdentifier;
import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType;

public class ClientDaoTest extends InitializeQueryTest {

// request for DSN family, ARTEMIS client and reprise = true
JSONObject jsonDsnStep1 = new JSONObject(
"{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}");
ArcClientIdentifier queryParametersDsnStep1 = new ArcClientIdentifier(jsonDsnStep1, true);
ClientDao clientDaoDsnStep1 = new ClientDao(queryParametersDsnStep1);


/**
 * Single JUnit entry point: runs every ClientDao check in sequence against one
 * shared database fixture (built by {@link #initializeTestData()}), so the
 * private "test*" helpers below are ordered steps, not independent tests.
 */
@Test
public void clientDaoTest() throws ArcException, SQLException {

// NOTE(review): name suggests a single-database (non-scalable) setup — confirm
InitializeQueryTest.buildPropertiesWithoutScalability(null);


// start from a clean state, then build the test dataset
destroyTestData();
initializeTestData();

// test tracking table creation and registration
testCreateTableTrackRetrievedTables();

// test family check
testVerificationFamilleOK();
testVerificationFamilleKO();

// test data tables retrieved according to query
testSelectBusinessDataTables();

// testCreateTableOfIdSource();
testCreateTableOfIdSourceRepriseFalse();
testCreateTableOfIdSourceRepriseTrue();

destroyTestData();
}

private void testSelectBusinessDataTables() throws ArcException {
JSONObject json = new JSONObject(
"{\"client\":\"ARTEMIS\",\"environnement\":\"arc.bas1\",\"familleNorme\":\"DSN\",\"format\":\"csv_gzip\"}");
ArcClientIdentifier queryParameters = new ArcClientIdentifier(json, true);
ClientDao clientDao = new ClientDao(queryParameters);
List<String> clientTables = clientDao.selectBusinessDataTables();
/**
 * With reprise = true every OK file must be selected, even those already
 * retrieved by the client: the test dataset holds two such files.
 */
private void testCreateTableOfIdSourceRepriseTrue() throws ArcException {
    clientDaoDsnStep1.createTableOfIdSource(jsonDsnStep1);

    // read back the id_source list that was materialized for the client
    ArcPreparedStatementBuilder readBack = new ArcPreparedStatementBuilder();
    readBack.append("SELECT id_source FROM " + clientDaoDsnStep1.getTableOfIdSource() + ";");

    List<String> idSources = new GenericBean(UtilitaireDao.get(0).executeRequest(c, readBack))
            .getColumnValues("id_source");

    // both files of the dataset are expected when reprise = true
    assertEquals(2, idSources.size());
}

/**
 * With reprise = false, files already flagged as retrieved by the client must
 * be excluded from the id_source selection.
 */
private void testCreateTableOfIdSourceRepriseFalse() throws ArcException {

    // request on DSN family, ARTEMIS client, reprise = false
    JSONObject repriseFalseRequest = new JSONObject(
            "{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}");
    ArcClientIdentifier repriseFalseIdentifier = new ArcClientIdentifier(repriseFalseRequest, true);
    ClientDao repriseFalseDao = new ClientDao(repriseFalseIdentifier);

    // the tracking table is created first, as in the nominal service flow
    repriseFalseDao.createTableTrackRetrievedTables();

    repriseFalseDao.createTableOfIdSource(repriseFalseRequest);

    // read back the selected id_source values
    ArcPreparedStatementBuilder readBack = new ArcPreparedStatementBuilder();
    readBack.append("SELECT id_source FROM " + repriseFalseDao.getTableOfIdSource() + ";");

    List<String> idSources = new GenericBean(UtilitaireDao.get(0).executeRequest(c, readBack))
            .getColumnValues("id_source");

    // only one file expected: file_not_to_retrieve_when_reprise_false was
    // already marked as retrieved by the 'ARTEMIS' client
    assertEquals(1, idSources.size());
}

/**
 * Creates the client tracking table and checks that it registered itself as
 * the single TRACK entry.
 */
private void testCreateTableTrackRetrievedTables() throws ArcException {
    clientDaoDsnStep1.createTableTrackRetrievedTables();

    // fetch every tracking_type recorded in the freshly created table
    ArcPreparedStatementBuilder readBack = new ArcPreparedStatementBuilder();
    readBack.append("SELECT tracking_type FROM " + clientDaoDsnStep1.getTableWsTracking() + ";");
    List<String> trackingTypes = new GenericBean(UtilitaireDao.get(0).executeRequest(c, readBack))
            .getColumnValues("tracking_type");

    // the table was created and registered in itself: exactly one TRACK row
    assertEquals(1, trackingTypes.size());
    assertEquals(ExportTrackingType.TRACK.toString(), trackingTypes.get(0));

}

/**
 * The DSN family of the test dataset declares exactly two business mapping
 * tables; both must be returned for the client.
 */
private void testSelectBusinessDataTables() throws ArcException {

    List<String> businessTables = clientDaoDsnStep1.selectBusinessDataTables();

    assertTrue(businessTables.contains("mapping_dsn_test1_ok"));
    assertTrue(businessTables.contains("mapping_dsn_test2_ok"));
    assertEquals(2, businessTables.size());
}

/**
 * The DSN family exists in the test dataset and is granted to client ARTEMIS:
 * the family check must pass.
 *
 * Fix: the span still carried the pre-refactor setup (a local ClientDao built
 * on environnement "arc.bas1", inconsistent with the dataset's "arc_bas1")
 * next to the refactored shared-fixture assertion; only the latter is kept.
 */
public void testVerificationFamilleOK() throws ArcException {
    assertTrue(clientDaoDsnStep1.verificationClientFamille());
}

/**
 * The BATI family does not exist in the test dataset: the family check must
 * fail for a client requesting it.
 *
 * Fix: the span still carried the pre-refactor body (local ClientDao on
 * environnement "arc.bas1" plus its assertFalse) interleaved with the new
 * RESIL/BATI version; only the new version is kept.
 */
public void testVerificationFamilleKO() throws ArcException {
    // request on BATI family, RESIL client and reprise = true
    // BATI family doesn't exist in the test dataset
    JSONObject jsonBatiStep1 = new JSONObject(
            "{\"familleNorme\":\"BATI\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":true,\"client\":\"RESIL\",\"environnement\":\"arc_bas1\"}");
    ArcClientIdentifier queryParametersBatiStep1 = new ArcClientIdentifier(jsonBatiStep1, true);
    ClientDao clientDaoBatiStep1 = new ClientDao(queryParametersBatiStep1);

    assertFalse(clientDaoBatiStep1.verificationClientFamille());
}




/**
* initialize data for the tests
* @throws SQLException
* @throws ArcException
*/
private void initializeTestData() throws SQLException, ArcException {

ArcPreparedStatementBuilder query;
Expand All @@ -86,18 +149,39 @@ private void initializeTestData() throws SQLException, ArcException {
query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test2_ok' as nom_table_metier UNION ALL ");
query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier");
query.append(SQL.END_QUERY);

query.append("CREATE TABLE arc_bas1.pilotage_fichier AS ");
query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite");
query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement");
query.append(", null::text[] as client, null::timestamp[] as date_client");
query.append(" UNION ALL ");
// file that mustn't be retrieved when reprise is false and family is DSN
query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite");
query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement");
query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");;
query.append(SQL.END_QUERY);

query.append("CREATE TABLE arc_bas1.norme AS ");
query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL ");
query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille");
query.append(SQL.END_QUERY);

UtilitaireDao.get(0).executeImmediate(c, query);
}

/**
 * Drops the test schemas so each run starts clean.
 *
 * Fix: the span contained both the old unguarded {@code DROP SCHEMA} pair and
 * the new {@code DROP SCHEMA IF EXISTS} pair (diff residue); the unguarded
 * drops fail when the schemas are absent (first call of the test) and defeat
 * the IF EXISTS guard, so only the guarded drops are kept.
 *
 * @throws SQLException
 * @throws ArcException
 */
private void destroyTestData() throws SQLException, ArcException {

    ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();

    // IF EXISTS makes the cleanup idempotent (callable before initialization)
    query.append("DROP SCHEMA IF EXISTS arc CASCADE;");
    query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;");
    UtilitaireDao.get(0).executeImmediate(c, query);
}

Expand Down

0 comments on commit c6ed374

Please sign in to comment.