Skip to content

Commit

Permalink
feat: test wsImport1 KO
Browse files Browse the repository at this point in the history
  • Loading branch information
Nolife999 committed Dec 4, 2023
1 parent 2722fe0 commit 2b3ce1d
Show file tree
Hide file tree
Showing 4 changed files with 231 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -147,20 +147,12 @@ public void run() {
executeIf(ExportSource.METADATA, () -> clientDao.createTableFamille());
executeIf(ExportSource.METADATA, () -> clientDao.createTablePeriodicite());
} catch (ArcException e) {
try {
clientDao.createTableWsKO();
} catch (ArcException e1) {
new ArcException(ArcExceptionMessage.DATABASE_CONNECTION_FAILED).logFullException();
}
clientDao.registerWsKO();
} finally {
try {
clientDao.dropTableWsPending();
} catch (ArcException e) {
try {
clientDao.createTableWsKO();
} catch (ArcException e1) {
new ArcException(ArcExceptionMessage.DATABASE_CONNECTION_FAILED).logFullException();
}
clientDao.registerWsKO();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ public class ClientDao {

// the tablename of the table that tracks tables left to retrieved
private String tableWsTracking;

private String tableWsInfo;

private Connection connection;

Expand All @@ -67,7 +69,7 @@ public ClientDao(ArcClientIdentifier arcClientIdentifier) {
timestamp);
this.tableWsTracking = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_TRACKING, client,
timestamp);

this.tableWsInfo = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_INFO, client, timestamp);
}

/**
Expand All @@ -84,7 +86,6 @@ public boolean verificationClientFamille() throws ArcException {
.append(" LIMIT 1);");

String bool = UtilitaireDao.get(0).executeRequestWithoutMetadata(connection, request).get(0).get(0);

return bool.equals("t");

}
Expand Down Expand Up @@ -192,7 +193,7 @@ public void createTableTrackRetrievedTables() throws ArcException {
ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
query.build(SQL.DROP, SQL.TABLE, SQL.IF_EXISTS, this.tableWsTracking, SQL.END_QUERY);
query.build(SQL.CREATE, SQL.TABLE, this.tableWsTracking,
" (tracking_type text, nod text, table_to_retrieve text) ", SQL.END_QUERY);
" (id serial, tracking_type text, nod text, table_to_retrieve text) ", SQL.END_QUERY);
UtilitaireDao.get(0).executeRequest(connection, query);

registerTableToBeRetrieved(ExportTrackingType.TRACK, ArcDatabase.COORDINATOR, this.tableWsTracking);
Expand Down Expand Up @@ -403,6 +404,7 @@ public TableToRetrieve getAClientTableByType(ExportTrackingType type) throws Arc
ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
query.build(SQL.SELECT, "nod, table_to_retrieve", SQL.FROM, this.tableWsTracking);
query.build(SQL.WHERE, "tracking_type=", query.quoteText(type.toString()));
query.build(SQL.ORDER_BY, "id");
query.build(SQL.LIMIT, "1");

Map<String, List<String>> content = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query))
Expand All @@ -425,6 +427,7 @@ public TableToRetrieve getAClientTableByName(String tableName) throws ArcExcepti
ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();
query.build(SQL.SELECT, "nod, table_to_retrieve", SQL.FROM, this.tableWsTracking);
query.build(SQL.WHERE, "table_to_retrieve=", query.quoteText(tableName));
query.build(SQL.ORDER_BY, "id");
query.build(SQL.LIMIT, "1");

Map<String, List<String>> content = new GenericBean(UtilitaireDao.get(0).executeRequest(connection, query))
Expand Down Expand Up @@ -482,8 +485,6 @@ public void dropPendingClientTables(int connectionId) throws ArcException {
*/
public void createTableWsInfo() throws ArcException {

String tableWsInfo = TableNaming.buildTableNameWithTokens(environnement, ViewEnum.WS_INFO, client, timestamp);

ArcPreparedStatementBuilder requete = new ArcPreparedStatementBuilder();
requete.append("\n DROP TABLE IF EXISTS " + tableWsInfo + ";");

Expand All @@ -500,8 +501,12 @@ public void createTableWsInfo() throws ArcException {

}

public void createTableWsKO() throws ArcException {
registerTableToBeRetrieved(ExportTrackingType.KO, ArcDatabase.COORDINATOR, ViewEnum.WS_KO.toString());
/**
 * Mark the web-service exchange as KO by registering the ws_ko marker table in
 * the tracking table on the coordinator database.
 * <p>
 * This method never throws: any {@link ArcException} raised while registering
 * is logged and swallowed so that KO reporting itself cannot fail the caller.
 * NOTE(review): the caught exception {@code e1} is discarded and a fresh
 * DATABASE_CONNECTION_FAILED exception is logged instead — the original cause
 * is lost; consider chaining {@code e1} if ArcException supports it.
 */
public void registerWsKO() {
try {
registerTableToBeRetrieved(ExportTrackingType.KO, ArcDatabase.COORDINATOR, ViewEnum.WS_KO.toString());
} catch (ArcException e1) {
// best-effort logging only; do not propagate from the KO path
new ArcException(ArcExceptionMessage.DATABASE_CONNECTION_FAILED).logFullException();
}
}

public void dropTableWsPending() throws ArcException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import java.sql.SQLException;

import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
Expand All @@ -23,37 +25,53 @@ public class ImportStep1InitializeClientTablesServiceTest extends ServletArc {
private static final long serialVersionUID = -7832574224892526397L;



@Test
public void testExecute() throws ArcException, SQLException {
@BeforeClass
public static void setup() throws SQLException, ArcException {

InitializeQueryTest.buildPropertiesWithoutScalability(null);

destroyTestData();
initializeTestData();

JSONObject jsonDsnStep1 = new JSONObject(
"{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}");
}

@AfterClass
public static void tearDown() throws SQLException, ArcException {
destroyTestData();
}

jsonDsnStep1= validateRequest(jsonDsnStep1);

ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(jsonDsnStep1);
/**
 * Run the import step 1 service for the given client request.
 *
 * @param clientJsonInput raw JSON request as sent by the client
 * @return the servlet response payload as a string (the token written to the
 *         response stream by the service)
 * @throws ArcException if the request fails validation or execution fails
 */
private String executeImportStep1(JSONObject clientJsonInput) throws ArcException
{
// validate the raw request before building the service
JSONObject clientJsonInputValidated= validateRequest(clientJsonInput);
ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(clientJsonInputValidated);
// capture the servlet response in memory
ByteArrayOutputStream bos = new ByteArrayOutputStream();
SendResponse sentResponse = new SendResponse(bos);

imp.execute(sentResponse);
return sentResponse.getWr().toString();
}


/**
 * A request whose familleNorme ("RESIL") is not registered for the client must
 * be rejected: the service is expected to throw an {@link ArcException}.
 */
@Test(expected = ArcException.class)
public void testExecuteFamilyNotValid() throws ArcException {
JSONObject clientJsonInput = new JSONObject(
"{\"familleNorme\":\"RESIL\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}");
executeImportStep1(clientJsonInput);
}


/**
 * Nominal path: a valid DSN request initializes every client table.
 * Runs the service once, then checks each expected table in turn.
 */
@Test
public void testExecute() throws ArcException {

JSONObject clientJsonInput = new JSONObject(
"{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}");

executeImportStep1(clientJsonInput);

// the ws_pending table must be created and then dropped by the worker thread
testCreateAndDropWsPending();

// each metadata table requested by the client must exist with expected content
testCreateTableNmcl();
testCreateTableVarMetier();
testCreateTableTableMetier();
testCreateTableTableFamille();
testCreateTableTablePeriodicite();



destroyTestData();
}

private void testCreateAndDropWsPending() throws ArcException {
Expand Down Expand Up @@ -106,7 +124,7 @@ private void testCreateTableTablePeriodicite() throws ArcException {
* @throws SQLException
* @throws ArcException
*/
private void initializeTestData() throws SQLException, ArcException {
private static void initializeTestData() throws SQLException, ArcException {

ArcPreparedStatementBuilder query;

Expand Down Expand Up @@ -176,7 +194,7 @@ private void initializeTestData() throws SQLException, ArcException {
* @throws SQLException
* @throws ArcException
*/
private void destroyTestData() throws SQLException, ArcException {
private static void destroyTestData() throws SQLException, ArcException {

ArcPreparedStatementBuilder query;

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,182 @@
package fr.insee.arc.ws.services.importServlet;

import static org.junit.Assert.assertTrue;

import java.io.ByteArrayOutputStream;
import java.sql.SQLException;

import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import fr.insee.arc.core.dataobjects.ArcPreparedStatementBuilder;
import fr.insee.arc.utils.dao.SQL;
import fr.insee.arc.utils.dao.UtilitaireDao;
import fr.insee.arc.utils.exception.ArcException;
import fr.insee.arc.utils.query.InitializeQueryTest;
import fr.insee.arc.ws.services.importServlet.actions.SendResponse;
import fr.insee.arc.ws.services.importServlet.bo.ExportTrackingType;

/**
 * KO-path test for import step 1: the test database is built WITHOUT the
 * arc_bas1.variable_metier table, so the background initialization thread is
 * expected to fail, drop its ws_pending table, and register a KO marker in the
 * client tracking table.
 */
public class ImportStep1InitializeClientTablesServiceTestKO extends ServletArc {

	/** Serialization id required by the servlet hierarchy. */
	private static final long serialVersionUID = -7832574224892526397L;

	/**
	 * Build the test database: schemas, family/client referential, pilotage and
	 * data tables. variable_metier is deliberately omitted to force the KO.
	 */
	@BeforeClass
	public static void setup() throws SQLException, ArcException {

		InitializeQueryTest.buildPropertiesWithoutScalability(null);

		destroyTestData();
		initializeTestData();
	}

	/** Drop every schema created for the test. */
	@AfterClass
	public static void tearDown() throws SQLException, ArcException {
		destroyTestData();
	}

	/**
	 * Run the import step 1 service for the given client request.
	 *
	 * @param clientJsonInput raw JSON request as sent by the client
	 * @return the servlet response payload as a string (token identifying the
	 *         client tables, used as table-name prefix)
	 * @throws ArcException if the request fails validation or execution fails
	 */
	private String executeImportStep1(JSONObject clientJsonInput) throws ArcException {
		JSONObject clientJsonInputValidated = validateRequest(clientJsonInput);
		ImportStep1InitializeClientTablesService imp = new ImportStep1InitializeClientTablesService(clientJsonInputValidated);
		// capture the servlet response in memory
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		SendResponse sentResponse = new SendResponse(bos);
		imp.execute(sentResponse);
		return sentResponse.getWr().toString();
	}

	/**
	 * Valid DSN request over a broken database: the worker thread must clean up
	 * its pending table and register a KO marker.
	 */
	@Test
	public void testExecute() throws ArcException {

		JSONObject clientJsonInput = new JSONObject(
				"{\"familleNorme\":\"DSN\",\"periodicite\":\"M\",\"service\":\"arcClient\",\"validiteSup\":\"2032-03-01\",\"format\":\"csv_gzip\",\"reprise\":false,\"client\":\"ARTEMIS\",\"environnement\":\"arc_bas1\"}");

		String arcResponse = executeImportStep1(clientJsonInput);

		testCreateAndDropWsPending(arcResponse);

		testCreateTableWsKo(arcResponse);

	}

	/**
	 * Poll until the parallel initialization thread drops its ws_pending table.
	 *
	 * @param arcResponse the table-name prefix returned by the service
	 * @throws ArcException on database access failure
	 */
	private void testCreateAndDropWsPending(String arcResponse) throws ArcException {

		// check that the parallel thread that create tables drop the table ws_pending

		// it should be done in less than 50 iteration, test data is very little
		int maxIteration = 50;
		int i = 0;

		while (i < maxIteration && UtilitaireDao.get(0).isTableExiste(InitializeQueryTest.c, arcResponse + "_ws_pending")) {
			i++;
			UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, "SELECT pg_sleep(1);");
		}

		// the pending table must have existed at least once (thread really started)
		assertTrue(i > 0);
		// and it must have been dropped before the timeout
		assertTrue(i < maxIteration);
	}

	/**
	 * The tracking table must contain a KO entry after the failed initialization.
	 *
	 * @param arcResponse the table-name prefix returned by the service
	 * @throws ArcException on database access failure
	 */
	private void testCreateTableWsKo(String arcResponse) throws ArcException {
		ArcPreparedStatementBuilder query = new ArcPreparedStatementBuilder();

		query.append("SELECT 1 FROM "+arcResponse+"_ws_tracking where tracking_type="+query.quoteText(ExportTrackingType.KO.toString()));

		// BUG FIX: the original computed hasResults() but never asserted it,
		// so the test could not fail; the KO marker must really be present
		assertTrue(UtilitaireDao.get(0).hasResults(InitializeQueryTest.c, query));
	}

	/**
	 * initialize data for the tests
	 * @throws SQLException
	 * @throws ArcException
	 */
	private static void initializeTestData() throws SQLException, ArcException {

		ArcPreparedStatementBuilder query;

		query = new ArcPreparedStatementBuilder();

		query.append("CREATE SCHEMA arc;");
		query.append("CREATE SCHEMA arc_bas1;");


		// family and client tables
		query.append("CREATE TABLE arc.ihm_client AS ");
		query.append("SELECT 'DSN' as id_famille,'ARTEMIS' as id_application UNION ALL ");
		query.append("SELECT 'DSN' as id_famille,'DSNFLASH' as id_application");
		query.append(SQL.END_QUERY);

		query.append("CREATE TABLE arc.ihm_famille AS SELECT 'DSN' as id_famille");
		query.append(SQL.END_QUERY);

		query.append("CREATE TABLE arc_bas1.mod_table_metier AS ");
		query.append("SELECT 'DSN' as id_famille,'mapping_dsn_test1_ok' as nom_table_metier UNION ALL ");
		query.append("SELECT 'PASRAU' as id_famille,'mapping_pasrau_test_ok' as nom_table_metier");
		query.append(SQL.END_QUERY);

		// table variable_metier doesn't exists, it will crash

		// pilotage tables
		query.append("CREATE TABLE arc_bas1.pilotage_fichier AS ");
		query.append("SELECT 'file_to_retrieve.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite");
		query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement");
		query.append(", null::text[] as client, null::timestamp[] as date_client");
		query.append(" UNION ALL ");
		// file that mustn't be retrieved when reprise is false and family is DSN
		query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'PHASE3V1' as id_norme, '2023-10-01' as validite,'M' as periodicite");
		query.append(", 'MAPPING' as phase_traitement, '{OK}'::text[] as etat_traitement, '2023-11-30 10:29:47.000'::timestamp as date_traitement");
		query.append(", '{ARTEMIS}'::text[] as client, '{2023-11-30 10:29:47.000}'::timestamp[] as date_client");
		query.append(SQL.END_QUERY);

		// norme table used to retrieve family of data
		query.append("CREATE TABLE arc_bas1.norme AS ");
		query.append("SELECT 'PHASE3V1' as id_norme, 'DSN' as id_famille UNION ALL ");
		query.append("SELECT 'PASRAU' as id_norme, 'PASRAU' as id_famille");
		query.append(SQL.END_QUERY);

		// data tables containing two files
		// one had already been retrieved by client 'ARTEMIS', the other hadn't been retrieved yet
		query.append("CREATE TABLE arc_bas1.mapping_dsn_test1_ok AS ");
		query.append("SELECT 'file_to_retrieve.xml' as id_source, 'data_of_file_to_retrieve' as data UNION ALL ");
		query.append("SELECT 'file_not_to_retrieve_when_reprise_false.xml' as id_source, 'data_of_file_not_to_retrieve_when_reprise_false' as data");
		query.append(SQL.END_QUERY);

		// nomenclature tables
		query.append("CREATE TABLE arc_bas1.nmcl_table1 AS SELECT 1 as data");
		query.append(SQL.END_QUERY);
		query.append("CREATE TABLE arc_bas1.nmcl_table2 AS SELECT 1 as data");
		query.append(SQL.END_QUERY);
		query.append("CREATE TABLE arc.ext_mod_periodicite AS SELECT 1 as id, 'A' as VAL");
		query.append(SQL.END_QUERY);

		UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query);
	}

	/**
	 * destroy data for the tests
	 * @throws SQLException
	 * @throws ArcException
	 */
	private static void destroyTestData() throws SQLException, ArcException {

		ArcPreparedStatementBuilder query;

		query = new ArcPreparedStatementBuilder();

		query.append("DROP SCHEMA IF EXISTS arc CASCADE;");
		query.append("DROP SCHEMA IF EXISTS arc_bas1 CASCADE;");
		UtilitaireDao.get(0).executeImmediate(InitializeQueryTest.c, query);
	}

}

0 comments on commit 2b3ce1d

Please sign in to comment.