From 25c08323396edfa8aeab9abb27e451bd0fa2f001 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?=
Date: Mon, 1 Jun 2015 23:55:01 +0300
Subject: [PATCH 01/51] WIP: archiving

---
 .../nflow/engine/internal/dao/ArchiveDao.java | 119 ++++++++++++++++++
 .../nflow/engine/service/ArchiveService.java  |  27 ++++
 ...rkflowDefinitionServiceWithSpringTest.java |   6 +
 3 files changed, 152 insertions(+)
 create mode 100644 nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
 create mode 100644 nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java

diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
new file mode 100644
index 000000000..84d76324f
--- /dev/null
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
@@ -0,0 +1,119 @@
+package com.nitorcreations.nflow.engine.internal.dao;
+
+import static com.nitorcreations.nflow.engine.internal.storage.db.DatabaseConfiguration.NFLOW_DATABASE_INITIALIZER;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.apache.commons.lang3.StringUtils;
+import org.joda.time.DateTime;
+import org.springframework.context.annotation.DependsOn;
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.RowMapper;
+import org.springframework.transaction.annotation.Transactional;
+
+import com.nitorcreations.nflow.engine.internal.config.NFlow;
+
+@Named
+@DependsOn(NFLOW_DATABASE_INITIALIZER)
+public class ArchiveDao {
+  private JdbcTemplate jdbc;
+
+  @Inject
+  public void setJdbcTemplate(@NFlow JdbcTemplate jdbcTemplate) {
+    this.jdbc = jdbcTemplate;
+  }
+
+  public List<Integer> listArchivableWorkflows(DateTime before, int maxRows){
+    return jdbc.query("select * from nflow_workflow parent where parent.next_activation is null and parent.modified <= ? " +
+        "and not exists(" +
+        "  select 1 from nflow_workflow child where child.root_workflow_id = parent.id " +
+        "  and (child.modified > ? or child.next_activation is not null)" +
+        ") " +
+        "order by modified asc " +
+        "limit " + maxRows,
+        new Object[]{DaoUtil.toTimestamp(before), DaoUtil.toTimestamp(before)}, new ArchivableWorkflowsRowMapper());
+
+    // TODO create tables nflow_archive_workflow, nflow_archive_workflow_action, nflow_archive_workflow_state
+    // TODO add index to nflow_workflow.modified
+    // TODO add nflow_workflow.root_workflow_id (indexed, foreign key => nflow_workflow.id)
+    // TODO modify childworkflows to set nflow_workflow.root_workflow_id
+  }
+
+  private static class ArchivableWorkflowsRowMapper implements RowMapper<Integer> {
+    @Override
+    public Integer mapRow(ResultSet rs, int rowNum) throws SQLException {
+      return rs.getInt("id");
+    }
+  }
+
+  @Transactional
+  public void archiveWorkflows(List<Integer> workflowIds) {
+    String workflowIdParams = params(workflowIds);
+
+    archiveWorkflowTable(workflowIdParams);
+    archiveActionTable(workflowIdParams);
+    archiveStateTable(workflowIdParams);
+    deleteWorkflows(workflowIdParams);
+  }
+
+  private void archiveWorkflowTable(String workflowIdParams) {
+    String columns = columnsFromMetadata("nflow_workflow");
+    jdbc.update("insert into nflow_archive_workflow(" + columns + ") " +
+        "select " + columns + " from nflow_workflow where id in " + workflowIdParams);
+  }
+
+  private void archiveActionTable(String workflowIdParams) {
+    String columns = columnsFromMetadata("nflow_action");
+    jdbc.update("insert into nflow_archive_workflow_action(" + columns + ") " +
+        "select " + columns + " from nflow_workflow_action where workflow_id in " + workflowIdParams);
+  }
+
+  private void archiveStateTable(String workflowIdParams) {
+    String columns = columnsFromMetadata("nflow_workflow_state");
+    jdbc.update("insert into nflow_archive_workflow_state (" + columns + ") " +
+        "select " + columns + " from nflow_workflow_state where workflow_id in " + workflowIdParams);
+  }
+
+  private void deleteWorkflows(String workflowIdParams) {
+    jdbc.update("delete from nflow_workflow_state where workflow_id in " + workflowIdParams);
+    jdbc.update("update nflow_workflow_action set root_workflow_id=null, parent_workflow_id=null, parent_action_id=null" +
+        "where id in " + workflowIdParams);
+    jdbc.update("delete from nflow_workflow_action where workflow_id in " + workflowIdParams);
+    jdbc.update("delete from nflow_workflow where workflow_id in " + workflowIdParams);
+  }
+
+  private String columnsFromMetadata(String tableName) {
+    List<String> columnNames = jdbc.query("select * from " + tableName + " where 1 = 0", ColumnNamesExtractor.columnNamesExtractor);
+    return StringUtils.join(columnNames.toArray(), ",");
+  }
+
+  private String params(List<Integer> workflowIds) {
+    return "(" + StringUtils.join(workflowIds.toArray(), ",") + ")";
+  }
+
+  static final class ColumnNamesExtractor implements org.springframework.jdbc.core.ResultSetExtractor<List<String>> {
+    static final ColumnNamesExtractor columnNamesExtractor = new ColumnNamesExtractor();
+    private ColumnNamesExtractor() {}
+
+    @Override
+    public List<String> extractData(ResultSet rs) throws SQLException, DataAccessException {
+      List<String> columnNames = new LinkedList<>();
+
+      ResultSetMetaData metadata = rs.getMetaData();
+      for(int col = 1; col <= metadata.getColumnCount(); col ++) {
+        columnNames.add(metadata.getColumnName(col));
+      }
+      return columnNames;
+    }
+  }
+
+}
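The DAO above drives the whole scheme: pick a batch of passive workflows whose hierarchy is old enough, copy them into identically-shaped archive tables with plain insert-select statements, and delete the originals in the same transaction. A minimal sketch of the statement one batch produces — the column list is abbreviated here, since at runtime it is read from table metadata via columnsFromMetadata():

    import java.util.Arrays;
    import java.util.List;

    // Illustration only: mirrors what ArchiveDao.params() and archiveWorkflowTable() build.
    public class ArchiveSqlSketch {
      public static void main(String[] args) {
        List<Integer> ids = Arrays.asList(101, 102, 103); // one batch from listArchivableWorkflows()
        StringBuilder params = new StringBuilder("(");
        for (int i = 0; i < ids.size(); i++) {
          params.append(i > 0 ? "," : "").append(ids.get(i)); // same format as params(): "(101,102,103)"
        }
        params.append(")");
        String columns = "id,type,state,business_key"; // abbreviated; the real list comes from metadata
        System.out.println("insert into nflow_archive_workflow(" + columns + ") "
            + "select " + columns + " from nflow_workflow where id in " + params);
      }
    }

Interpolating the ids directly into the SQL is acceptable here because they come from listArchivableWorkflows(), not from user input.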
diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
new file mode 100644
index 000000000..8f06f40cf
--- /dev/null
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
@@ -0,0 +1,27 @@
+package com.nitorcreations.nflow.engine.service;
+
+import java.util.List;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.joda.time.DateTime;
+
+import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao;
+
+@Named
+public class ArchiveService {
+  @Inject
+  private ArchiveDao archiveDao;
+
+  public int archiveWorkflows(DateTime olderThan, int batchSize) {
+    List<Integer> workflowIds;
+    int archivedWorkflows = 0;
+    do {
+      workflowIds = archiveDao.listArchivableWorkflows(olderThan, batchSize);
+      archiveDao.archiveWorkflows(workflowIds);
+      archivedWorkflows += workflowIds.size();
+    } while(!workflowIds.isEmpty());
+
+    return archivedWorkflows;
+  }
+}
diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/WorkflowDefinitionServiceWithSpringTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/WorkflowDefinitionServiceWithSpringTest.java
index 0eb87ab52..90f8df9c4 100644
--- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/WorkflowDefinitionServiceWithSpringTest.java
+++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/WorkflowDefinitionServiceWithSpringTest.java
@@ -7,6 +7,7 @@
 
 import java.util.List;
 
+import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao;
 import com.nitorcreations.nflow.engine.internal.workflow.WorkflowInstancePreProcessor;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -77,6 +78,11 @@ public StatisticsDao statisticsDao() {
     public WorkflowInstancePreProcessor preProcessor() {
       return mock(WorkflowInstancePreProcessor.class);
     }
+
+    @Bean
+    public ArchiveDao archiveDao() {
+      return mock(ArchiveDao.class);
+    }
   }
 
   @Autowired

From b90a49c039c5c7b47cf7508eb5f3d66e67c12094 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?=
Date: Tue, 2 Jun 2015 20:32:24 +0300
Subject: [PATCH 02/51] Create new database fields and indexes

---
 .../src/main/resources/scripts/db/h2.create.ddl.sql           | 6 +++++-
 .../src/main/resources/scripts/db/mysql.create.ddl.sql        | 6 +++++-
 .../main/resources/scripts/db/mysql.legacy.create.ddl.sql     | 6 +++++-
 .../src/main/resources/scripts/db/postgresql.create.ddl.sql   | 6 +++++-
 .../resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql     | 4 ++++
 .../scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql     | 5 +++++
 .../scripts/db/update-1.3.0-x/mysql.update.ddl.sql            | 4 ++++
 .../scripts/db/update-1.3.0-x/postgresql.update.ddl.sql       | 5 +++++
 8 files changed, 38 insertions(+), 4 deletions(-)

diff --git a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql
index 83e45116f..993d66111 100644
--- a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql
@@ -2,6 +2,7 @@ create table if not exists nflow_workflow (
   id int not null auto_increment primary key,
   status varchar(32) not null check status in ('created', 'executing', 'inProgress', 'finished', 'manual'),
   type varchar(64) not null,
+  root_workflow_id integer default null,
   parent_workflow_id integer default null,
   parent_action_id integer default null,
   business_key varchar(64),
@@ -20,7 +21,7 @@ create trigger if not exists nflow_workflow_modified before update on nflow_workflow
 
 create unique index if not exists nflow_workflow_uniq on nflow_workflow (type, external_id, executor_group);
 
-create index if not exists nflow_workflow_next_activation on nflow_workflow(next_activation);
+create index if not exists nflow_workflow_next_activation on nflow_workflow(next_activation, modified);
 
 create table if not exists nflow_workflow_action (
   id int not null auto_increment primary key,
@@ -38,6 +39,9 @@ create table if not exists nflow_workflow_action (
 alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
+
 create table if not exists nflow_workflow_state (
   workflow_id int not null,
   action_id int not null,
diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql
index 966156203..1644c49c5 100644
--- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql
@@ -2,6 +2,7 @@ create table if not exists nflow_workflow (
   id int not null auto_increment primary key,
   status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null,
   type varchar(64) not null,
+  root_workflow_id integer default null,
   parent_workflow_id integer default null,
   parent_action_id integer default null,
   business_key varchar(64),
@@ -16,7 +17,7 @@ create table if not exists nflow_workflow (
   modified timestamp(3) not null default current_timestamp(3) on update current_timestamp(3),
   executor_group varchar(64) not null,
   constraint nflow_workflow_uniq unique (type, external_id, executor_group),
-  index nflow_workflow(next_activation)
+  index nflow_workflow(next_activation, modified)
 );
 
 create table if not exists nflow_workflow_action (
@@ -35,6 +36,9 @@ create table if not exists nflow_workflow_action (
 alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
+
 create table if not exists nflow_workflow_state (
   workflow_id int not null,
   action_id int not null,
diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql
index 507fbe2df..e3cf67e31 100644
--- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql
@@ -2,6 +2,7 @@ create table if not exists nflow_workflow (
   id int not null auto_increment primary key,
   status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null,
   type varchar(64) not null,
+  root_workflow_id integer default null,
   parent_workflow_id integer default null,
   parent_action_id integer default null,
   business_key varchar(64),
@@ -16,7 +17,7 @@ create table if not exists nflow_workflow (
   modified timestamp not null default current_timestamp on update current_timestamp,
   executor_group varchar(64) not null,
   constraint nflow_workflow_uniq unique (type, external_id, executor_group),
-  index nflow_workflow(next_activation)
+  index nflow_workflow(next_activation, modified)
 );
 
 drop trigger if exists nflow_workflow_insert;
@@ -40,6 +41,9 @@ create table if not exists nflow_workflow_action (
 alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
+
 create table if not exists nflow_workflow_state (
   workflow_id int not null,
   action_id int not null,
diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql
index d7b9be541..d7f6f532e 100644
--- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql
@@ -3,6 +3,7 @@ create table if not exists nflow_workflow (
   id serial primary key,
   status workflow_status not null,
   type varchar(64) not null,
+  root_workflow_id integer default null,
   parent_workflow_id integer default null,
   parent_action_id integer default null,
   business_key varchar(64),
@@ -30,7 +31,7 @@ drop trigger if exists update_nflow_modified on nflow_workflow;
 create trigger update_nflow_modified before update on nflow_workflow for each row execute procedure update_modified();
 
 drop index nflow_workflow_activation;
-create index nflow_workflow_activation on nflow_workflow(next_activation) where next_activation is not null;
+create index nflow_workflow_activation on nflow_workflow(next_activation, modified);
 
 create type action_type as enum ('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange');
 create table if not exists nflow_workflow_action (
@@ -50,6 +51,9 @@ alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
+
 create table if not exists nflow_workflow_state (
   workflow_id int not null,
   action_id int not null,
diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql
index ad0e1a20b..238ab434d 100644
--- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql
@@ -1,3 +1,4 @@
+alter table nflow_workflow add root_workflow_id integer default null;
 alter table nflow_workflow add parent_workflow_id integer default null;
 alter table nflow_workflow add parent_action_id integer default null;
 alter table nflow_workflow add external_next_activation timestamp default null;
@@ -6,3 +7,6 @@ alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
 alter table nflow_executor alter column host varchar(253) not null;
+
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql
index ce10d5189..cee2582a8 100644
--- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql
@@ -1,3 +1,4 @@
+alter table nflow_workflow add root_workflow_id integer default null;
 alter table nflow_workflow add parent_workflow_id integer default null;
 alter table nflow_workflow add parent_action_id integer default null;
 alter table nflow_workflow add external_next_activation timestamp default null;
@@ -6,3 +7,7 @@ alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
 alter table nflow_executor modify host varchar(253) not null;
+
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
+
diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql
index 621787036..d8f790b53 100644
--- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql
@@ -1,3 +1,4 @@
+alter table nflow_workflow add root_workflow_id integer default null;
 alter table nflow_workflow add parent_workflow_id integer default null;
 alter table nflow_workflow add parent_action_id integer default null;
 alter table nflow_workflow add external_next_activation timestamp(3) default null;
@@ -6,3 +7,6 @@ alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
 alter table nflow_executor modify host varchar(253) not null;
+
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql
index 91336b75a..ce85e19ec 100644
--- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql
@@ -1,3 +1,4 @@
+alter table nflow_workflow add root_workflow_id integer default null;
 alter table nflow_workflow add parent_workflow_id integer default null;
 alter table nflow_workflow add parent_action_id integer default null;
 alter table nflow_workflow add external_next_activation timestamptz default null;
@@ -6,3 +7,7 @@ alter table nflow_workflow add constraint fk_workflow_parent
   foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade;
 
 alter table nflow_executor alter host varchar(253) not null;
+
+alter table nflow_workflow add constraint fk_workflow_root
+  foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade;
+
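With root_workflow_id present on every database, the archivability rule that patch 01's SQL encodes can be restated as plain logic: a workflow may be archived once it is passive (no next_activation), older than the cut-off, and nothing in its hierarchy is newer or still scheduled. A standalone sketch of that rule — the Workflow holder type here is illustrative, not part of nFlow:

    import java.util.Arrays;
    import java.util.List;

    // Illustrative only: restates the where-clause of listArchivableWorkflows().
    public class ArchivableRule {
      static class Workflow {
        Long nextActivation; // epoch millis, null when passive
        long modified;       // epoch millis
      }

      static boolean archivable(Workflow parent, List<Workflow> children, long before) {
        if (parent.nextActivation != null || parent.modified > before) {
          return false; // still active, or touched too recently
        }
        for (Workflow child : children) {
          if (child.modified > before || child.nextActivation != null) {
            return false; // some descendant is newer or still scheduled
          }
        }
        return true;
      }

      public static void main(String[] args) {
        Workflow parent = new Workflow();
        parent.modified = 100;
        Workflow child = new Workflow();
        child.modified = 150;
        child.nextActivation = 200L;
        System.out.println(archivable(parent, Arrays.asList(child), 1000)); // false: child is still scheduled
      }
    }

Note that the SQL compares children against the same cut-off timestamp rather than against the parent's modified time, which is why the prepared statement binds toTimestamp(before) twice.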
From 85c48c1be59f180b4ae20262563fcfbba065ecc0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?=
Date: Tue, 2 Jun 2015 22:53:37 +0300
Subject: [PATCH 03/51] Store nflow_workflow.root_workflow_id value

---
 .../internal/dao/WorkflowInstanceDao.java     | 46 ++++++++--------
 .../storage/db/DatabaseConfiguration.java     |  2 +-
 .../workflow/instance/WorkflowInstance.java   | 19 +++++++
 .../internal/dao/WorkflowInstanceDaoTest.java | 53 +++++++++++++++++--
 4 files changed, 94 insertions(+), 26 deletions(-)

diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java
index 13bd49e3f..43367cb0e 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java
@@ -137,9 +137,10 @@ public int insertWorkflowInstance(WorkflowInstance instance) {
   private int insertWorkflowInstanceWithCte(WorkflowInstance instance) {
     StringBuilder sqlb = new StringBuilder(256);
     sqlb.append("with wf as (" + insertWorkflowInstanceSql() + " returning id)");
-    Object[] instanceValues = new Object[] { instance.type, instance.parentWorkflowId, instance.parentActionId,
-        instance.businessKey, instance.externalId, executorInfo.getExecutorGroup(), instance.status.name(), instance.state,
-        abbreviate(instance.stateText, instanceStateTextLength), toTimestamp(instance.nextActivation) };
+    Object[] instanceValues = new Object[] { instance.type, instance.rootWorkflowId, instance.parentWorkflowId,
+        instance.parentActionId, instance.businessKey, instance.externalId, executorInfo.getExecutorGroup(),
+        instance.status.name(), instance.state, abbreviate(instance.stateText, instanceStateTextLength),
+        toTimestamp(instance.nextActivation) };
     int pos = instanceValues.length;
     Object[] args = Arrays.copyOf(instanceValues, pos + instance.stateVariables.size() * 2);
     for (Entry<String, String> var : instance.stateVariables.entrySet()) {
@@ -153,8 +154,8 @@ private int insertWorkflowInstanceWithCte(WorkflowInstance instance) {
   }
 
   String insertWorkflowInstanceSql() {
-    return "insert into nflow_workflow(type, parent_workflow_id, parent_action_id, business_key, external_id, "
-        + "executor_group, status, state, state_text, next_activation) values (?, ?, ?, ?, ?, ?, " + sqlVariants.workflowStatus()
+    return "insert into nflow_workflow(type, root_workflow_id, parent_workflow_id, parent_action_id, business_key, external_id, "
+        + "executor_group, status, state, state_text, next_activation) values (?, ?, ?, ?, ?, ?, ?, " + sqlVariants.workflowStatus()
         + ", ?, ?, ?)";
   }
 
@@ -175,6 +176,7 @@ public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
       int p = 1;
       ps = connection.prepareStatement(insertWorkflowInstanceSql(), new String[] { "id" });
       ps.setString(p++, instance.type);
+      ps.setObject(p++, instance.rootWorkflowId);
       ps.setObject(p++, instance.parentWorkflowId);
       ps.setObject(p++, instance.parentActionId);
       ps.setString(p++, instance.businessKey);
@@ -216,20 +218,20 @@ void insertVariables(final int id, final int actionId, Map<String, String> stateVariables,
     }
     final Iterator<Entry<String, String>> variables = changedStateVariables.entrySet().iterator();
     int[] updateStatus = jdbc.batchUpdate(insertWorkflowInstanceStateSql() + " values (?,?,?,?)",
-            new AbstractInterruptibleBatchPreparedStatementSetter() {
-              @Override
-              protected boolean setValuesIfAvailable(PreparedStatement ps, int i) throws SQLException {
-                if (!variables.hasNext()) {
-                  return false;
-                }
-                Entry<String, String> var = variables.next();
-                ps.setInt(1, id);
-                ps.setInt(2, actionId);
-                ps.setString(3, var.getKey());
-                ps.setString(4, var.getValue());
-                return true;
-              }
-            });
+        new AbstractInterruptibleBatchPreparedStatementSetter() {
+          @Override
+          protected boolean setValuesIfAvailable(PreparedStatement ps, int i) throws SQLException {
+            if (!variables.hasNext()) {
+              return false;
+            }
+            Entry<String, String> var = variables.next();
+            ps.setInt(1, id);
+            ps.setInt(2, actionId);
+            ps.setString(3, var.getKey());
+            ps.setString(4, var.getValue());
+            return true;
+          }
+        });
     int updatedRows = 0;
     boolean unknownResults = false;
     for (int i = 0; i < updateStatus.length; ++i) {
@@ -276,8 +278,9 @@ protected void doInTransactionWithoutResult(TransactionStatus status) {
         updateWorkflowInstance(instance);
         int parentActionId = insertWorkflowInstanceAction(instance, action);
         for (WorkflowInstance childTemplate : childWorkflows) {
-          WorkflowInstance childWorkflow = new WorkflowInstance.Builder(childTemplate).setParentWorkflowId(instance.id)
-              .setParentActionId(parentActionId).build();
+          Integer rootWorkflowId = instance.rootWorkflowId != null ? instance.rootWorkflowId : instance.id;
+          WorkflowInstance childWorkflow = new WorkflowInstance.Builder(childTemplate).setRootWorkflowId(rootWorkflowId)
+              .setParentWorkflowId(instance.id).setParentActionId(parentActionId).build();
           insertWorkflowInstance(childWorkflow);
         }
       }
@@ -639,6 +642,7 @@ public WorkflowInstance mapRow(ResultSet rs, int rowNum) throws SQLException {
       return new WorkflowInstance.Builder()
           .setId(rs.getInt("id"))
          .setExecutorId(getInt(rs, "executor_id"))
+          .setRootWorkflowId(getInt(rs, "root_workflow_id"))
          .setParentWorkflowId(getInt(rs, "parent_workflow_id"))
          .setParentActionId(getInt(rs, "parent_action_id"))
          .setStatus(WorkflowInstanceStatus.valueOf(rs.getString("status")))
diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/DatabaseConfiguration.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/DatabaseConfiguration.java
index 53960e7f7..0413bd596 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/DatabaseConfiguration.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/DatabaseConfiguration.java
@@ -22,7 +22,7 @@
 import com.zaxxer.hikari.HikariDataSource;
 
 public abstract class DatabaseConfiguration {
-  private static final String NFLOW_DATABASE_INITIALIZER = "nflowDatabaseInitializer";
+  public static final String NFLOW_DATABASE_INITIALIZER = "nflowDatabaseInitializer";
   private static final Logger logger = getLogger(DatabaseConfiguration.class);
 
   private final String dbType;
diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java
index 8451f346b..720589e12 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java
@@ -42,6 +42,12 @@ public static enum WorkflowInstanceStatus {
    */
   public final Integer executorId;
 
+  /**
+   * The id of the root workflow of the hierarchy that this sub workflow belongs to.
+   * Null for top-level workflows.
+   */
+  public final Integer rootWorkflowId;
+
   /**
    * The id of the workflow that created this sub workflow. Is null for parent workflows.
    */
@@ -136,6 +142,7 @@ public static enum WorkflowInstanceStatus {
   WorkflowInstance(Builder builder) {
     this.id = builder.id;
     this.executorId = builder.executorId;
+    this.rootWorkflowId = builder.rootWorkflowId;
     this.parentWorkflowId = builder.parentWorkflowId;
     this.parentActionId = builder.parentActionId;
     this.status = builder.status;
@@ -168,6 +175,7 @@ public static class Builder {
 
     Integer id;
     Integer executorId;
+    Integer rootWorkflowId;
     Integer parentWorkflowId;
     Integer parentActionId;
     WorkflowInstanceStatus status;
@@ -210,6 +218,7 @@ public Builder(ObjectStringMapper objectMapper) {
     public Builder(WorkflowInstance copy) {
       this.id = copy.id;
       this.executorId = copy.executorId;
+      this.rootWorkflowId = copy.rootWorkflowId;
       this.parentWorkflowId = copy.parentWorkflowId;
       this.parentActionId = copy.parentActionId;
       this.status = copy.status;
@@ -249,6 +258,16 @@ public Builder setExecutorId(Integer executorId) {
       return this;
     }
 
+    /**
+     * Set the root workflow identifier.
+     * @param rootWorkflowId The identifier.
+     * @return this
+     */
+    public Builder setRootWorkflowId(Integer rootWorkflowId) {
+      this.rootWorkflowId = rootWorkflowId;
+      return this;
+    }
+
     /**
      * Set the parent workflow identifier.
      * @param parentWorkflowId The identifier.
diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java
index 82074a53d..7071c1b3d 100644
--- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java
+++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java
@@ -148,7 +148,7 @@ public void processRow(ResultSet rs) throws SQLException {
   }
 
   @Test
-  public void updateWorkflowInstanceWithChildWorkflowsWorks() {
+  public void updateWorkflowInstanceWithRootWorkflowAndChildWorkflowsWorks() {
     WorkflowInstance i1 = constructWorkflowInstanceBuilder().setStatus(created).build();
     int id = dao.insertWorkflowInstance(i1);
     WorkflowInstance i2 = new WorkflowInstance.Builder(dao.getWorkflowInstance(id)).setStatus(inProgress).setState("updateState")
@@ -164,6 +164,50 @@ public void updateWorkflowInstanceWithChildWorkflowsWorks() {
     for (List<Integer> childIds : childWorkflows.values()) {
       assertThat(childIds.size(), is(1));
       WorkflowInstance childInstance = dao.getWorkflowInstance(childIds.get(0));
+      assertThat(childInstance.rootWorkflowId, is(id));
       assertThat(childInstance.parentWorkflowId, is(id));
       assertThat(childInstance.businessKey, is("childKey"));
     }
   }
+
+  @Test
+  public void updateWorkflowInstanceWithNonRootWorkflowAndChildWorkflowsWorks() {
+    // create a 3-level hierarchy of workflows
+    WorkflowInstance i1 = constructWorkflowInstanceBuilder().setStatus(created).build();
+    int id = dao.insertWorkflowInstance(i1);
+    WorkflowInstance i2 = new WorkflowInstance.Builder(dao.getWorkflowInstance(id)).setStatus(inProgress).setState("updateState")
+        .setStateText("update text").setNextActivation(now()).build();
+    DateTime started = now();
+    WorkflowInstanceAction a1 = new WorkflowInstanceAction.Builder().setExecutionStart(started).setExecutorId(42)
+        .setExecutionEnd(started.plusMillis(100)).setRetryNo(1).setState("test").setStateText("state text")
+        .setWorkflowInstanceId(id).setType(stateExecution).build();
+
+    WorkflowInstance middleWorkflow = constructWorkflowInstanceBuilder().setBusinessKey("middleKey").build();
+
+    dao.updateWorkflowInstanceAfterExecution(i2, a1, asList(middleWorkflow));
+
+    int middleWorkflowId = -1;
+    for (List<Integer> childIds : dao.getWorkflowInstance(id).childWorkflows.values()) {
+      middleWorkflowId = childIds.get(0);
+    }
+
+    middleWorkflow = new WorkflowInstance.Builder(dao.getWorkflowInstance(middleWorkflowId)).setStatus(inProgress).setState("updateState")
+        .setStateText("update text").setNextActivation(now()).build();
+
+    WorkflowInstanceAction middleAction = new WorkflowInstanceAction.Builder().setExecutionStart(started).setExecutorId(42)
+        .setExecutionEnd(started.plusMillis(100)).setRetryNo(1).setState("test").setStateText("state text")
+        .setWorkflowInstanceId(middleWorkflow.id).setType(stateExecution).build();
+
+    WorkflowInstance childWorkflow = constructWorkflowInstanceBuilder().setBusinessKey("childKey").build();
+    dao.updateWorkflowInstanceAfterExecution(middleWorkflow, middleAction, asList(childWorkflow));
+
+    Map<Integer, List<Integer>> childWorkflows = dao.getWorkflowInstance(middleWorkflowId).childWorkflows;
+    assertThat(childWorkflows.size(), is(1));
+    for (List<Integer> childIds : childWorkflows.values()) {
+      assertThat(childIds.size(), is(1));
+      WorkflowInstance childInstance = dao.getWorkflowInstance(childIds.get(0));
+      assertThat(childInstance.rootWorkflowId, is(id));
+      assertThat(childInstance.parentWorkflowId, is(middleWorkflowId));
+      assertThat(childInstance.businessKey, is("childKey"));
+    }
+  }
@@ -351,18 +395,19 @@ public void fakePostgreSQLinsertWorkflowInstance() {
     DateTime started = DateTime.now();
     WorkflowInstance wf = new WorkflowInstance.Builder().setStatus(inProgress).setState("updateState")
-        .setStateText("update text").setParentWorkflowId(110).setParentActionId(421)
+        .setStateText("update text").setRootWorkflowId(9283).setParentWorkflowId(110).setParentActionId(421)
         .setNextActivation(started.plusSeconds(1)).setRetries(3).setId(43).putStateVariable("A", "B")
         .putStateVariable("C", "D").build();
 
     d.insertWorkflowInstance(wf);
     assertEquals(
-        "with wf as (insert into nflow_workflow(type, parent_workflow_id, parent_action_id, business_key, external_id, executor_group, status, state, state_text, next_activation) values (?, ?, ?, ?, ?, ?, ?::workflow_status, ?, ?, ?) returning id), ins10 as (insert into nflow_workflow_state(workflow_id, action_id, state_key, state_value) select wf.id,0,?,? from wf), ins12 as (insert into nflow_workflow_state(workflow_id, action_id, state_key, state_value) select wf.id,0,?,? from wf) select wf.id from wf",
+        "with wf as (insert into nflow_workflow(type, root_workflow_id, parent_workflow_id, parent_action_id, business_key, external_id, executor_group, status, state, state_text, next_activation) values (?, ?, ?, ?, ?, ?, ?, ?::workflow_status, ?, ?, ?) returning id), ins11 as (insert into nflow_workflow_state(workflow_id, action_id, state_key, state_value) select wf.id,0,?,? from wf), ins13 as (insert into nflow_workflow_state(workflow_id, action_id, state_key, state_value) select wf.id,0,?,? from wf) select wf.id from wf",
         sql.getValue());
     assertThat(args.getAllValues().size(), is(countMatches(sql.getValue(), "?")));
     int i = 0;
     assertThat(args.getAllValues().get(i++), is((Object) wf.type));
+    assertThat(args.getAllValues().get(i++), is((Object) wf.rootWorkflowId));
     assertThat(args.getAllValues().get(i++), is((Object) wf.parentWorkflowId));
     assertThat(args.getAllValues().get(i++), is((Object) wf.parentActionId));
     assertThat(args.getAllValues().get(i++), is((Object) wf.businessKey));
@@ -453,7 +498,7 @@ public void pollNextWorkflowInstancesWithPartialRaceCondition() throws InterruptedException {
     threads[1].join();
     assertThat(pollers[0].returnSize + pollers[1].returnSize, is(batchSize));
     assertTrue("Race condition should happen", pollers[0].detectedRaceCondition || pollers[1].detectedRaceCondition
-            || (pollers[0].returnSize < batchSize && pollers[1].returnSize < batchSize));
+        || (pollers[0].returnSize < batchSize && pollers[1].returnSize < batchSize));
   }
 
   @Test
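The invariant that the two new tests exercise — a grandchild's root_workflow_id points at the top of the hierarchy, not at its immediate parent — comes from the single line added to updateWorkflowInstanceAfterExecution above. A standalone sketch of the rule:

    // Minimal sketch of the root-id propagation rule used when inserting child workflows:
    // a child's root is its parent's root, or the parent itself when the parent is a root.
    public class RootIdRule {
      static Integer rootFor(Integer parentRootWorkflowId, int parentId) {
        return parentRootWorkflowId != null ? parentRootWorkflowId : parentId;
      }

      public static void main(String[] args) {
        int root = 1;                              // level 1: its own rootWorkflowId is null
        Integer middleRoot = rootFor(null, root);  // level 2: root = 1
        Integer leafRoot = rootFor(middleRoot, 2); // level 3: root = 1, not 2
        System.out.println(middleRoot + " " + leafRoot); // prints "1 1"
      }
    }

Keeping the root id flat like this lets the archiving query find a whole hierarchy with a single child.root_workflow_id = parent.id condition instead of a recursive walk.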
from wf) select wf.id from wf", sql.getValue()); assertThat(args.getAllValues().size(), is(countMatches(sql.getValue(), "?"))); int i = 0; assertThat(args.getAllValues().get(i++), is((Object) wf.type)); + assertThat(args.getAllValues().get(i++), is((Object) wf.rootWorkflowId)); assertThat(args.getAllValues().get(i++), is((Object) wf.parentWorkflowId)); assertThat(args.getAllValues().get(i++), is((Object) wf.parentActionId)); assertThat(args.getAllValues().get(i++), is((Object) wf.businessKey)); @@ -453,7 +498,7 @@ public void pollNextWorkflowInstancesWithPartialRaceCondition() throws Interrupt threads[1].join(); assertThat(pollers[0].returnSize + pollers[1].returnSize, is(batchSize)); assertTrue("Race condition should happen", pollers[0].detectedRaceCondition || pollers[1].detectedRaceCondition - || (pollers[0].returnSize < batchSize && pollers[1].returnSize < batchSize)); + || (pollers[0].returnSize < batchSize && pollers[1].returnSize < batchSize)); } @Test From 5a028c0d6198d73b9aca473168df5dfc6794bb5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Thu, 9 Jul 2015 20:54:31 +0300 Subject: [PATCH 04/51] Add archive tables --- .../nflow/engine/internal/dao/ArchiveDao.java | 3 - .../resources/scripts/db/h2.create.ddl.sql | 58 ++++++++++++++++++ .../resources/scripts/db/mysql.create.ddl.sql | 57 +++++++++++++++++ .../scripts/db/mysql.legacy.create.ddl.sql | 58 ++++++++++++++++++ .../scripts/db/postgresql.create.ddl.sql | 61 +++++++++++++++++++ 5 files changed, 234 insertions(+), 3 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index 84d76324f..30d005d5b 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -42,10 +42,7 @@ public List listArchivableWorkflows(DateTime before, int maxRows){ "limit " + maxRows, new Object[]{DaoUtil.toTimestamp(before), DaoUtil.toTimestamp(before)}, new ArchivableWorkflowsRowMapper()); - // TODO create tables nflow_archive_workflow, nflow_archive_workflow_action, nflow_archive_workflow_state // TODO add index to nflow_workflow.modified - // TODO add nflow_workflow.root_workflow_id (indexed, foreign key => nflow_workflow.id) - // TODO modify childworkflows to set nflow_workflow.root_workflow_id } private static class ArchivableWorkflowsRowMapper implements RowMapper { diff --git a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql index 993d66111..e8393c42c 100644 --- a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql @@ -1,3 +1,4 @@ +-- Production tables create table if not exists nflow_workflow ( id int not null auto_increment primary key, status varchar(32) not null check status in ('created', 'executing', 'inProgress', 'finished', 'manual'), @@ -71,3 +72,60 @@ create table if not exists nflow_workflow_definition ( executor_group varchar(64) not null, primary key (type, executor_group) ); + +-- Archive tables +-- - no default values +-- - no triggers +-- - same indexes and constraints as production tables + +create table if not exists nflow_archive_workflow ( + id int not null auto_increment primary key, + status varchar(32) not null check status in ('created', 'executing', 'inProgress', 
'finished', 'manual'), + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp, + external_next_activation timestamp, + executor_id int, + retries int not null, + created timestamp not null, + modified timestamp not null, + executor_group varchar(64) not null +); + +create unique index if not exists nflow_archive_workflow_uniq on nflow_archive_workflow (type, external_id, executor_group); + +create index if not exists nflow_archive_workflow_next_activation on nflow_archive_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow_action ( + id int not null auto_increment primary key, + workflow_id int not null, + executor_id int not null, + type varchar(32) not null check type in ('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange'), + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp not null, + execution_end timestamp not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + +alter table nflow_archive_workflow add constraint fk_workflow_archive_parent + foreign key (parent_workflow_id, parent_action_id) references nflow_archive_workflow_action (workflow_id, id) on delete cascade; + +alter table nflow_archive_workflow add constraint fk_workflow_archive_root + foreign key (root_workflow_id) references nflow_archive_workflow (id) on delete cascade; + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index 1644c49c5..7cf964bb5 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -1,3 +1,4 @@ +-- Production tables create table if not exists nflow_workflow ( id int not null auto_increment primary key, status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, @@ -68,3 +69,59 @@ create table if not exists nflow_workflow_definition ( executor_group varchar(64) not null, primary key (type, executor_group) ); + +-- Archive tables +-- - no default values +-- - no triggers +-- - same indexes and constraints as production tables + +create table if not exists nflow_archive_workflow ( + id int not null auto_increment primary key, + status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp(3) null, + external_next_activation timestamp(3) null, + executor_id int, + retries int not null default 0, + created timestamp(3) not null, + modified timestamp(3) not null, + executor_group varchar(64) not null, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group), + index nflow_archive_workflow(next_activation, modified) 
+); + +create table if not exists nflow_archive_workflow_action ( + id int not null auto_increment primary key, + workflow_id int not null, + executor_id int not null, + type enum('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange') not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp(3) not null, + execution_end timestamp(3) not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + +alter table nflow_archive_workflow add constraint fk_archive_workflow_parent + foreign key (parent_workflow_id, parent_action_id) references nflow_archive_workflow_action (workflow_id, id) on delete cascade; + +alter table nflow_archive_workflow add constraint fk_archive_workflow_root + foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index e3cf67e31..d962073f3 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -1,3 +1,4 @@ +-- Production tables create table if not exists nflow_workflow ( id int not null auto_increment primary key, status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, @@ -78,3 +79,60 @@ drop trigger if exists nflow_workflow_definition_insert; create trigger nflow_workflow_definition_insert before insert on `nflow_workflow_definition` for each row set new.created = now(); + +-- Archive tables +-- - no default values +-- - no triggers +-- - same indexes and constraints as production tables + +create table if not exists nflow_archive_workflow ( + id int not null auto_increment primary key, + status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp null, + external_next_activation timestamp null, + executor_id int, + retries int not null default 0, + created timestamp not null, + modified timestamp not null, + executor_group varchar(64) not null, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group), + index nflow_archive_workflow(next_activation, modified) +); + +create table if not exists nflow_archive_workflow_action ( + id int not null auto_increment primary key, + workflow_id int not null, + executor_id int not null default -1, + type enum('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange') not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp not null, + execution_end timestamp not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + +alter table nflow_archive_workflow add constraint fk_archive_workflow_parent + foreign key (parent_workflow_id, parent_action_id) references 
nflow_archive_workflow_action (workflow_id, id) on delete cascade; + +alter table nflow_archive_workflow add constraint fk_archive_workflow_root + foreign key (root_archive_workflow_id) references nflow_archive_workflow (id) on delete cascade; + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + + diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index d7f6f532e..b14fbf533 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -1,3 +1,5 @@ +-- production tables + create type workflow_status as enum ('created', 'executing', 'inProgress', 'finished', 'manual'); create table if not exists nflow_workflow ( id serial primary key, @@ -86,3 +88,62 @@ create table if not exists nflow_workflow_definition ( drop trigger if exists update_nflow_definition_modified on nflow_workflow_definition; create trigger update_nflow_definition_modified before update on nflow_workflow_definition for each row execute procedure update_modified(); + + +-- Archive tables +-- - no default values +-- - no triggers +-- - same indexes and constraints as production tables + +create table if not exists nflow_archive_workflow ( + id serial primary key, + status workflow_status not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamptz, + external_next_activation timestamptz, + executor_id int, + retries int not null default 0, + created timestamptz not null, + modified timestamptz not null, + executor_group varchar(64) not null, + constraint nflow_workflow_uniq unique (type, external_id, executor_group) +); + +drop index nflow_archive_workflow_activation; +create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow_action ( + id serial primary key, + workflow_id int not null, + executor_id int not null, + type action_type not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamptz not null, + execution_end timestamptz not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade, + constraint nflow_archive_workflow_action_uniq unique (workflow_id, id) +); + +alter table nflow_archive_workflow add constraint fk_archive_workflow_parent + foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade; + +alter table nflow_archive_workflow add constraint fk_archive_workflow_root + foreign key (root_workflow_id) references nflow_archive_workflow (id) on delete cascade; + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value text not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); \ No newline at end of 
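The next three patches (05-07) adjust the modified-column triggers in H2 and PostgreSQL so that an UPDATE which explicitly assigns modified keeps its value — the archiving tests rely on this to backdate rows. The rule both triggers implement, shown in isolation (the class and method names here are illustrative):

    import java.sql.Timestamp;
    import java.util.Objects;

    // Sketch of the trigger rule from patches 05-07: bump `modified` to now() only
    // when the UPDATE statement did not change it; an explicitly set value is kept.
    public class ModifiedColumnRule {
      static Timestamp effectiveModified(Timestamp oldModified, Timestamp newModified) {
        if (Objects.equals(oldModified, newModified)) {
          return new Timestamp(System.currentTimeMillis()); // value untouched: stamp current time
        }
        return newModified; // caller set it explicitly (e.g. a test backdating a row): keep it
      }

      public static void main(String[] args) {
        Timestamp old = Timestamp.valueOf("2015-07-08 21:28:00");
        Timestamp backdated = Timestamp.valueOf("2015-01-01 00:00:00");
        System.out.println(effectiveModified(old, old));       // replaced with the current time
        System.out.println(effectiveModified(old, backdated)); // 2015-01-01 00:00:00.0
      }
    }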
file From f41ce970222df5542636dcef0d43b25abf8741e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Fri, 10 Jul 2015 00:03:04 +0300 Subject: [PATCH 05/51] Allow changing modified field via insert/update in H2 --- .../nflow/engine/internal/dao/ArchiveDaoTest.java | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java new file mode 100644 index 000000000..30ca94653 --- /dev/null +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java @@ -0,0 +1,7 @@ +package com.nitorcreations.nflow.engine.internal.dao; + +/** + * Created by jsyrjala on 09/07/15. + */ +public class ArchiveDaoTest { +} From b84ddd835eb4f17978f6da25b17c1f5592058cd8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Fri, 10 Jul 2015 00:05:05 +0300 Subject: [PATCH 06/51] Implement tests for archiving. Archiving workflow rows is now tested. --- .../nflow/engine/internal/dao/ArchiveDao.java | 15 +- .../storage/db/H2ModifiedColumnTrigger.java | 7 +- .../engine/internal/dao/ArchiveDaoTest.java | 159 +++++++++++++++++- .../internal/dao/DaoTestConfiguration.java | 5 + 4 files changed, 175 insertions(+), 11 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index 30d005d5b..d6159a1cd 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -5,7 +5,6 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; -import java.util.ArrayList; import java.util.LinkedList; import java.util.List; @@ -32,7 +31,7 @@ public void setJdbcTemplate(@NFlow JdbcTemplate jdbcTemplate) { this.jdbc = jdbcTemplate; } - public List listArchivableWorkflows(DateTime before, int maxRows){ + public List listArchivableWorkflows(DateTime before, int maxRows) { return jdbc.query("select * from nflow_workflow parent where parent.next_activation is null and parent.modified <= ? " + "and not exists(" + " select 1 from nflow_workflow child where child.root_workflow_id = parent.id " + @@ -42,7 +41,11 @@ public List listArchivableWorkflows(DateTime before, int maxRows){ "limit " + maxRows, new Object[]{DaoUtil.toTimestamp(before), DaoUtil.toTimestamp(before)}, new ArchivableWorkflowsRowMapper()); - // TODO add index to nflow_workflow.modified + // TODO add index to nflow_workflow.modified (combined index with next_activation?) 
+ // TODO change modified trigger for postgre + // TODO add new triggers for h2 and postgre to update scripts + // TODO handle recursive dependency between workflow and action somehow + // TODO implement method to check that archive and prod tables have matching fields } private static class ArchivableWorkflowsRowMapper implements RowMapper { @@ -69,7 +72,7 @@ private void archiveWorkflowTable(String workflowIdParams) { } private void archiveActionTable(String workflowIdParams) { - String columns = columnsFromMetadata("nflow_action"); + String columns = columnsFromMetadata("nflow_workflow_action"); jdbc.update("insert into nflow_archive_workflow_action(" + columns + ") " + "select " + columns + " from nflow_workflow_action where workflow_id in " + workflowIdParams); } @@ -82,10 +85,10 @@ private void archiveStateTable(String workflowIdParams) { private void deleteWorkflows(String workflowIdParams) { jdbc.update("delete from nflow_workflow_state where workflow_id in " + workflowIdParams); - jdbc.update("update nflow_workflow_action set root_workflow_id=null, parent_workflow_id=null, parent_action_id=null" + + jdbc.update("update nflow_workflow set root_workflow_id=null, parent_workflow_id=null, parent_action_id=null " + "where id in " + workflowIdParams); jdbc.update("delete from nflow_workflow_action where workflow_id in " + workflowIdParams); - jdbc.update("delete from nflow_workflow where workflow_id in " + workflowIdParams); + jdbc.update("delete from nflow_workflow where id in " + workflowIdParams); } private String columnsFromMetadata(String tableName) { diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java index 03c3cf225..9a8cf5914 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java @@ -6,12 +6,17 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; +import java.util.Objects; import org.h2.tools.TriggerAdapter; public class H2ModifiedColumnTrigger extends TriggerAdapter { @Override public void fire(Connection conn, ResultSet oldRow, ResultSet newRow) throws SQLException { - newRow.updateTimestamp("modified", new Timestamp(currentTimeMillis())); + Timestamp oldModified = oldRow.getTimestamp("modified"); + Timestamp newModified = newRow.getTimestamp("modified"); + if(Objects.equals(oldModified, newModified)) { + newRow.updateTimestamp("modified", new Timestamp(currentTimeMillis())); + } } } \ No newline at end of file diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java index 30ca94653..0e30469b9 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java @@ -1,7 +1,158 @@ package com.nitorcreations.nflow.engine.internal.dao; -/** - * Created by jsyrjala on 09/07/15. 
- */ -public class ArchiveDaoTest { +import static com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus.created; +import static org.joda.time.DateTime.now; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import javax.inject.Inject; + +import org.joda.time.DateTime; +import org.junit.Test; +import org.springframework.dao.EmptyResultDataAccessException; + +import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance; + +public class ArchiveDaoTest extends BaseDaoTest { + @Inject + ArchiveDao archiveDao; + @Inject + WorkflowInstanceDao workflowInstanceDao; + + DateTime archiveTimeLimit = new DateTime(2015,7,8, 21,28,0,0); + + DateTime archiveTime1 = archiveTimeLimit.minus(1); + DateTime archiveTime2 = archiveTimeLimit.minusMinutes(1); + DateTime archiveTime3 = archiveTimeLimit.minusHours(2); + DateTime archiveTime4 = archiveTimeLimit.minusDays(3); + + DateTime prodTime1 = archiveTimeLimit.plus(1); + DateTime prodTime2 = archiveTimeLimit.plusMinutes(1); + DateTime prodTime3 = archiveTimeLimit.plusHours(2); + DateTime prodTime4 = archiveTimeLimit.plusDays(3); + + // TODO implement tests for actions, states + // TODO implement tests for child workflows and their actions, states + + @Test + public void listingArchivableWorkflows() { + List expectedArchive = new ArrayList<>(); + + storeActiveWorkflow(archiveTime1); + storeActiveWorkflow(prodTime1); + storePassiveWorkflow(prodTime2); + + expectedArchive.add(storePassiveWorkflow(archiveTime1)); + expectedArchive.add(storePassiveWorkflow(archiveTime2)); + + List archivableIds = archiveDao.listArchivableWorkflows(archiveTimeLimit, 10); + assertEqualsInAnyOrder(expectedArchive, archivableIds); + } + + @Test + public void listingReturnsOldestRowsAndMaxBatchSizeRows() { + List expectedArchive = new ArrayList<>(); + + int eleventh = storePassiveWorkflow(archiveTime2); + + for(int i = 0; i < 9; i++){ + expectedArchive.add(storePassiveWorkflow(archiveTime4)); + } + expectedArchive.add(storePassiveWorkflow(archiveTime3)); + + storeActiveWorkflow(archiveTime1); + storeActiveWorkflow(prodTime3); + storePassiveWorkflow(prodTime4); + + List archivableIds = archiveDao.listArchivableWorkflows(archiveTimeLimit, 10); + Collections.sort(archivableIds); + assertEquals(expectedArchive, archivableIds); + + expectedArchive.add(eleventh); + archivableIds = archiveDao.listArchivableWorkflows(archiveTimeLimit, 11); + assertEqualsInAnyOrder(expectedArchive, archivableIds); + } + + @Test + public void archivingSimpleWorkflowsWorks() { + List expectedArchive = new ArrayList<>(); + + storeActiveWorkflow(archiveTime1); + storeActiveWorkflow(prodTime1); + storePassiveWorkflow(prodTime1); + + expectedArchive.add(storePassiveWorkflow(archiveTime1)); + expectedArchive.add(storePassiveWorkflow(archiveTime2)); + + archiveDao.archiveWorkflows(expectedArchive); + + assertActiveWorkflowsRemoved(expectedArchive); + assertArchiveWorkflowsExists(expectedArchive); + } + + private void assertActiveWorkflowsRemoved(List workflowIds) { + for(int id: workflowIds){ + try { + workflowInstanceDao.getWorkflowInstance(id); + fail("Expected workflow " + id + " to be removed"); + } catch(EmptyResultDataAccessException e) { + // expected exception + } + } + } + + private void assertArchiveWorkflowsExists(List workflowIds) { + for(int workflowId : workflowIds){ + Map 
archived = getArchivedWorkflow(workflowId);
+      assertEquals(workflowId, archived.get("id"));
+    }
+  }
+
+  // TODO re-implement using archive searches in daos when they are implemented
+  private Map getArchivedWorkflow(int workflowId) {
+    return jdbc.queryForMap("select * from nflow_archive_workflow where id = ?", new Object[]{workflowId});
+  }
+
+  private int storePassiveWorkflow(DateTime modified) {
+    WorkflowInstance instance = constructWorkflowInstanceBuilder().setStatus(created).setNextActivation(null).setModified(modified).build();
+    int id = insert(instance);
+    return id;
+  }
+
+  private int storeActiveWorkflow(DateTime modified) {
+    WorkflowInstance instance = constructWorkflowInstanceBuilder().setStatus(created).setNextActivation(now()).setModified(modified).build();
+    int id = insert(instance);
+    return id;
+  }
+
+  private int insert(WorkflowInstance instance) {
+    // TODO insertWorkflowInstance doesn't support storing modified date. Add some magic internal variable to enable that?
+    int id = workflowInstanceDao.insertWorkflowInstance(instance);
+    assertTrue(id > 0);
+    DateTime modified = instance.modified;
+    updateModified(id, modified);
+    WorkflowInstance dbInstance = workflowInstanceDao.getWorkflowInstance(id);
+    assertEquals(modified, dbInstance.modified);
+    return id;
+  }
+
+  private void updateModified(int workflowId, DateTime modified) {
+    int updateCount = jdbc.update("update nflow_workflow set modified = ? where id = ?",
+        new Object[]{ DaoUtil.toTimestamp(modified), workflowId });
+    assertEquals(1, updateCount);
+  }
+
+  private void assertEqualsInAnyOrder(List expected, List actual) {
+    List expectedCopy = new ArrayList<>(expected);
+    List actualCopy = new ArrayList<>(actual);
+    Collections.sort(expectedCopy);
+    Collections.sort(actualCopy);
+    assertEquals(expectedCopy, actualCopy);
+  }
 }
diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java
index d8e3efa68..9ec314860 100644
--- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java
+++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java
@@ -48,6 +48,11 @@ public StatisticsDao statisticsDao() {
     return new StatisticsDao();
   }

+  @Bean
+  public ArchiveDao archiveDao() {
+    return new ArchiveDao();
+  }
+
   @Bean
   public PlatformTransactionManager transactionManager(DataSource ds) {
     return new DataSourceTransactionManager(ds);
From fd8da94c5d66458263c823a6ac5c14564b8d41bc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?=
Date: Fri, 10 Jul 2015 16:46:36 +0300
Subject: [PATCH 07/51] Allow changing modified field via insert/update in postgres

---
 .../src/main/resources/scripts/db/postgresql.create.ddl.sql | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql
index b14fbf533..781a19e89 100644
--- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql
+++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql
@@ -24,7 +24,9 @@ create table if not exists nflow_workflow (
 create or replace function update_modified() returns trigger language plpgsql as '
 begin
-  NEW.modified := now();
+  if NEW.modified = OLD.modified then
+    NEW.modified := now();
+  end if;
   return NEW;
 end;
';
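(Note, not part of the patch: with this trigger an update that sets modified explicitly now keeps the caller's value, because NEW.modified then differs from OLD.modified when the trigger fires; an update that leaves modified untouched still gets the automatic now() stamp. The statements below are hypothetical illustrations:)

--   update nflow_workflow set modified = timestamp '2015-06-01 12:00:00' where id = 42; -- explicit value is kept
--   update nflow_workflow set state_text = 'retry' where id = 42;                      -- modified := now() still applies

From 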
e008a3a3cabba3bff98428d102b65fef674cb32f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Fri, 10 Jul 2015 17:07:38 +0300 Subject: [PATCH 08/51] Remove auto increments from database table primary key fields --- nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql | 4 ++-- .../src/main/resources/scripts/db/mysql.create.ddl.sql | 4 ++-- .../src/main/resources/scripts/db/mysql.legacy.create.ddl.sql | 4 ++-- .../src/main/resources/scripts/db/postgresql.create.ddl.sql | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql index e8393c42c..e5c26a471 100644 --- a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql @@ -79,7 +79,7 @@ create table if not exists nflow_workflow_definition ( -- - same indexes and constraints as production tables create table if not exists nflow_archive_workflow ( - id int not null auto_increment primary key, + id int not null primary key, status varchar(32) not null check status in ('created', 'executing', 'inProgress', 'finished', 'manual'), type varchar(64) not null, root_workflow_id integer, @@ -103,7 +103,7 @@ create unique index if not exists nflow_archive_workflow_uniq on nflow_archive_w create index if not exists nflow_archive_workflow_next_activation on nflow_archive_workflow(next_activation, modified); create table if not exists nflow_archive_workflow_action ( - id int not null auto_increment primary key, + id int not null primary key, workflow_id int not null, executor_id int not null, type varchar(32) not null check type in ('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange'), diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index 7cf964bb5..7c466c507 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -76,7 +76,7 @@ create table if not exists nflow_workflow_definition ( -- - same indexes and constraints as production tables create table if not exists nflow_archive_workflow ( - id int not null auto_increment primary key, + id int not null primary key, status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, type varchar(64) not null, root_workflow_id integer, @@ -98,7 +98,7 @@ create table if not exists nflow_archive_workflow ( ); create table if not exists nflow_archive_workflow_action ( - id int not null auto_increment primary key, + id int not null primary key, workflow_id int not null, executor_id int not null, type enum('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange') not null, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index d962073f3..06ee64545 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -86,7 +86,7 @@ create trigger nflow_workflow_definition_insert before insert on `nflow_workflow -- - same indexes and constraints as production tables create table if not exists nflow_archive_workflow ( - id int not null auto_increment primary key, + id int not null primary key, status enum('created', 'executing', 'inProgress', 'finished', 
'manual') not null, type varchar(64) not null, root_workflow_id integer, @@ -108,7 +108,7 @@ create table if not exists nflow_archive_workflow ( ); create table if not exists nflow_archive_workflow_action ( - id int not null auto_increment primary key, + id int not null primary key, workflow_id int not null, executor_id int not null default -1, type enum('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange') not null, diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index 781a19e89..25742baf7 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -98,7 +98,7 @@ create trigger update_nflow_definition_modified before update on nflow_workflow_ -- - same indexes and constraints as production tables create table if not exists nflow_archive_workflow ( - id serial primary key, + id integer primary key, status workflow_status not null, type varchar(64) not null, root_workflow_id integer, @@ -122,7 +122,7 @@ drop index nflow_archive_workflow_activation; create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); create table if not exists nflow_archive_workflow_action ( - id serial primary key, + id integer primary key, workflow_id int not null, executor_id int not null, type action_type not null, From 76bd2260a39aa35a31a85e6c3de4cfb826af53bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Fri, 10 Jul 2015 20:15:43 +0300 Subject: [PATCH 09/51] Add test for ArchiveService. Change archive service not to call archiving with empty list of archivable workflow ids. --- .../nflow/engine/service/ArchiveService.java | 7 +++ .../engine/service/ArchiveServiceTest.java | 46 +++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index 8f06f40cf..b85af977b 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -7,6 +7,7 @@ import org.joda.time.DateTime; import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; +import org.springframework.util.Assert; @Named public class ArchiveService { @@ -14,10 +15,16 @@ public class ArchiveService { private ArchiveDao archiveDao; public int archiveWorkflows(DateTime olderThan, int batchSize) { + Assert.notNull(olderThan, "olderThan must not be null"); + Assert.isTrue(batchSize > 0, "batchSize must be greater than 0"); + List workflowIds; int archivedWorkflows = 0; do { workflowIds = archiveDao.listArchivableWorkflows(olderThan, batchSize); + if(workflowIds.isEmpty()) { + break; + } archiveDao.archiveWorkflows(workflowIds); archivedWorkflows += workflowIds.size(); } while(!workflowIds.isEmpty()); diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java new file mode 100644 index 000000000..3b501f3d7 --- /dev/null +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java @@ -0,0 
+1,46 @@ +package com.nitorcreations.nflow.engine.service; + +import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; +import org.joda.time.DateTime; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.*; + +@RunWith(MockitoJUnitRunner.class) +public class ArchiveServiceTest { + @InjectMocks + private final ArchiveService service = new ArchiveService(); + @Mock + private ArchiveDao dao; + private DateTime limit = new DateTime(2015,7,10,19,57,0,0); + private List emptyList = Collections.emptyList(); + private List dataList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); + + @Test + public void withZeroWorkflowsInFirstBatchCausesNothingToArchive() { + when(dao.listArchivableWorkflows(limit, 10)).thenReturn(emptyList); + int archived = service.archiveWorkflows(limit, 10); + assertEquals(0, archived); + verify(dao).listArchivableWorkflows(limit, 10); + verifyNoMoreInteractions(dao); + } + + @Test + public void archivingContinuesUntilEmptyListOfArchivableIsReturned() { + when(dao.listArchivableWorkflows(limit, 10)).thenReturn(dataList, dataList, dataList, emptyList); + int archived = service.archiveWorkflows(limit, 10); + assertEquals(dataList.size() * 3, archived); + verify(dao, times(4)).listArchivableWorkflows(limit, 10); + verify(dao, times(3)).archiveWorkflows(dataList); + verifyNoMoreInteractions(dao); + } +} From 4a9b5b017c9cd037d9f807960abbe9ad19ab6e4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Fri, 10 Jul 2015 21:25:37 +0300 Subject: [PATCH 10/51] Add unit tests for archiving workflow actions and state variables --- .../engine/internal/dao/ArchiveDaoTest.java | 220 ++++++++++++++++-- .../test/resources/scripts/db/h2.truncate.sql | 4 + 2 files changed, 202 insertions(+), 22 deletions(-) diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java index 0e30469b9..a7df7fc24 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java @@ -1,23 +1,22 @@ package com.nitorcreations.nflow.engine.internal.dao; -import static com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus.created; -import static org.joda.time.DateTime.now; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance; +import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstanceAction; +import org.apache.commons.lang3.builder.ReflectionToStringBuilder; +import org.joda.time.DateTime; +import org.junit.Test; +import org.springframework.dao.EmptyResultDataAccessException; +import javax.inject.Inject; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; -import javax.inject.Inject; - -import org.joda.time.DateTime; -import org.junit.Test; -import org.springframework.dao.EmptyResultDataAccessException; - -import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance; +import static 
com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus.created; +import static java.util.Arrays.asList; +import static org.joda.time.DateTime.now; +import static org.junit.Assert.*; public class ArchiveDaoTest extends BaseDaoTest { @Inject @@ -37,7 +36,6 @@ public class ArchiveDaoTest extends BaseDaoTest { DateTime prodTime3 = archiveTimeLimit.plusHours(2); DateTime prodTime4 = archiveTimeLimit.plusDays(3); - // TODO implement tests for actions, states // TODO implement tests for child workflows and their actions, states @Test @@ -81,19 +79,102 @@ public void listingReturnsOldestRowsAndMaxBatchSizeRows() { @Test public void archivingSimpleWorkflowsWorks() { - List expectedArchive = new ArrayList<>(); + List archivableWorkflows = new ArrayList<>(); storeActiveWorkflow(archiveTime1); storeActiveWorkflow(prodTime1); storePassiveWorkflow(prodTime1); - expectedArchive.add(storePassiveWorkflow(archiveTime1)); - expectedArchive.add(storePassiveWorkflow(archiveTime2)); + archivableWorkflows.add(storePassiveWorkflow(archiveTime1)); + archivableWorkflows.add(storePassiveWorkflow(archiveTime2)); + + int activeWorkflowCountBefore = rowCount("select 1 from nflow_workflow"); + archiveDao.archiveWorkflows(archivableWorkflows); + int activeWorkflowCountAfter = rowCount("select 1 from nflow_workflow"); + + assertActiveWorkflowsRemoved(archivableWorkflows); + assertArchiveWorkflowsExist(archivableWorkflows); + + assertEquals(archivableWorkflows.size(), rowCount("select 1 from nflow_archive_workflow")); + assertEquals(activeWorkflowCountAfter, activeWorkflowCountBefore - archivableWorkflows.size()); + } + + @Test + public void archivingWorkflowsWithActionsWorks() { + List archivableWorkflows = new ArrayList<>(); + List archivableActions = new ArrayList<>(); + + storeActions(storeActiveWorkflow(archiveTime1), 3); + storeActions(storeActiveWorkflow(prodTime1), 1); + storeActions(storePassiveWorkflow(prodTime1), 2); + + int archivable1 = storePassiveWorkflow(archiveTime1); + int archivable2 = storePassiveWorkflow(archiveTime2); + archivableActions.addAll(storeActions(archivable1, 1)); + archivableActions.addAll(storeActions(archivable2, 3)); + + archivableWorkflows.addAll(asList(archivable1, archivable2)); + + int activeActionCountBefore = rowCount("select 1 from nflow_workflow_action"); + archiveDao.archiveWorkflows(archivableWorkflows); + int activeActionCountAfter = rowCount("select 1 from nflow_workflow_action"); + + assertActiveWorkflowsRemoved(archivableWorkflows); + assertArchiveWorkflowsExist(archivableWorkflows); + + assertActiveActionsRemoved(archivableActions); + assertArchiveActionsExist(archivableActions); + + assertEquals(archivableActions.size(), rowCount("select 1 from nflow_archive_workflow_action")); + assertEquals(activeActionCountAfter, activeActionCountBefore - archivableActions.size()); + } + + @Test + public void archivingWorkflowsWithActionsAndStatesWorks() { + List archivableWorkflows = new ArrayList<>(); + List archivableActions = new ArrayList<>(); + List archivableStates = new ArrayList<>(); + + int nonArchivableWorkflow1 = storeActiveWorkflow(archiveTime1); + storeStateVariables(nonArchivableWorkflow1, storeActions(nonArchivableWorkflow1, 3), 1); + + int nonArchivableWorkflow2 = storeActiveWorkflow(prodTime1); + storeStateVariables(nonArchivableWorkflow2, storeActions(nonArchivableWorkflow2, 1), 3); + + int nonArchivableWorkflow3 = storePassiveWorkflow(prodTime1); + storeStateVariables(nonArchivableWorkflow3, storeActions(nonArchivableWorkflow3, 
2), 2);
+
+    int archivable1 = storePassiveWorkflow(archiveTime1);
+    int archivable2 = storePassiveWorkflow(archiveTime2);
+    List actions1 = storeActions(archivable1, 1);
+    List actions2 = storeActions(archivable2, 2);
+
+    archivableActions.addAll(actions1);
+    archivableActions.addAll(actions2);
-    archiveDao.archiveWorkflows(expectedArchive);
+    archivableStates.addAll(storeStateVariables(archivable1, actions1, 4));
+    archivableStates.addAll(storeStateVariables(archivable2, actions2, 2));
-    assertActiveWorkflowsRemoved(expectedArchive);
-    assertArchiveWorkflowsExists(expectedArchive);
+    archivableWorkflows.addAll(asList(archivable1, archivable2));
+
+    int variablesCountBefore = rowCount("select 1 from nflow_workflow_state");
+    archiveDao.archiveWorkflows(archivableWorkflows);
+    int variablesCountAfter = rowCount("select 1 from nflow_workflow_state");
+
+    assertActiveWorkflowsRemoved(archivableWorkflows);
+    assertArchiveWorkflowsExist(archivableWorkflows);
+
+    assertActiveActionsRemoved(archivableActions);
+    assertArchiveActionsExist(archivableActions);
+
+    assertActiveStateVariablesRemoved(archivableStates);
+    assertArchiveStateVariablesExist(archivableStates);
+
+    // each workflow automatically gets a state variable called "requestData"
+    int requestDataVariableCount = archivableWorkflows.size();
+    assertEquals(archivableStates.size() + requestDataVariableCount, rowCount("select 1 from nflow_archive_workflow_state"));
+
+    assertEquals(variablesCountAfter, variablesCountBefore - archivableStates.size() - requestDataVariableCount);
   }

   private void assertActiveWorkflowsRemoved(List workflowIds) {
@@ -107,14 +188,47 @@ private void assertActiveWorkflowsRemoved(List workflowIds) {
     }
   }

-  private void assertArchiveWorkflowsExists(List workflowIds) {
+  private void assertArchiveWorkflowsExist(List workflowIds) {
     for(int workflowId : workflowIds){
       Map archived = getArchivedWorkflow(workflowId);
       assertEquals(workflowId, archived.get("id"));
     }
   }

-  // TODO re-implement using archive searches in daos when they are implemented
+  private void assertActiveActionsRemoved(List actionIds) {
+    for(int actionId: actionIds) {
+      int found = rowCount("select 1 from nflow_workflow_action where id = ?", actionId);
+      assertEquals("Found unexpected action " + actionId + " in nflow_workflow_action", 0, found);
+    }
+  }
+
+  private void assertArchiveActionsExist(List actionIds) {
+    for(int actionId: actionIds) {
+      int found = rowCount("select 1 from nflow_archive_workflow_action where id = ?", actionId);
+      assertEquals("Action " + actionId + " not found in nflow_archive_workflow_action", 1, found);
+    }
+  }
+
+  private void assertActiveStateVariablesRemoved(List stateKeys) {
+    for(StateKey stateKey: stateKeys) {
+      int found = rowCount("select 1 from nflow_workflow_state where workflow_id = ? and action_id = ? and state_key = ?",
+          stateKey.workflowId, stateKey.actionId, stateKey.stateKey);
+      assertEquals("Found unexpected state variable " + stateKey + " in nflow_workflow_state", 0, found);
+    }
+  }
+
+  private void assertArchiveStateVariablesExist(List stateKeys) {
+    for(StateKey stateKey: stateKeys) {
+      int found = rowCount("select 1 from nflow_archive_workflow_state where workflow_id = ? and action_id = ? and state_key = ?",
+          stateKey.workflowId, stateKey.actionId, stateKey.stateKey);
+      assertEquals("State variable " + stateKey + " not found in nflow_archive_workflow_state", 1, found);
+    }
+  }
+
+  private int rowCount(String sql, Object ...
params) { + return jdbc.queryForList(sql, params).size(); + } + private Map getArchivedWorkflow(int workflowId) { return jdbc.queryForMap("select * from nflow_archive_workflow where id = ?", new Object[]{workflowId}); } @@ -131,8 +245,53 @@ private int storeActiveWorkflow(DateTime modified) { return id; } + private List storeActions(int workflowId, int actionCount) { + List actionIds = new ArrayList<>(); + for(int i = 0; i < actionCount; i ++) { + actionIds.add(storeAction(workflowId)); + } + return actionIds; + } + + private List storeStateVariables(int workflowId, List actionIds, int count) { + List stateKeys = new ArrayList<>(); + for(int actionId: actionIds) { + stateKeys.addAll(storeStateVariables(workflowId, actionId, count)); + } + return stateKeys; + } + private List storeStateVariables(int workflowId, int actionId, int stateCount) { + List stateKeys = new ArrayList<>(); + int index = 1; + for(int i = 0; i < stateCount; i ++) { + stateKeys.add(storeStateVariable(workflowId, actionId, "key-" + (index++) )); + } + return stateKeys; + } + + private StateKey storeStateVariable(int workflowId, int actionId, String key) { + String value = key + "_value"; + int updated = jdbc.update("insert into nflow_workflow_state (workflow_id, action_id, state_key, state_value) values (?, ?, ?, ?)", + workflowId, actionId, key, value); + assertEquals(1, updated); + return new StateKey(workflowId, actionId, key); + } + + private int storeAction(int workflowId) { + WorkflowInstanceAction action = actionBuilder(workflowId).build(); + return workflowInstanceDao.insertWorkflowInstanceAction(action); + } + + private WorkflowInstanceAction.Builder actionBuilder(int workflowId) { + return new WorkflowInstanceAction.Builder() + .setState("dummyState") + .setType(WorkflowInstanceAction.WorkflowActionType.stateExecution) + .setExecutionStart(DateTime.now()) + .setExecutionEnd(DateTime.now()) + .setWorkflowInstanceId(workflowId); + } + private int insert(WorkflowInstance instance) { - // TODO insertWorkflowInstance doesn't support storing modified date. Add some magic internal variable to enable that? 
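// Note (not in the original patch): insert() works around the missing support by inserting the
// instance first and then overwriting the modified column directly through JDBC in
// updateModified(); on PostgreSQL this depends on the relaxed update_modified trigger from
// patch 07, which only stamps modified when an update does not set it explicitly.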
int id = workflowInstanceDao.insertWorkflowInstance(instance); assertTrue(id > 0); DateTime modified = instance.modified; @@ -155,4 +314,21 @@ private void assertEqualsInAnyOrder(List expected, List actual Collections.sort(actualCopy); assertEquals(expectedCopy, actualCopy); } + + private static class StateKey { + public final int workflowId; + public final int actionId; + public final String stateKey; + + public StateKey(int workflowId, int actionId, String stateKey) { + this.workflowId = workflowId; + this.actionId = actionId; + this.stateKey = stateKey; + } + + @Override + public String toString() { + return ReflectionToStringBuilder.toString(this); + } + } } diff --git a/nflow-engine/src/test/resources/scripts/db/h2.truncate.sql b/nflow-engine/src/test/resources/scripts/db/h2.truncate.sql index 4e6a6f906..926c0745b 100644 --- a/nflow-engine/src/test/resources/scripts/db/h2.truncate.sql +++ b/nflow-engine/src/test/resources/scripts/db/h2.truncate.sql @@ -4,3 +4,7 @@ delete from nflow_workflow_action; delete from nflow_workflow; truncate table nflow_executor; +truncate table nflow_archive_workflow_state; +update nflow_archive_workflow set parent_workflow_id = null, parent_action_id = null; +delete from nflow_archive_workflow_action; +delete from nflow_archive_workflow; From ff26526c44c3d1c535c7bb98636a62b7c8727a08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Sat, 11 Jul 2015 00:55:09 +0300 Subject: [PATCH 11/51] Add integration test for archiving --- .../nflow/tests/ArchiveTest.java | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java new file mode 100644 index 000000000..5db19097a --- /dev/null +++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java @@ -0,0 +1,123 @@ +package com.nitorcreations.nflow.tests; + +import com.nitorcreations.nflow.engine.service.ArchiveService; +import com.nitorcreations.nflow.rest.v1.msg.CreateWorkflowInstanceRequest; +import com.nitorcreations.nflow.rest.v1.msg.CreateWorkflowInstanceResponse; +import com.nitorcreations.nflow.tests.demo.DemoWorkflow; +import com.nitorcreations.nflow.tests.demo.FibonacciWorkflow; +import com.nitorcreations.nflow.tests.runner.NflowServerRule; +import org.joda.time.DateTime; +import org.junit.ClassRule; +import org.junit.FixMethodOrder; +import org.junit.Test; +import org.springframework.context.annotation.ComponentScan; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import javax.inject.Inject; + +import static org.apache.cxf.jaxrs.client.WebClient.fromClient; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; +import static org.junit.runners.MethodSorters.NAME_ASCENDING; + +@FixMethodOrder(NAME_ASCENDING) +public class ArchiveTest extends AbstractNflowTest { + private static final int STEP_1_WORKFLOWS = 10, STEP_2_WORKFLOWS = 15, STEP_3_WORKFLOWS = 4; + + @ClassRule + public static NflowServerRule server = new NflowServerRule.Builder().springContextClass(ArchiveConfiguration.class).build(); + static ArchiveService archiveService; + + private static DateTime archiveLimit1, archiveLimit2; + + public ArchiveTest() { + super(server); + } + + @Test(timeout = 5000) + public void t01_createWorkflows() 
throws InterruptedException { + for(int i = 0; i < STEP_1_WORKFLOWS; i ++){ + createWorkflow(); + } + Thread.sleep(2000); + archiveLimit1 = DateTime.now(); + } + + @Test(timeout = 5000) + public void t02_createMoreWorkflows() throws InterruptedException { + for(int i = 0; i < STEP_2_WORKFLOWS; i ++){ + createWorkflow(); + } + Thread.sleep(2000); + archiveLimit2 = DateTime.now(); + } + + @Test(timeout = 5000) + public void t03_archiveBeforeTime1ArchiveAllWorkflows() { + int archived = archiveService.archiveWorkflows(archiveLimit1, 3); + assertEquals(STEP_1_WORKFLOWS, archived); + } + + @Test(timeout = 5000) + public void t04_archiveAgainBeforeTime1DoesNotArchivesAnything() { + int archived = archiveService.archiveWorkflows(archiveLimit1, 3); + assertEquals(0, archived); + } + + @Test(timeout = 5000) + public void t05_archiveBeforeTime1Archives() { + int archived = archiveService.archiveWorkflows(archiveLimit2, 5); + assertEquals(STEP_2_WORKFLOWS, archived); + } + + @Test(timeout = 5000) + public void t06_createMoreWorkflows() throws InterruptedException { + for(int i = 0; i < STEP_3_WORKFLOWS; i ++){ + createWorkflow(); + } + Thread.sleep(2000); + } + + @Test(timeout = 5000) + public void t07_archiveAgainBeforeTime1DoesNotArchiveAnything() { + int archived = archiveService.archiveWorkflows(archiveLimit1, 3); + assertEquals(0, archived); + } + + @Test(timeout = 5000) + public void t08_archiveAgainBeforeTime2DoesNotArchiveAnything() { + int archived = archiveService.archiveWorkflows(archiveLimit2, 3); + assertEquals(0, archived); + } + + private int createWorkflow() { + CreateWorkflowInstanceRequest req = new CreateWorkflowInstanceRequest(); + req.type = FibonacciWorkflow.WORKFLOW_TYPE; + // FIXME set fibo parameter to 3 after foreign key problems have been fixed in archiving + req.requestData = nflowObjectMapper().valueToTree(new FibonacciWorkflow.FiboData(1)); + CreateWorkflowInstanceResponse resp = fromClient(workflowInstanceResource, true).put(req, CreateWorkflowInstanceResponse.class); + assertThat(resp.id, notNullValue()); + return resp.id; + } + + // TODO another way would be to modify JettyServerContainer to have reference to Spring's applicationContext + // that would allow accessing ArchiveService via NflowServerRule + @ComponentScan(basePackageClasses = DemoWorkflow.class) + private static class ArchiveConfiguration { + @Inject + private ArchiveService archiveService; + + @PostConstruct + public void linkArchiveServiceToTestClass() { + ArchiveTest.archiveService = archiveService; + } + + @PreDestroy + public void removeArchiveServiceFromTestClass() { + ArchiveTest.archiveService = null; + } + + } +} From 4ccf92031c902ce640a59d1d0424414ee75a6366 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Sat, 11 Jul 2015 01:42:06 +0300 Subject: [PATCH 12/51] Remove recursive foreign keys from archive tables (workflow -> action and workflow -> workflow) --- .../nflow/engine/internal/dao/ArchiveDao.java | 1 - .../resources/scripts/db/h2.create.ddl.sql | 8 ++---- .../resources/scripts/db/mysql.create.ddl.sql | 8 ++---- .../scripts/db/mysql.legacy.create.ddl.sql | 4 ++- .../scripts/db/postgresql.create.ddl.sql | 8 ++---- .../nflow/tests/ArchiveTest.java | 25 +++++++++++-------- 6 files changed, 23 insertions(+), 31 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index d6159a1cd..651bb3203 100644 --- 
a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -44,7 +44,6 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { // TODO add index to nflow_workflow.modified (combined index with next_activation?) // TODO change modified trigger for postgre // TODO add new triggers for h2 and postgre to update scripts - // TODO handle recursive dependency between workflow and action somehow // TODO implement method to check that archive and prod tables have matching fields } diff --git a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql index e5c26a471..97fe451ee 100644 --- a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql @@ -76,7 +76,9 @@ create table if not exists nflow_workflow_definition ( -- Archive tables -- - no default values -- - no triggers +-- - no auto increments -- - same indexes and constraints as production tables +-- - remove recursive foreign keys create table if not exists nflow_archive_workflow ( id int not null primary key, @@ -115,12 +117,6 @@ create table if not exists nflow_archive_workflow_action ( foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade ); -alter table nflow_archive_workflow add constraint fk_workflow_archive_parent - foreign key (parent_workflow_id, parent_action_id) references nflow_archive_workflow_action (workflow_id, id) on delete cascade; - -alter table nflow_archive_workflow add constraint fk_workflow_archive_root - foreign key (root_workflow_id) references nflow_archive_workflow (id) on delete cascade; - create table if not exists nflow_archive_workflow_state ( workflow_id int not null, action_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index 7c466c507..e0593f1dc 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -73,7 +73,9 @@ create table if not exists nflow_workflow_definition ( -- Archive tables -- - no default values -- - no triggers +-- - no auto increments -- - same indexes and constraints as production tables +-- - remove recursive foreign keys create table if not exists nflow_archive_workflow ( id int not null primary key, @@ -110,12 +112,6 @@ create table if not exists nflow_archive_workflow_action ( foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade ); -alter table nflow_archive_workflow add constraint fk_archive_workflow_parent - foreign key (parent_workflow_id, parent_action_id) references nflow_archive_workflow_action (workflow_id, id) on delete cascade; - -alter table nflow_archive_workflow add constraint fk_archive_workflow_root - foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; - create table if not exists nflow_archive_workflow_state ( workflow_id int not null, action_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index 06ee64545..24f558c8f 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -83,7 
+83,9 @@ create trigger nflow_workflow_definition_insert before insert on `nflow_workflow -- Archive tables -- - no default values -- - no triggers +-- - no auto increments -- - same indexes and constraints as production tables +-- - remove recursive foreign keys create table if not exists nflow_archive_workflow ( id int not null primary key, @@ -124,7 +126,7 @@ alter table nflow_archive_workflow add constraint fk_archive_workflow_parent foreign key (parent_workflow_id, parent_action_id) references nflow_archive_workflow_action (workflow_id, id) on delete cascade; alter table nflow_archive_workflow add constraint fk_archive_workflow_root - foreign key (root_archive_workflow_id) references nflow_archive_workflow (id) on delete cascade; + foreign key (root_workflow_id) references nflow_archive_workflow (id) on delete cascade; create table if not exists nflow_archive_workflow_state ( workflow_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index 25742baf7..65e4228fc 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -95,7 +95,9 @@ create trigger update_nflow_definition_modified before update on nflow_workflow_ -- Archive tables -- - no default values -- - no triggers +-- - no auto increments -- - same indexes and constraints as production tables +-- - remove recursive foreign keys create table if not exists nflow_archive_workflow ( id integer primary key, @@ -135,12 +137,6 @@ create table if not exists nflow_archive_workflow_action ( constraint nflow_archive_workflow_action_uniq unique (workflow_id, id) ); -alter table nflow_archive_workflow add constraint fk_archive_workflow_parent - foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade; - -alter table nflow_archive_workflow add constraint fk_archive_workflow_root - foreign key (root_workflow_id) references nflow_archive_workflow (id) on delete cascade; - create table if not exists nflow_archive_workflow_state ( workflow_id int not null, action_id int not null, diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java index 5db19097a..62c7e7730 100644 --- a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java +++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java @@ -24,10 +24,13 @@ @FixMethodOrder(NAME_ASCENDING) public class ArchiveTest extends AbstractNflowTest { - private static final int STEP_1_WORKFLOWS = 10, STEP_2_WORKFLOWS = 15, STEP_3_WORKFLOWS = 4; - + private static final int STEP_1_WORKFLOWS = 7, STEP_2_WORKFLOWS = 9, STEP_3_WORKFLOWS = 4; + private final int createSleepMs = 1500; @ClassRule - public static NflowServerRule server = new NflowServerRule.Builder().springContextClass(ArchiveConfiguration.class).build(); + public static NflowServerRule server = new NflowServerRule.Builder() + .prop("nflow.dispatcher.sleep.ms", 25) + .springContextClass(ArchiveConfiguration.class) + .build(); static ArchiveService archiveService; private static DateTime archiveLimit1, archiveLimit2; @@ -41,7 +44,7 @@ public void t01_createWorkflows() throws InterruptedException { for(int i = 0; i < STEP_1_WORKFLOWS; i ++){ createWorkflow(); } - Thread.sleep(2000); + Thread.sleep(createSleepMs); archiveLimit1 = 
DateTime.now(); } @@ -50,14 +53,15 @@ public void t02_createMoreWorkflows() throws InterruptedException { for(int i = 0; i < STEP_2_WORKFLOWS; i ++){ createWorkflow(); } - Thread.sleep(2000); + Thread.sleep(createSleepMs); archiveLimit2 = DateTime.now(); } @Test(timeout = 5000) public void t03_archiveBeforeTime1ArchiveAllWorkflows() { int archived = archiveService.archiveWorkflows(archiveLimit1, 3); - assertEquals(STEP_1_WORKFLOWS, archived); + // fibonacci workflow creates 1 child workflow + assertEquals(STEP_1_WORKFLOWS * 2, archived); } @Test(timeout = 5000) @@ -67,9 +71,9 @@ public void t04_archiveAgainBeforeTime1DoesNotArchivesAnything() { } @Test(timeout = 5000) - public void t05_archiveBeforeTime1Archives() { + public void t05_archiveBeforeTime2Archives() { int archived = archiveService.archiveWorkflows(archiveLimit2, 5); - assertEquals(STEP_2_WORKFLOWS, archived); + assertEquals(STEP_2_WORKFLOWS * 2, archived); } @Test(timeout = 5000) @@ -77,7 +81,7 @@ public void t06_createMoreWorkflows() throws InterruptedException { for(int i = 0; i < STEP_3_WORKFLOWS; i ++){ createWorkflow(); } - Thread.sleep(2000); + Thread.sleep(createSleepMs); } @Test(timeout = 5000) @@ -95,8 +99,7 @@ public void t08_archiveAgainBeforeTime2DoesNotArchiveAnything() { private int createWorkflow() { CreateWorkflowInstanceRequest req = new CreateWorkflowInstanceRequest(); req.type = FibonacciWorkflow.WORKFLOW_TYPE; - // FIXME set fibo parameter to 3 after foreign key problems have been fixed in archiving - req.requestData = nflowObjectMapper().valueToTree(new FibonacciWorkflow.FiboData(1)); + req.requestData = nflowObjectMapper().valueToTree(new FibonacciWorkflow.FiboData(3)); CreateWorkflowInstanceResponse resp = fromClient(workflowInstanceResource, true).put(req, CreateWorkflowInstanceResponse.class); assertThat(resp.id, notNullValue()); return resp.id; From 65d868b609d1ec8207fe6c17ac988e2e63c20766 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Sat, 11 Jul 2015 01:59:26 +0300 Subject: [PATCH 13/51] Make nflow_archive_workflow constraint name unique --- .../src/main/resources/scripts/db/postgresql.create.ddl.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index 65e4228fc..51ff36a82 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -117,7 +117,7 @@ create table if not exists nflow_archive_workflow ( created timestamptz not null, modified timestamptz not null, executor_group varchar(64) not null, - constraint nflow_workflow_uniq unique (type, external_id, executor_group) + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); drop index nflow_archive_workflow_activation; From f11f01dc7d536f183cd70953d9af7505b07a620d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Sat, 11 Jul 2015 02:01:17 +0300 Subject: [PATCH 14/51] Remove recursive foreign keys also for legacy mysql --- .../main/resources/scripts/db/mysql.legacy.create.ddl.sql | 6 ------ 1 file changed, 6 deletions(-) diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index 24f558c8f..bd8c70dad 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ 
b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -122,12 +122,6 @@ create table if not exists nflow_archive_workflow_action ( foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade ); -alter table nflow_archive_workflow add constraint fk_archive_workflow_parent - foreign key (parent_workflow_id, parent_action_id) references nflow_archive_workflow_action (workflow_id, id) on delete cascade; - -alter table nflow_archive_workflow add constraint fk_archive_workflow_root - foreign key (root_workflow_id) references nflow_archive_workflow (id) on delete cascade; - create table if not exists nflow_archive_workflow_state ( workflow_id int not null, action_id int not null, From f2e7b89fcc9c344137bee848965bf65abd658337 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Sat, 11 Jul 2015 11:35:57 +0300 Subject: [PATCH 15/51] Improve timeouts and code structure in ArchiveTest --- .../nflow/tests/ArchiveTest.java | 59 +++++++++++-------- 1 file changed, 36 insertions(+), 23 deletions(-) diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java index 62c7e7730..780cc6e40 100644 --- a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java +++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java @@ -15,6 +15,8 @@ import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.inject.Inject; +import java.util.ArrayList; +import java.util.List; import static org.apache.cxf.jaxrs.client.WebClient.fromClient; import static org.hamcrest.Matchers.notNullValue; @@ -24,8 +26,10 @@ @FixMethodOrder(NAME_ASCENDING) public class ArchiveTest extends AbstractNflowTest { - private static final int STEP_1_WORKFLOWS = 7, STEP_2_WORKFLOWS = 9, STEP_3_WORKFLOWS = 4; - private final int createSleepMs = 1500; + private static final int STEP_1_WORKFLOWS = 4, STEP_2_WORKFLOWS = 7, STEP_3_WORKFLOWS = 4; + private static final int CREATE_TIMEOUT = 15000; + private static final int ARCHIVE_TIMEOUT = 15000; + @ClassRule public static NflowServerRule server = new NflowServerRule.Builder() .prop("nflow.dispatcher.sleep.ms", 25) @@ -39,63 +43,62 @@ public ArchiveTest() { super(server); } - @Test(timeout = 5000) + @Test(timeout = CREATE_TIMEOUT) public void t01_createWorkflows() throws InterruptedException { - for(int i = 0; i < STEP_1_WORKFLOWS; i ++){ - createWorkflow(); - } - Thread.sleep(createSleepMs); + waitUntilWorkflowsFinished(createWorkflows(STEP_1_WORKFLOWS)); archiveLimit1 = DateTime.now(); } - @Test(timeout = 5000) + @Test(timeout = CREATE_TIMEOUT) public void t02_createMoreWorkflows() throws InterruptedException { - for(int i = 0; i < STEP_2_WORKFLOWS; i ++){ - createWorkflow(); - } - Thread.sleep(createSleepMs); + waitUntilWorkflowsFinished(createWorkflows(STEP_2_WORKFLOWS)); archiveLimit2 = DateTime.now(); } - @Test(timeout = 5000) + @Test(timeout = ARCHIVE_TIMEOUT) public void t03_archiveBeforeTime1ArchiveAllWorkflows() { int archived = archiveService.archiveWorkflows(archiveLimit1, 3); - // fibonacci workflow creates 1 child workflow + // fibonacci(3) workflow creates 1 child workflow assertEquals(STEP_1_WORKFLOWS * 2, archived); } - @Test(timeout = 5000) + @Test(timeout = ARCHIVE_TIMEOUT) public void t04_archiveAgainBeforeTime1DoesNotArchivesAnything() { int archived = archiveService.archiveWorkflows(archiveLimit1, 3); assertEquals(0, archived); } - @Test(timeout = 5000) + 
@Test(timeout = ARCHIVE_TIMEOUT)
   public void t05_archiveBeforeTime2Archives() {
     int archived = archiveService.archiveWorkflows(archiveLimit2, 5);
     assertEquals(STEP_2_WORKFLOWS * 2, archived);
   }

-  @Test(timeout = 5000)
+  @Test(timeout = CREATE_TIMEOUT)
   public void t06_createMoreWorkflows() throws InterruptedException {
-    for(int i = 0; i < STEP_3_WORKFLOWS; i ++){
-      createWorkflow();
-    }
-    Thread.sleep(createSleepMs);
+    waitUntilWorkflowsFinished(createWorkflows(STEP_3_WORKFLOWS));
   }

-  @Test(timeout = 5000)
+  @Test(timeout = ARCHIVE_TIMEOUT)
   public void t07_archiveAgainBeforeTime1DoesNotArchiveAnything() {
     int archived = archiveService.archiveWorkflows(archiveLimit1, 3);
     assertEquals(0, archived);
   }

-  @Test(timeout = 5000)
+  @Test(timeout = ARCHIVE_TIMEOUT)
   public void t08_archiveAgainBeforeTime2DoesNotArchiveAnything() {
     int archived = archiveService.archiveWorkflows(archiveLimit2, 3);
     assertEquals(0, archived);
   }

+  private List createWorkflows(int count) {
+    List ids = new ArrayList<>();
+    for(int i = 0; i < count; i ++) {
+      ids.add(createWorkflow());
+    }
+    return ids;
+  }
+
   private int createWorkflow() {
     CreateWorkflowInstanceRequest req = new CreateWorkflowInstanceRequest();
     req.type = FibonacciWorkflow.WORKFLOW_TYPE;
@@ -105,6 +108,16 @@ private int createWorkflow() {
     return resp.id;
   }

+  private void waitUntilWorkflowsFinished(List workflowIds) {
+    for(int workflowId : workflowIds) {
+      try {
+        getWorkflowInstance(workflowId, "done");
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+  }
+
   // TODO another way would be to modify JettyServerContainer to have reference to Spring's applicationContext
   // that would allow accessing ArchiveService via NflowServerRule
   @ComponentScan(basePackageClasses = DemoWorkflow.class)
From 6d2f438d09caf3adece11067b198fa62a043e585 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?=
Date: Sat, 11 Jul 2015 11:50:02 +0300
Subject: [PATCH 16/51] Change FibonacciWorkflow polling timeout to 10 seconds to make workflows complete faster in ArchiveTest.

---
 .../com/nitorcreations/nflow/tests/demo/FibonacciWorkflow.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nflow-tests/src/main/java/com/nitorcreations/nflow/tests/demo/FibonacciWorkflow.java b/nflow-tests/src/main/java/com/nitorcreations/nflow/tests/demo/FibonacciWorkflow.java
index c0fb1583f..af7694d1a 100644
--- a/nflow-tests/src/main/java/com/nitorcreations/nflow/tests/demo/FibonacciWorkflow.java
+++ b/nflow-tests/src/main/java/com/nitorcreations/nflow/tests/demo/FibonacciWorkflow.java
@@ -98,7 +98,7 @@ public NextAction poll(StateExecution execution) {
     List finishedChildren = execution.queryChildWorkflows(query);
     if (finishedChildren.size() < execution.getAllChildWorkflows().size()) {
-      return retryAfter(now().plusSeconds(20), "Child workflows are not ready yet.");
+      return retryAfter(now().plusSeconds(10), "Child workflows are not ready yet.");
     }
     int sum = 0;
     for (WorkflowInstance child : finishedChildren) {
From 5660b4a3d1236cf2ec594422135e3d6daf7b20e4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?=
Date: Sat, 11 Jul 2015 12:02:51 +0300
Subject: [PATCH 17/51] Run archiving as the first step to keep existing data from skewing the later results.
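This relies on the batch-draining behaviour given to ArchiveService.archiveWorkflows in patch 09: it keeps listing and archiving batches until an empty batch is returned, so a single call with the current time as the limit clears everything that is archivable at that moment. A sketch of such a cleanup call (not part of the patch; the batch size is arbitrary):

    // drains every workflow instance that is archivable right now
    int cleaned = archiveService.archiveWorkflows(DateTime.now(), 10);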
--- .../java/com/nitorcreations/nflow/tests/ArchiveTest.java | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java
index 780cc6e40..4f15d31e9 100644
--- a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java
+++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java
@@ -43,6 +43,11 @@ public ArchiveTest() {
     super(server);
   }

+  @Test(timeout = ARCHIVE_TIMEOUT)
+  public void t00_cleanupExistingArchivableStuff() {
+    archiveService.archiveWorkflows(DateTime.now(), 10);
+  }
+
   @Test(timeout = CREATE_TIMEOUT)
   public void t01_createWorkflows() throws InterruptedException {
     waitUntilWorkflowsFinished(createWorkflows(STEP_1_WORKFLOWS));
From 9aafb4975fc220479d94994730b6f037414421a7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?=
Date: Sat, 11 Jul 2015 15:42:19 +0300
Subject: [PATCH 18/51] Add a check to the start of archiving that validates the archive tables. SQL operations that copy data from the production tables to the archive are generated dynamically. Implemented TableMetadataChecker to ensure that the archive tables match the production tables and copying is possible.

---
 .../nflow/engine/internal/dao/ArchiveDao.java | 37 +++----
 .../nflow/engine/internal/dao/DaoUtil.java | 20 ++++
 .../internal/dao/TableMetadataChecker.java | 98 +++++++++++++++++++
 .../nflow/engine/service/ArchiveService.java | 1 +
 .../internal/dao/DaoTestConfiguration.java | 5 +
 .../dao/TableMetadataCheckerTest.java | 82 ++++++++++++++++
 .../internal/dao/WorkflowInstanceDaoTest.java | 1 +
 .../engine/service/ArchiveServiceTest.java | 16 +++
 .../scripts/db/metadata.create.ddl.sql | 48 +++++++++
 9 files changed, 287 insertions(+), 21 deletions(-)
 create mode 100644 nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java
 create mode 100644 nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java
 create mode 100644 nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql

diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
index 651bb3203..df90c2a99 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
@@ -25,12 +25,24 @@
 @DependsOn(NFLOW_DATABASE_INITIALIZER)
 public class ArchiveDao {
   private JdbcTemplate jdbc;
+  private TableMetadataChecker tableMetadataChecker;

   @Inject
   public void setJdbcTemplate(@NFlow JdbcTemplate jdbcTemplate) {
     this.jdbc = jdbcTemplate;
   }

+  @Inject
+  public void setTableMetadataChecker(TableMetadataChecker tableMetadataChecker) {
+    this.tableMetadataChecker = tableMetadataChecker;
+  }
+
+  public void ensureValidArchiveTablesExist() {
+    tableMetadataChecker.ensureCopyingPossible("nflow_workflow", "nflow_archive_workflow");
+    tableMetadataChecker.ensureCopyingPossible("nflow_workflow_action", "nflow_archive_workflow_action");
+    tableMetadataChecker.ensureCopyingPossible("nflow_workflow_state", "nflow_archive_workflow_state");
+  }
+
   public List listArchivableWorkflows(DateTime before, int maxRows) {
     return jdbc.query("select * from nflow_workflow parent where parent.next_activation is null and parent.modified <= ?
" + "and not exists(" + @@ -47,13 +59,6 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { // TODO implement method to check that archive and prod tables have matching fields } - private static class ArchivableWorkflowsRowMapper implements RowMapper { - @Override - public Integer mapRow(ResultSet rs, int rowNum) throws SQLException { - return rs.getInt("id"); - } - } - @Transactional public void archiveWorkflows(List workflowIds) { String workflowIdParams = params(workflowIds); @@ -91,7 +96,7 @@ private void deleteWorkflows(String workflowIdParams) { } private String columnsFromMetadata(String tableName) { - List columnNames = jdbc.query("select * from " + tableName + " where 1 = 0", ColumnNamesExtractor.columnNamesExtractor); + List columnNames = jdbc.query("select * from " + tableName + " where 1 = 0", DaoUtil.ColumnNamesExtractor.columnNamesExtractor); return StringUtils.join(columnNames.toArray(), ","); } @@ -99,20 +104,10 @@ private String params(List workflowIds) { return "(" + StringUtils.join(workflowIds.toArray(), ",") + ")"; } - static final class ColumnNamesExtractor implements org.springframework.jdbc.core.ResultSetExtractor> { - static final ColumnNamesExtractor columnNamesExtractor = new ColumnNamesExtractor(); - private ColumnNamesExtractor() {} - + private static class ArchivableWorkflowsRowMapper implements RowMapper { @Override - public List extractData(ResultSet rs) throws SQLException, DataAccessException { - List columnNames = new LinkedList<>(); - - ResultSetMetaData metadata = rs.getMetaData(); - for(int col = 1; col <= metadata.getColumnCount(); col ++) { - columnNames.add(metadata.getColumnName(col)); - } - return columnNames; + public Integer mapRow(ResultSet rs, int rowNum) throws SQLException { + return rs.getInt("id"); } } - } diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java index 9e3af77d8..17da0c432 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java @@ -1,8 +1,11 @@ package com.nitorcreations.nflow.engine.internal.dao; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Timestamp; +import java.util.LinkedList; +import java.util.List; import org.joda.time.DateTime; import org.springframework.dao.DataAccessException; @@ -35,4 +38,21 @@ public static Integer getInt(ResultSet rs, String columnLabel) throws SQLExcepti int value = rs.getInt(columnLabel); return rs.wasNull() ? 
null : value; } + + public static final class ColumnNamesExtractor implements org.springframework.jdbc.core.ResultSetExtractor> { + static final ColumnNamesExtractor columnNamesExtractor = new ColumnNamesExtractor(); + private ColumnNamesExtractor() {} + + @Override + public List extractData(ResultSet rs) throws SQLException, DataAccessException { + List columnNames = new LinkedList<>(); + + ResultSetMetaData metadata = rs.getMetaData(); + for(int col = 1; col <= metadata.getColumnCount(); col ++) { + columnNames.add(metadata.getColumnName(col)); + } + return columnNames; + } + } + } diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java new file mode 100644 index 000000000..795ecfee7 --- /dev/null +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java @@ -0,0 +1,98 @@ +package com.nitorcreations.nflow.engine.internal.dao; + +import com.nitorcreations.nflow.engine.internal.config.NFlow; +import org.apache.commons.lang3.builder.ReflectionToStringBuilder; +import org.springframework.dao.DataAccessException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.ResultSetExtractor; + +import javax.inject.Inject; +import javax.inject.Named; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import static java.lang.String.format; +import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; + +@Named +public class TableMetadataChecker { + private JdbcTemplate jdbc; + + public void ensureCopyingPossible(String sourceTable, String destinationTable) { + Map sourceMetadataMap = getMetadata(sourceTable); + Map destMetadataMap = getMetadata(destinationTable); + if(destMetadataMap.size() < sourceMetadataMap.size()) { + throw new IllegalArgumentException(format("Source table %s has more columns than destination table %s", + sourceTable, destinationTable)); + } + if(! 
destMetadataMap.keySet().containsAll(sourceMetadataMap.keySet())) {
+      Set missingColumns = new LinkedHashSet<>(sourceMetadataMap.keySet());
+      missingColumns.removeAll(destMetadataMap.keySet());
+      throw new IllegalArgumentException(format("Destination table %s is missing columns %s that are present in source table %s",
+          destinationTable, missingColumns, sourceTable));
+    }
+    for(Entry entry: sourceMetadataMap.entrySet()) {
+      ColumnMetadata sourceMetadata = entry.getValue();
+      ColumnMetadata destMetadata = destMetadataMap.get(entry.getKey());
+      if(!sourceMetadata.typeName.equals(destMetadata.typeName)) {
+        throw new IllegalArgumentException(format("Source column %s.%s has type %s and destination column %s.%s has mismatching type %s",
+            sourceTable, sourceMetadata.columnName, sourceMetadata.typeName,
+            destinationTable, destMetadata.columnName, destMetadata.typeName));
+      }
+      if(sourceMetadata.size > destMetadata.size) {
+        throw new IllegalArgumentException(format("Source column %s.%s has size %s and destination column %s.%s smaller size %s",
+            sourceTable, sourceMetadata.columnName, sourceMetadata.size,
+            destinationTable, destMetadata.columnName, destMetadata.size));
+      }
+    }
+  }
+
+  private Map getMetadata(String tableName) {
+    return jdbc.query("select * from " + tableName + " where 1 = 0", new MetadataExtractor());
+  }
+
+  private static class MetadataExtractor implements ResultSetExtractor> {
+
+    @Override
+    public Map extractData(ResultSet rs) throws SQLException, DataAccessException {
+      ResultSetMetaData metadata = rs.getMetaData();
+      Map metadataMap = new LinkedHashMap<>();
+      for(int col = 1; col <= metadata.getColumnCount(); col ++) {
+        String columnName = metadata.getColumnName(col);
+        String typeName = metadata.getColumnTypeName(col);
+        int size = metadata.getColumnDisplaySize(col);
+        metadataMap.put(columnName, new ColumnMetadata(columnName, typeName, size));
+      }
+      return metadataMap;
+    }
+  }
+
+  private static class ColumnMetadata {
+    public final String columnName;
+    public final String typeName;
+    public final int size;
+
+    public ColumnMetadata(String columnName, String typeName, int size) {
+      this.columnName = columnName;
+      this.typeName = typeName;
+      this.size = size;
+    }
+
+    @Override
+    public String toString() {
+      return ReflectionToStringBuilder.toString(this, SHORT_PREFIX_STYLE);
+    }
+  }
+
+  @Inject
+  public void setJdbcTemplate(@NFlow JdbcTemplate jdbcTemplate) {
+    this.jdbc = jdbcTemplate;
+  }
+
+}
diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
index b85af977b..2427ea716 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
@@ -15,6 +15,7 @@ public class ArchiveService {
   private ArchiveDao archiveDao;

   public int archiveWorkflows(DateTime olderThan, int batchSize) {
+    archiveDao.ensureValidArchiveTablesExist();
     Assert.notNull(olderThan, "olderThan must not be null");
     Assert.isTrue(batchSize > 0, "batchSize must be greater than 0");

diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java
index 9ec314860..9a336ca87 100644
--- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java
+++
b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/DaoTestConfiguration.java @@ -53,6 +53,11 @@ public ArchiveDao archiveDao() { return new ArchiveDao(); } + @Bean + public TableMetadataChecker tableMetadataChecker() { + return new TableMetadataChecker(); + } + @Bean public PlatformTransactionManager transactionManager(DataSource ds) { return new DataSourceTransactionManager(ds); diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java new file mode 100644 index 000000000..046991359 --- /dev/null +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java @@ -0,0 +1,82 @@ +package com.nitorcreations.nflow.engine.internal.dao; + +import com.nitorcreations.nflow.engine.internal.storage.db.DatabaseInitializer; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.springframework.mock.env.MockEnvironment; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import javax.inject.Inject; +import javax.sql.DataSource; + +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes = { DaoTestConfiguration.class }) +@ActiveProfiles("nflow.db.h2") +@DirtiesContext +public class TableMetadataCheckerTest { + @Inject + private DataSource dataSource; + @Inject + private TableMetadataChecker tableMetadataChecker; + private static DatabaseInitializer initializer = null; + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setup() { + if(initializer == null) { + initializer = new DatabaseInitializer("metadata", dataSource, environmentCreateOnStartup("true")); + } + } + + @Test + public void identicalTableIsValid() { + tableMetadataChecker.ensureCopyingPossible("base", "identical"); + } + + @Test + public void tableIsValidWithItself() { + tableMetadataChecker.ensureCopyingPossible("base", "base"); + } + + @Test + public void destinationWithExtraColumnsIsValid() { + tableMetadataChecker.ensureCopyingPossible("base", "more_columns"); + } + + @Test + public void destinationWithLargerColumnIsValid() { + tableMetadataChecker.ensureCopyingPossible("base", "larger_size"); + } + + @Test + public void destinationWithFewerColumnsIsInvalid() { + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Source table base has more columns than destination table fewer_columns"); + tableMetadataChecker.ensureCopyingPossible("base", "fewer_columns"); + } + + @Test + public void destinationWithWrongTypeIsInvalid() { + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Source column base.TIME1 has type TIME and destination column wrong_type.TIME1 has mismatching type INTEGER"); + tableMetadataChecker.ensureCopyingPossible("base", "wrong_type"); + } + + @Test + public void destinationWithSmallerColumnIsInvalid() { + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Source column base.TEXT2 has size 30 and destination column smaller_size.TEXT2 smaller size 25"); + tableMetadataChecker.ensureCopyingPossible("base", "smaller_size"); + } + + private MockEnvironment 
environmentCreateOnStartup(String value) { + return new MockEnvironment().withProperty("nflow.db.create_on_startup", value); + } +} diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java index 7071c1b3d..8ba0f1b72 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDaoTest.java @@ -122,6 +122,7 @@ public void updateWorkflowInstance() throws InterruptedException { WorkflowInstance i1 = constructWorkflowInstanceBuilder().setStatus(created).build(); int id = dao.insertWorkflowInstance(i1); List ids = dao.pollNextWorkflowInstanceIds(1); + // FIXME this assert fails randomly. due to race condition? assertThat(ids, contains(id)); final WorkflowInstance i2 = new WorkflowInstance.Builder(dao.getWorkflowInstance(id)).setStatus(inProgress) .setState("updateState").setStateText("update text").setNextActivation(DateTime.now()).build(); diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java index 3b501f3d7..e59740a04 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java @@ -13,6 +13,7 @@ import java.util.List; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; import static org.mockito.Mockito.*; @RunWith(MockitoJUnitRunner.class) @@ -30,6 +31,7 @@ public void withZeroWorkflowsInFirstBatchCausesNothingToArchive() { when(dao.listArchivableWorkflows(limit, 10)).thenReturn(emptyList); int archived = service.archiveWorkflows(limit, 10); assertEquals(0, archived); + verify(dao).ensureValidArchiveTablesExist(); verify(dao).listArchivableWorkflows(limit, 10); verifyNoMoreInteractions(dao); } @@ -39,8 +41,22 @@ public void archivingContinuesUntilEmptyListOfArchivableIsReturned() { when(dao.listArchivableWorkflows(limit, 10)).thenReturn(dataList, dataList, dataList, emptyList); int archived = service.archiveWorkflows(limit, 10); assertEquals(dataList.size() * 3, archived); + verify(dao).ensureValidArchiveTablesExist(); verify(dao, times(4)).listArchivableWorkflows(limit, 10); verify(dao, times(3)).archiveWorkflows(dataList); verifyNoMoreInteractions(dao); } + + @Test + public void noArchivingHappensWhenValidArchiveTablesDoNotExist() { + doThrow(new IllegalArgumentException("bad archive table")).when(dao).ensureValidArchiveTablesExist(); + try { + service.archiveWorkflows(limit, 10); + fail("exception expected"); + } catch(IllegalArgumentException e) { + // ignore + } + verify(dao).ensureValidArchiveTablesExist(); + verifyNoMoreInteractions(dao); + } } diff --git a/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql b/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql new file mode 100644 index 000000000..70c3e6584 --- /dev/null +++ b/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql @@ -0,0 +1,48 @@ +create table base ( + id integer not null primary key, + text1 varchar(20), + text2 char(30), + time1 time not null +); + +create table identical ( + id integer not null primary key, + text1 varchar(20), + text2 char(30), + time1 time not 
null +); + +create table more_columns ( + id integer not null primary key, + text1 varchar(20), + text2 char(30), + time1 time not null, + extra char(1) +); + +create table fewer_columns ( + id integer not null primary key, + text1 varchar(20), + text2 char(30) +); + +create table wrong_type ( + id integer not null primary key, + text1 varchar(20), + text2 char(30), + time1 integer not null +); + +create table smaller_size ( + id integer not null primary key, + text1 varchar(20), + text2 char(25), + time1 time not null +); + +create table larger_size ( + id integer not null primary key, + text1 varchar(25), + text2 char(30), + time1 time not null +); \ No newline at end of file From 67da7e514810cb777a1f43fc3da8a84a352c5800 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?= Date: Sat, 11 Jul 2015 16:01:28 +0300 Subject: [PATCH 19/51] Add support for type aliases in TableMetadataChecker. This is needed for postgres: the production table has type serial, the archive table has type integer. --- .../internal/dao/TableMetadataChecker.java | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java index 795ecfee7..72fdca106 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java @@ -58,6 +58,7 @@ private Map getMetadata(String tableName) { } private static class MetadataExtractor implements ResultSetExtractor> { + private Map typeAliases = typeAliases(); @Override public Map extractData(ResultSet rs) throws SQLException, DataAccessException { @@ -67,10 +68,24 @@ public Map extractData(ResultSet rs) throws SQLException String columnName = metadata.getColumnName(col); String typeName = metadata.getColumnTypeName(col); int size = metadata.getColumnDisplaySize(col); - metadataMap.put(columnName, new ColumnMetadata(columnName, typeName, size)); + metadataMap.put(columnName, new ColumnMetadata(columnName, resolveTypeAlias(typeName), size)); } return metadataMap; } + + private String resolveTypeAlias(String type) { + String resolvedType = typeAliases.get(type); + if(resolvedType != null) { + return resolvedType; + } + return type; + } + + private Map typeAliases() { + Map map = new LinkedHashMap<>(); + map.put("serial", "int4"); + return map; + } } private static class ColumnMetadata {
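To illustrate the aliasing behaviour this commit adds, here is a minimal self-contained sketch of the lookup; the class and method names below are hypothetical, and the serial-to-int4 mapping mirrors the one the patch hardcodes for PostgreSQL:

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical standalone sketch; in the patch this logic lives inside
// TableMetadataChecker.MetadataExtractor.
public class TypeAliasSketch {
  private static final Map<String, String> TYPE_ALIASES = new LinkedHashMap<>();
  static {
    // PostgreSQL JDBC metadata reports an auto-increment production column
    // as "serial", while the plain archive column is reported as "int4".
    TYPE_ALIASES.put("serial", "int4");
  }

  static String resolveTypeAlias(String type) {
    String resolvedType = TYPE_ALIASES.get(type);
    return resolvedType != null ? resolvedType : type;
  }

  public static void main(String[] args) {
    System.out.println(resolveTypeAlias("serial"));  // int4 - now matches the archive column
    System.out.println(resolveTypeAlias("varchar")); // varchar - unknown types pass through
  }
}
```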
From 427a994b0ce08a16c75f3fc4b5f34f7f97a325de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?= Date: Sat, 11 Jul 2015 16:49:28 +0300 Subject: [PATCH 20/51] Add some logging to archiving --- .../nflow/engine/service/ArchiveService.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index 2427ea716..dfe8926f5 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -7,17 +7,21 @@ import org.joda.time.DateTime; import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.util.Assert; @Named public class ArchiveService { + private static final Logger log = LoggerFactory.getLogger(ArchiveService.class); @Inject private ArchiveDao archiveDao; public int archiveWorkflows(DateTime olderThan, int batchSize) { - archiveDao.ensureValidArchiveTablesExist(); Assert.notNull(olderThan, "olderThan must not be null"); Assert.isTrue(batchSize > 0, "batchSize must be greater than 0"); + archiveDao.ensureValidArchiveTablesExist(); + log.info("Archiving starting. Archiving passive workflows older than {}, in batches of {}.", olderThan, batchSize); List workflowIds; int archivedWorkflows = 0; @@ -27,9 +31,11 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { break; } archiveDao.archiveWorkflows(workflowIds); + log.debug("Archived a batch of workflows. Workflow ids: {}", workflowIds); archivedWorkflows += workflowIds.size(); } while(!workflowIds.isEmpty()); + log.info("Archiving finished. Archived {} workflows.", archivedWorkflows); return archivedWorkflows; } }
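For context, a minimal sketch of how a deployment might invoke the service after this commit; ArchiveJob and its trigger are hypothetical, only archiveWorkflows(DateTime, int) comes from the patch:

```java
import javax.inject.Inject;
import javax.inject.Named;

import org.joda.time.DateTime;

import com.nitorcreations.nflow.engine.service.ArchiveService;

// Hypothetical maintenance bean; nFlow itself does not schedule archiving,
// so a host application would call this, e.g. from a nightly job.
@Named
public class ArchiveJob {
  @Inject
  private ArchiveService archiveService;

  public void nightlyArchiving() {
    // Move passive workflows not modified in six months, 1000 ids per batch;
    // the info/debug logging added above reports progress per batch.
    int archived = archiveService.archiveWorkflows(DateTime.now().minusMonths(6), 1000);
    System.out.println("archived " + archived + " workflows");
  }
}
```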
From f055959e516c5d9df8baf0ef4606697ab8eab994 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?= Date: Sat, 11 Jul 2015 17:54:32 +0300 Subject: [PATCH 21/51] Add test case for checking validation when there is the correct number of columns but a column has a wrong name. --- .../nflow/engine/internal/dao/TableMetadataChecker.java | 4 ++-- .../engine/internal/dao/TableMetadataCheckerTest.java | 7 +++++++ .../src/test/resources/scripts/db/metadata.create.ddl.sql | 8 ++++++++ 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java index 72fdca106..ec874d85a 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java @@ -32,8 +32,8 @@ public void ensureCopyingPossible(String sourceTable, String destinationTable) { sourceTable, destinationTable)); } if(! destMetadataMap.keySet().containsAll(sourceMetadataMap.keySet())) { - Set missingColumns = new LinkedHashSet<>(destMetadataMap.keySet()); - missingColumns.removeAll(sourceMetadataMap.keySet()); + Set missingColumns = new LinkedHashSet<>(sourceMetadataMap.keySet()); + missingColumns.removeAll(destMetadataMap.keySet()); throw new IllegalArgumentException(format("Destination table %s is missing columns %s that are present in source table %s", destinationTable, missingColumns, sourceTable)); } diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java index 046991359..9184ca486 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java @@ -62,6 +62,13 @@ public void destinationWithFewerColumnsIsInvalid() { tableMetadataChecker.ensureCopyingPossible("base", "fewer_columns"); } + @Test + public void destinationWithMissingColumnsIsInvalid() { + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Destination table wrong_columns is missing columns [TEXT2] that are present in source table base"); + tableMetadataChecker.ensureCopyingPossible("base", "wrong_columns"); + } + @Test public void destinationWithWrongTypeIsInvalid() { thrown.expect(IllegalArgumentException.class); diff --git a/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql b/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql index 70c3e6584..079b929ef 100644 --- a/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql +++ b/nflow-engine/src/test/resources/scripts/db/metadata.create.ddl.sql @@ -20,6 +20,14 @@ create table more_columns ( extra char(1) ); +create table wrong_columns ( + id integer not null primary key, + text1 varchar(20), + text_wrong char(30), + time1 time not null, + extra char(1) +); + create table fewer_columns ( id integer not null primary key, text1 varchar(20), From 13622a647d10289d32b9b3eb12aeccd56bccafd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3%A4l=C3%A4?= Date: Sun, 12 Jul 2015 11:31:28 +0300 Subject: [PATCH 22/51] Add database update scripts. The nflow_workflow_activation index is now created outside of the nflow_workflow table definition for MySQL.
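Since these scripts introduce the archive tables that TableMetadataChecker guards, a minimal sketch of the check they must satisfy may help; the standalone wiring below is hypothetical (in nFlow these beans are Spring-managed and the check runs via ArchiveDao.ensureValidArchiveTablesExist), only ensureCopyingPossible and setJdbcTemplate come from the patches above:

```java
import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;

import com.nitorcreations.nflow.engine.internal.dao.TableMetadataChecker;

// Hypothetical manual wiring for illustration; the caller is assumed to
// supply a DataSource pointing at the nFlow database.
public class ArchiveTableCheckSketch {
  public static void check(DataSource dataSource) {
    TableMetadataChecker checker = new TableMetadataChecker();
    checker.setJdbcTemplate(new JdbcTemplate(dataSource));
    // Each call throws IllegalArgumentException if the archive table created
    // by the update script cannot receive rows copied from its production table.
    checker.ensureCopyingPossible("nflow_workflow", "nflow_archive_workflow");
    checker.ensureCopyingPossible("nflow_workflow_action", "nflow_archive_workflow_action");
    checker.ensureCopyingPossible("nflow_workflow_state", "nflow_archive_workflow_state");
  }
}
```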
--- .../nflow/engine/internal/dao/ArchiveDao.java | 5 +- .../resources/scripts/db/mysql.create.ddl.sql | 12 ++-- .../scripts/db/mysql.legacy.create.ddl.sql | 12 ++-- .../db/update-1.3.0-x/h2.update.ddl.sql | 50 +++++++++++++++ .../mysql.legacy.update.ddl.sql | 49 +++++++++++++++ .../db/update-1.3.0-x/mysql.update.ddl.sql | 48 ++++++++++++++ .../update-1.3.0-x/postgresql.update.ddl.sql | 63 +++++++++++++++++++ 7 files changed, 227 insertions(+), 12 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index df90c2a99..ef93f1b32 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -53,10 +53,7 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { "limit " + maxRows, new Object[]{DaoUtil.toTimestamp(before), DaoUtil.toTimestamp(before)}, new ArchivableWorkflowsRowMapper()); - // TODO add index to nflow_workflow.modified (combined index with next_activation?) - // TODO change modified trigger for postgre - // TODO add new triggers for h2 and postgre to update scripts - // TODO implement method to check that archive and prod tables have matching fields + // TODO unit test for archiving child workflows } @Transactional diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index e0593f1dc..60e5f7a93 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -17,10 +17,12 @@ create table if not exists nflow_workflow ( created timestamp(3) not null default current_timestamp(3), modified timestamp(3) not null default current_timestamp(3) on update current_timestamp(3), executor_group varchar(64) not null, - constraint nflow_workflow_uniq unique (type, external_id, executor_group), - index nflow_workflow(next_activation, modified) + constraint nflow_workflow_uniq unique (type, external_id, executor_group) ); +drop index nflow_archive_workflow_activation; +create index nflow_workflow_activation on nflow_workflow(next_activation, modified); + create table if not exists nflow_workflow_action ( id int not null auto_increment primary key, workflow_id int not null, @@ -95,10 +97,12 @@ create table if not exists nflow_archive_workflow ( created timestamp(3) not null, modified timestamp(3) not null, executor_group varchar(64) not null, - constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group), - index nflow_archive_workflow(next_activation, modified) + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); +drop index nflow_archive_workflow_activation; +create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); + create table if not exists nflow_archive_workflow_action ( id int not null primary key, workflow_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index bd8c70dad..2af4de2e4 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -17,10 +17,12 @@ create table if not exists nflow_workflow ( 
created timestamp not null, modified timestamp not null default current_timestamp on update current_timestamp, executor_group varchar(64) not null, - constraint nflow_workflow_uniq unique (type, external_id, executor_group), - index nflow_workflow(next_activation, modified) + constraint nflow_workflow_uniq unique (type, external_id, executor_group) ); +drop index nflow_workflow_activation; +create index nflow_workflow_activation on nflow_workflow(next_activation, modified); + drop trigger if exists nflow_workflow_insert; create trigger nflow_workflow_insert before insert on `nflow_workflow` @@ -105,10 +107,12 @@ create table if not exists nflow_archive_workflow ( created timestamp not null, modified timestamp not null, executor_group varchar(64) not null, - constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group), - index nflow_archive_workflow(next_activation, modified) + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); +drop index nflow_archive_workflow_activation; +create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); + create table if not exists nflow_archive_workflow_action ( id int not null primary key, workflow_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql index 238ab434d..61655f6cd 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql @@ -10,3 +10,53 @@ alter table nflow_executor alter column host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; + +-- archiving +drop index nflow_workflow_next_activation; +create index if not exists nflow_workflow_next_activation on nflow_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow ( + id int not null primary key, + status varchar(32) not null check status in ('created', 'executing', 'inProgress', 'finished', 'manual'), + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp, + external_next_activation timestamp, + executor_id int, + retries int not null, + created timestamp not null, + modified timestamp not null, + executor_group varchar(64) not null +); + +create unique index if not exists nflow_archive_workflow_uniq on nflow_archive_workflow (type, external_id, executor_group); + +create index if not exists nflow_archive_workflow_next_activation on nflow_archive_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow_action ( + id int not null primary key, + workflow_id int not null, + executor_id int not null, + type varchar(32) not null check type in ('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange'), + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp not null, + execution_end timestamp not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key 
varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql index cee2582a8..8efc76392 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql @@ -11,3 +11,52 @@ alter table nflow_executor modify host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; +-- archiving + +alter table nflow_workflow drop index nflow_workflow; +create index nflow_workflow_activation on nflow_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow ( + id int not null primary key, + status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp null, + external_next_activation timestamp null, + executor_id int, + retries int not null default 0, + created timestamp not null, + modified timestamp not null, + executor_group varchar(64) not null, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group), + index nflow_archive_workflow(next_activation, modified) +); + +create table if not exists nflow_archive_workflow_action ( + id int not null primary key, + workflow_id int not null, + executor_id int not null default -1, + type enum('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange') not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp not null, + execution_end timestamp not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql index d8f790b53..0be81a4ab 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql @@ -10,3 +10,51 @@ alter table nflow_executor modify host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; + +-- archiving +alter table nflow_workflow drop index nflow_workflow; +create index nflow_workflow_activation on nflow_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow ( + id int not null primary key, + status enum('created', 'executing', 'inProgress', 'finished', 'manual') not null, + type varchar(64) not null, +
root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp(3) null, + external_next_activation timestamp(3) null, + executor_id int, + retries int not null default 0, + created timestamp(3) not null, + modified timestamp(3) not null, + executor_group varchar(64) not null, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group), + index nflow_archive_workflow(next_activation, modified) +); + +create table if not exists nflow_archive_workflow_action ( + id int not null primary key, + workflow_id int not null, + executor_id int not null, + type enum('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange') not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp(3) not null, + execution_end timestamp(3) not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql index ce85e19ec..952f319d1 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql @@ -11,3 +11,66 @@ alter table nflow_executor alter host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; +-- archiving + +create or replace function update_modified() returns trigger language plpgsql as ' +begin + if NEW.modified = OLD.modified then + NEW.modified := now(); + end if; + return NEW; +end; +'; + +drop trigger if exists update_nflow_modified on nflow_workflow; +create trigger update_nflow_modified before update on nflow_workflow for each row execute procedure update_modified(); + +drop index nflow_workflow_activation; +create index nflow_workflow_activation on nflow_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow ( + id integer primary key, + status workflow_status not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamptz, + external_next_activation timestamptz, + executor_id int, + retries int not null default 0, + created timestamptz not null, + modified timestamptz not null, + executor_group varchar(64) not null, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) +); + +drop index nflow_archive_workflow_activation; +create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); + +create table if not exists nflow_archive_workflow_action ( + id integer primary key, + workflow_id int not null, + executor_id int not null, + type action_type not null, + state 
varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamptz not null, + execution_end timestamptz not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade, + constraint nflow_archive_workflow_action_uniq unique (workflow_id, id) +); + +create table if not exists nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value text not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); From 8f50ee7dd8c9050d7c79b2b2fb31d202445866d6 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 14:49:14 +0300 Subject: [PATCH 23/51] fix eclipse warning, cleanup --- .../nflow/engine/internal/dao/ArchiveDao.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index ef93f1b32..0beb43386 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -1,20 +1,19 @@ package com.nitorcreations.nflow.engine.internal.dao; +import static com.nitorcreations.nflow.engine.internal.dao.DaoUtil.toTimestamp; +import static com.nitorcreations.nflow.engine.internal.dao.DaoUtil.ColumnNamesExtractor.columnNamesExtractor; import static com.nitorcreations.nflow.engine.internal.storage.db.DatabaseConfiguration.NFLOW_DATABASE_INITIALIZER; +import static org.apache.commons.lang3.StringUtils.join; import java.sql.ResultSet; -import java.sql.ResultSetMetaData; import java.sql.SQLException; -import java.util.LinkedList; import java.util.List; import javax.inject.Inject; import javax.inject.Named; -import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.springframework.context.annotation.DependsOn; -import org.springframework.dao.DataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import org.springframework.transaction.annotation.Transactional; @@ -51,7 +50,7 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { ")" + "order by modified asc " + "limit " + maxRows, - new Object[]{DaoUtil.toTimestamp(before), DaoUtil.toTimestamp(before)}, new ArchivableWorkflowsRowMapper()); + new Object[] { toTimestamp(before), toTimestamp(before) }, new ArchivableWorkflowsRowMapper()); // TODO unit test for archiving child workflows } @@ -93,15 +92,15 @@ private void deleteWorkflows(String workflowIdParams) { } private String columnsFromMetadata(String tableName) { - List columnNames = jdbc.query("select * from " + tableName + " where 1 = 0", DaoUtil.ColumnNamesExtractor.columnNamesExtractor); - return StringUtils.join(columnNames.toArray(), ","); + List columnNames = jdbc.query("select * from " + tableName + " where 1 = 0", columnNamesExtractor); + return join(columnNames.toArray(), ","); } private String params(List workflowIds) { - return "(" + StringUtils.join(workflowIds.toArray(), ",") + ")"; + return "(" + join(workflowIds.toArray(), ",") + ")"; } - private static class ArchivableWorkflowsRowMapper implements RowMapper { + static class ArchivableWorkflowsRowMapper implements RowMapper { @Override public Integer mapRow(ResultSet rs, 
int rowNum) throws SQLException { return rs.getInt("id"); From df1a6ee9a49550d772d409fc74ed617f249c712a Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 14:49:29 +0300 Subject: [PATCH 24/51] format --- .../nflow/engine/internal/dao/DaoUtil.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java index 17da0c432..17f46eebf 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java @@ -39,20 +39,20 @@ public static Integer getInt(ResultSet rs, String columnLabel) throws SQLExcepti return rs.wasNull() ? null : value; } - public static final class ColumnNamesExtractor implements org.springframework.jdbc.core.ResultSetExtractor> { + public static final class ColumnNamesExtractor implements ResultSetExtractor> { static final ColumnNamesExtractor columnNamesExtractor = new ColumnNamesExtractor(); - private ColumnNamesExtractor() {} + + private ColumnNamesExtractor() { + } @Override public List extractData(ResultSet rs) throws SQLException, DataAccessException { List columnNames = new LinkedList<>(); - ResultSetMetaData metadata = rs.getMetaData(); - for(int col = 1; col <= metadata.getColumnCount(); col ++) { + for (int col = 1; col <= metadata.getColumnCount(); col++) { columnNames.add(metadata.getColumnName(col)); } return columnNames; } } - } From 07703506239d8a1f5bf7f83d385359fe9768fa14 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 14:49:39 +0300 Subject: [PATCH 25/51] fix typo --- nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index 60e5f7a93..3a68e66a1 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -20,7 +20,7 @@ create table if not exists nflow_workflow ( constraint nflow_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_activation; +drop index nflow_workflow_activation; create index nflow_workflow_activation on nflow_workflow(next_activation, modified); create table if not exists nflow_workflow_action ( From dbf0dfd39d12ca01419efae5a905dacdacdd4fcc Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 14:54:31 +0300 Subject: [PATCH 26/51] fix eclipse warning, format --- .../internal/dao/TableMetadataChecker.java | 55 ++++++++++--------- 1 file changed, 28 insertions(+), 27 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java index ec874d85a..fa45df19a 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataChecker.java @@ -1,13 +1,8 @@ package com.nitorcreations.nflow.engine.internal.dao; -import com.nitorcreations.nflow.engine.internal.config.NFlow; -import org.apache.commons.lang3.builder.ReflectionToStringBuilder; -import 
org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.ResultSetExtractor; +import static java.lang.String.format; +import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; -import javax.inject.Inject; -import javax.inject.Named; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; @@ -17,8 +12,15 @@ import java.util.Map.Entry; import java.util.Set; -import static java.lang.String.format; -import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; +import javax.inject.Inject; +import javax.inject.Named; + +import org.apache.commons.lang3.builder.ReflectionToStringBuilder; +import org.springframework.dao.DataAccessException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.ResultSetExtractor; + +import com.nitorcreations.nflow.engine.internal.config.NFlow; @Named public class TableMetadataChecker { @@ -27,28 +29,28 @@ public class TableMetadataChecker { public void ensureCopyingPossible(String sourceTable, String destinationTable) { Map sourceMetadataMap = getMetadata(sourceTable); Map destMetadataMap = getMetadata(destinationTable); - if(destMetadataMap.size() < sourceMetadataMap.size()) { - throw new IllegalArgumentException(format("Source table %s has more columns than destination table %s", - sourceTable, destinationTable)); + if (destMetadataMap.size() < sourceMetadataMap.size()) { + throw new IllegalArgumentException(format("Source table %s has more columns than destination table %s", sourceTable, + destinationTable)); } - if(! destMetadataMap.keySet().containsAll(sourceMetadataMap.keySet())) { + if (!destMetadataMap.keySet().containsAll(sourceMetadataMap.keySet())) { Set missingColumns = new LinkedHashSet<>(sourceMetadataMap.keySet()); missingColumns.removeAll(destMetadataMap.keySet()); throw new IllegalArgumentException(format("Destination table %s is missing columns %s that are present in source table %s", - destinationTable, missingColumns, sourceTable)); + destinationTable, missingColumns, sourceTable)); } - for(Entry entry: sourceMetadataMap.entrySet()) { + for (Entry entry : sourceMetadataMap.entrySet()) { ColumnMetadata sourceMetadata = entry.getValue(); ColumnMetadata destMetadata = destMetadataMap.get(entry.getKey()); - if(!sourceMetadata.typeName.equals(destMetadata.typeName)) { - throw new IllegalArgumentException(format("Source column %s.%s has type %s and destination column %s.%s has mismatching type %s", - sourceTable, sourceMetadata.columnName, sourceMetadata.typeName, - destinationTable, destMetadata.columnName, destMetadata.typeName)); + if (!sourceMetadata.typeName.equals(destMetadata.typeName)) { + throw new IllegalArgumentException(format( + "Source column %s.%s has type %s and destination column %s.%s has mismatching type %s", sourceTable, + sourceMetadata.columnName, sourceMetadata.typeName, destinationTable, destMetadata.columnName, destMetadata.typeName)); } - if(sourceMetadata.size > destMetadata.size) { + if (sourceMetadata.size > destMetadata.size) { throw new IllegalArgumentException(format("Source column %s.%s has size %s and destination column %s.%s smaller size %s", - sourceTable, sourceMetadata.columnName, sourceMetadata.size, - destinationTable, destMetadata.columnName, destMetadata.size)); + sourceTable, sourceMetadata.columnName, sourceMetadata.size, destinationTable, destMetadata.columnName, + destMetadata.size)); } } } @@ -57,14 +59,14 @@ 
private Map getMetadata(String tableName) { return jdbc.query("select * from " + tableName + " where 1 = 0", new MetadataExtractor()); } - private static class MetadataExtractor implements ResultSetExtractor> { - private Map typeAliases = typeAliases(); + static class MetadataExtractor implements ResultSetExtractor> { + private final Map typeAliases = typeAliases(); @Override public Map extractData(ResultSet rs) throws SQLException, DataAccessException { ResultSetMetaData metadata = rs.getMetaData(); Map metadataMap = new LinkedHashMap<>(); - for(int col = 1; col <= metadata.getColumnCount(); col ++) { + for (int col = 1; col <= metadata.getColumnCount(); col++) { String columnName = metadata.getColumnName(col); String typeName = metadata.getColumnTypeName(col); int size = metadata.getColumnDisplaySize(col); @@ -75,7 +77,7 @@ public Map extractData(ResultSet rs) throws SQLException private String resolveTypeAlias(String type) { String resolvedType = typeAliases.get(type); - if(resolvedType != null) { + if (resolvedType != null) { return resolvedType; } return type; @@ -109,5 +111,4 @@ public String toString() { public void setJdbcTemplate(@NFlow JdbcTemplate jdbcTemplate) { this.jdbc = jdbcTemplate; } - } From 1be69a047285f82cb2ef169c2324ed0b23b295af Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 14:58:54 +0300 Subject: [PATCH 27/51] format --- .../engine/internal/storage/db/H2ModifiedColumnTrigger.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java index 9a8cf5914..841f450e0 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/storage/db/H2ModifiedColumnTrigger.java @@ -15,8 +15,8 @@ public class H2ModifiedColumnTrigger extends TriggerAdapter { public void fire(Connection conn, ResultSet oldRow, ResultSet newRow) throws SQLException { Timestamp oldModified = oldRow.getTimestamp("modified"); Timestamp newModified = newRow.getTimestamp("modified"); - if(Objects.equals(oldModified, newModified)) { + if (Objects.equals(oldModified, newModified)) { newRow.updateTimestamp("modified", new Timestamp(currentTimeMillis())); } } -} \ No newline at end of file +} From 01d806889827921bbc86ae7b16dca322256d2e6f Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 15:15:54 +0300 Subject: [PATCH 28/51] format --- .../nflow/engine/service/ArchiveService.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index dfe8926f5..3008794e3 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -1,19 +1,21 @@ package com.nitorcreations.nflow.engine.service; +import static org.slf4j.LoggerFactory.getLogger; + import java.util.List; import javax.inject.Inject; import javax.inject.Named; import org.joda.time.DateTime; -import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import 
org.springframework.util.Assert; +import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; + @Named public class ArchiveService { - private static final Logger log = LoggerFactory.getLogger(ArchiveService.class); + private static final Logger log = getLogger(ArchiveService.class); @Inject private ArchiveDao archiveDao; @@ -27,13 +29,13 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { int archivedWorkflows = 0; do { workflowIds = archiveDao.listArchivableWorkflows(olderThan, batchSize); - if(workflowIds.isEmpty()) { + if (workflowIds.isEmpty()) { break; } archiveDao.archiveWorkflows(workflowIds); log.debug("Archived a batch of workflows. Workflow ids: {}", workflowIds); archivedWorkflows += workflowIds.size(); - } while(!workflowIds.isEmpty()); + } while (!workflowIds.isEmpty()); log.info("Archiving finished. Archived {} workflows.", archivedWorkflows); return archivedWorkflows; From 18c2851940b960d90f9b51ee1a1d6a457e686bd9 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Mon, 13 Jul 2015 15:52:22 +0300 Subject: [PATCH 29/51] format --- .../engine/internal/dao/ArchiveDaoTest.java | 86 ++++++++++--------- .../dao/TableMetadataCheckerTest.java | 12 +-- .../engine/service/ArchiveServiceTest.java | 32 ++++--- .../nflow/tests/ArchiveTest.java | 66 +++++++------- 4 files changed, 106 insertions(+), 90 deletions(-) diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java index a7df7fc24..eb4ff57bc 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java @@ -1,22 +1,26 @@ package com.nitorcreations.nflow.engine.internal.dao; -import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance; -import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstanceAction; -import org.apache.commons.lang3.builder.ReflectionToStringBuilder; -import org.joda.time.DateTime; -import org.junit.Test; -import org.springframework.dao.EmptyResultDataAccessException; +import static com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus.created; +import static java.util.Arrays.asList; +import static org.joda.time.DateTime.now; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; -import javax.inject.Inject; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; -import static com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus.created; -import static java.util.Arrays.asList; -import static org.joda.time.DateTime.now; -import static org.junit.Assert.*; +import javax.inject.Inject; + +import org.apache.commons.lang3.builder.ReflectionToStringBuilder; +import org.joda.time.DateTime; +import org.junit.Test; +import org.springframework.dao.EmptyResultDataAccessException; + +import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstance; +import com.nitorcreations.nflow.engine.workflow.instance.WorkflowInstanceAction; public class ArchiveDaoTest extends BaseDaoTest { @Inject @@ -24,7 +28,7 @@ public class ArchiveDaoTest extends BaseDaoTest { @Inject WorkflowInstanceDao workflowInstanceDao; - DateTime archiveTimeLimit = new DateTime(2015,7,8, 21,28,0,0); + DateTime archiveTimeLimit = new 
DateTime(2015, 7, 8, 21, 28, 0, 0); DateTime archiveTime1 = archiveTimeLimit.minus(1); DateTime archiveTime2 = archiveTimeLimit.minusMinutes(1); @@ -59,7 +63,7 @@ public void listingReturnsOldestRowsAndMaxBatchSizeRows() { int eleventh = storePassiveWorkflow(archiveTime2); - for(int i = 0; i < 9; i++){ + for (int i = 0; i < 9; i++) { expectedArchive.add(storePassiveWorkflow(archiveTime4)); } expectedArchive.add(storePassiveWorkflow(archiveTime3)); @@ -178,76 +182,79 @@ public void archivingWorkflowsWithActionsAndStatesWorks() { } private void assertActiveWorkflowsRemoved(List workflowIds) { - for(int id: workflowIds){ + for (int id : workflowIds) { try { workflowInstanceDao.getWorkflowInstance(id); fail("Expected workflow " + id + " to be removed"); - } catch(EmptyResultDataAccessException e) { + } catch (EmptyResultDataAccessException e) { // expected exception } } } private void assertArchiveWorkflowsExist(List workflowIds) { - for(int workflowId : workflowIds){ + for (int workflowId : workflowIds) { Map archived = getArchivedWorkflow(workflowId); assertEquals(workflowId, archived.get("id")); } } private void assertActiveActionsRemoved(List actionIds) { - for(int actionId: actionIds) { + for (int actionId : actionIds) { int found = rowCount("select 1 from nflow_workflow_action where id = ?", actionId); assertEquals("Found unexpected action " + actionId + " in nflow_workflow_action", 0, found); } } private void assertArchiveActionsExist(List actionIds) { - for(int actionId: actionIds) { + for (int actionId : actionIds) { int found = rowCount("select 1 from nflow_archive_workflow_action where id = ?", actionId); assertEquals("Action " + actionId + " not found in nflow_archive_workflow_action", 1, found); } } private void assertActiveStateVariablesRemoved(List stateKeys) { - for(StateKey stateKey: stateKeys) { + for (StateKey stateKey : stateKeys) { int found = rowCount("select 1 from nflow_workflow_state where workflow_id = ? and action_id = ? and state_key = ?", - stateKey.workflowId, stateKey.actionId, stateKey.stateKey); + stateKey.workflowId, stateKey.actionId, stateKey.stateKey); assertEquals("Found unexpected state variable " + stateKey + " in nflow_workflow_state", 0, found); } } private void assertArchiveStateVariablesExist(List stateKeys) { - for(StateKey stateKey: stateKeys) { - int found = rowCount("select 1 from nflow_archive_workflow_state where workflow_id = ? and action_id = ? and state_key = ?", - stateKey.workflowId, stateKey.actionId, stateKey.stateKey); + for (StateKey stateKey : stateKeys) { + int found = rowCount( + "select 1 from nflow_archive_workflow_state where workflow_id = ? and action_id = ? and state_key = ?", + stateKey.workflowId, stateKey.actionId, stateKey.stateKey); assertEquals("State variable " + stateKey + " not found in nflow_archive_workflow_state", 1, found); } } - private int rowCount(String sql, Object ... params) { + private int rowCount(String sql, Object... 
params) { return jdbc.queryForList(sql, params).size(); } private Map getArchivedWorkflow(int workflowId) { - return jdbc.queryForMap("select * from nflow_archive_workflow where id = ?", new Object[]{workflowId}); + return jdbc.queryForMap("select * from nflow_archive_workflow where id = ?", new Object[] { workflowId }); } private int storePassiveWorkflow(DateTime modified) { - WorkflowInstance instance = constructWorkflowInstanceBuilder().setStatus(created).setNextActivation(null).setModified(modified).build(); + WorkflowInstance instance = constructWorkflowInstanceBuilder().setStatus(created).setNextActivation(null) + .setModified(modified).build(); int id = insert(instance); return id; } private int storeActiveWorkflow(DateTime modified) { - WorkflowInstance instance = constructWorkflowInstanceBuilder().setStatus(created).setNextActivation(now()).setModified(modified).build(); + WorkflowInstance instance = constructWorkflowInstanceBuilder().setStatus(created).setNextActivation(now()) + .setModified(modified).build(); int id = insert(instance); return id; } private List storeActions(int workflowId, int actionCount) { List actionIds = new ArrayList<>(); - for(int i = 0; i < actionCount; i ++) { + for (int i = 0; i < actionCount; i++) { actionIds.add(storeAction(workflowId)); } return actionIds; @@ -255,24 +262,26 @@ private List storeActions(int workflowId, int actionCount) { private List storeStateVariables(int workflowId, List actionIds, int count) { List stateKeys = new ArrayList<>(); - for(int actionId: actionIds) { + for (int actionId : actionIds) { stateKeys.addAll(storeStateVariables(workflowId, actionId, count)); } return stateKeys; } + private List storeStateVariables(int workflowId, int actionId, int stateCount) { List stateKeys = new ArrayList<>(); int index = 1; - for(int i = 0; i < stateCount; i ++) { - stateKeys.add(storeStateVariable(workflowId, actionId, "key-" + (index++) )); + for (int i = 0; i < stateCount; i++) { + stateKeys.add(storeStateVariable(workflowId, actionId, "key-" + (index++))); } return stateKeys; } private StateKey storeStateVariable(int workflowId, int actionId, String key) { String value = key + "_value"; - int updated = jdbc.update("insert into nflow_workflow_state (workflow_id, action_id, state_key, state_value) values (?, ?, ?, ?)", - workflowId, actionId, key, value); + int updated = jdbc.update( + "insert into nflow_workflow_state (workflow_id, action_id, state_key, state_value) values (?, ?, ?, ?)", workflowId, + actionId, key, value); assertEquals(1, updated); return new StateKey(workflowId, actionId, key); } @@ -283,12 +292,9 @@ private int storeAction(int workflowId) { } private WorkflowInstanceAction.Builder actionBuilder(int workflowId) { - return new WorkflowInstanceAction.Builder() - .setState("dummyState") - .setType(WorkflowInstanceAction.WorkflowActionType.stateExecution) - .setExecutionStart(DateTime.now()) - .setExecutionEnd(DateTime.now()) - .setWorkflowInstanceId(workflowId); + return new WorkflowInstanceAction.Builder().setState("dummyState") + .setType(WorkflowInstanceAction.WorkflowActionType.stateExecution).setExecutionStart(DateTime.now()) + .setExecutionEnd(DateTime.now()).setWorkflowInstanceId(workflowId); } private int insert(WorkflowInstance instance) { @@ -303,7 +309,7 @@ private int insert(WorkflowInstance instance) { private void updateModified(int workflowId, DateTime modified) { int updateCount = jdbc.update("update nflow_workflow set modified = ? 
where id = ?", - new Object[]{ DaoUtil.toTimestamp(modified), workflowId }); + new Object[] { DaoUtil.toTimestamp(modified), workflowId }); assertEquals(1, updateCount); } diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java index 9184ca486..351b042ef 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/TableMetadataCheckerTest.java @@ -1,6 +1,8 @@ package com.nitorcreations.nflow.engine.internal.dao; -import com.nitorcreations.nflow.engine.internal.storage.db.DatabaseInitializer; +import javax.inject.Inject; +import javax.sql.DataSource; + import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -12,8 +14,7 @@ import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import javax.inject.Inject; -import javax.sql.DataSource; +import com.nitorcreations.nflow.engine.internal.storage.db.DatabaseInitializer; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = { DaoTestConfiguration.class }) @@ -30,7 +31,7 @@ public class TableMetadataCheckerTest { @Before public void setup() { - if(initializer == null) { + if (initializer == null) { initializer = new DatabaseInitializer("metadata", dataSource, environmentCreateOnStartup("true")); } } @@ -72,7 +73,8 @@ public void destinationWithMissingColumnsIsInvalid() { @Test public void destinationWithWrongTypeIsInvalid() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Source column base.TIME1 has type TIME and destination column wrong_type.TIME1 has mismatching type INTEGER"); + thrown + .expectMessage("Source column base.TIME1 has type TIME and destination column wrong_type.TIME1 has mismatching type INTEGER"); tableMetadataChecker.ensureCopyingPossible("base", "wrong_type"); } diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java index e59740a04..4eba197b3 100644 --- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java @@ -1,6 +1,18 @@ package com.nitorcreations.nflow.engine.service; -import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; @@ -8,13 +20,7 @@ import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.*; +import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao; 
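One behavioural detail worth noting in the hunk below: the stubbing of listArchivableWorkflows switches from thenReturn with varargs to a doReturn chain, and both forms stub consecutive calls. A minimal self-contained sketch of the idiom (the mock setup here is hypothetical and only for illustration):

```java
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import java.util.List;

import org.joda.time.DateTime;

import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao;

public class ConsecutiveStubbingSketch {
  public static void main(String[] args) {
    ArchiveDao dao = mock(ArchiveDao.class);
    DateTime limit = new DateTime(2015, 7, 10, 19, 57, 0, 0);
    List<Integer> batch = asList(1, 2, 3);
    // Each stubbed value answers one call in order; the last value repeats
    // for any further calls, so the archiving loop eventually terminates.
    doReturn(batch).doReturn(batch).doReturn(emptyList()).when(dao).listArchivableWorkflows(limit, 10);
    System.out.println(dao.listArchivableWorkflows(limit, 10)); // [1, 2, 3]
    System.out.println(dao.listArchivableWorkflows(limit, 10)); // [1, 2, 3]
    System.out.println(dao.listArchivableWorkflows(limit, 10)); // []
  }
}
```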
@RunWith(MockitoJUnitRunner.class) public class ArchiveServiceTest { @@ -22,9 +28,9 @@ public class ArchiveServiceTest { private final ArchiveService service = new ArchiveService(); @Mock private ArchiveDao dao; - private DateTime limit = new DateTime(2015,7,10,19,57,0,0); - private List emptyList = Collections.emptyList(); - private List dataList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); + private final DateTime limit = new DateTime(2015, 7, 10, 19, 57, 0, 0); + private final List emptyList = Collections.emptyList(); + private final List dataList = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); @Test public void withZeroWorkflowsInFirstBatchCausesNothingToArchive() { @@ -38,7 +44,7 @@ public void withZeroWorkflowsInFirstBatchCausesNothingToArchive() { @Test public void archivingContinuesUntilEmptyListOfArchivableIsReturned() { - when(dao.listArchivableWorkflows(limit, 10)).thenReturn(dataList, dataList, dataList, emptyList); + doReturn(dataList).doReturn(dataList).doReturn(dataList).doReturn(emptyList).when(dao).listArchivableWorkflows(limit, 10); int archived = service.archiveWorkflows(limit, 10); assertEquals(dataList.size() * 3, archived); verify(dao).ensureValidArchiveTablesExist(); @@ -53,7 +59,7 @@ public void noArchivingHappensWhenValidArchiveTablesDoNotExist() { try { service.archiveWorkflows(limit, 10); fail("exception expected"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { // ignore } verify(dao).ensureValidArchiveTablesExist(); diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java index 4f15d31e9..e73e28820 100644 --- a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java +++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ArchiveTest.java @@ -1,40 +1,42 @@ package com.nitorcreations.nflow.tests; -import com.nitorcreations.nflow.engine.service.ArchiveService; -import com.nitorcreations.nflow.rest.v1.msg.CreateWorkflowInstanceRequest; -import com.nitorcreations.nflow.rest.v1.msg.CreateWorkflowInstanceResponse; -import com.nitorcreations.nflow.tests.demo.DemoWorkflow; -import com.nitorcreations.nflow.tests.demo.FibonacciWorkflow; -import com.nitorcreations.nflow.tests.runner.NflowServerRule; +import static org.apache.cxf.jaxrs.client.WebClient.fromClient; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; +import static org.junit.runners.MethodSorters.NAME_ASCENDING; + +import java.util.ArrayList; +import java.util.List; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import javax.inject.Inject; + import org.joda.time.DateTime; import org.junit.ClassRule; import org.junit.FixMethodOrder; import org.junit.Test; import org.springframework.context.annotation.ComponentScan; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.inject.Inject; -import java.util.ArrayList; -import java.util.List; - -import static org.apache.cxf.jaxrs.client.WebClient.fromClient; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.runners.MethodSorters.NAME_ASCENDING; +import com.nitorcreations.nflow.engine.service.ArchiveService; +import com.nitorcreations.nflow.rest.v1.msg.CreateWorkflowInstanceRequest; +import 
com.nitorcreations.nflow.rest.v1.msg.CreateWorkflowInstanceResponse; +import com.nitorcreations.nflow.tests.demo.DemoWorkflow; +import com.nitorcreations.nflow.tests.demo.FibonacciWorkflow; +import com.nitorcreations.nflow.tests.runner.NflowServerRule; @FixMethodOrder(NAME_ASCENDING) public class ArchiveTest extends AbstractNflowTest { - private static final int STEP_1_WORKFLOWS = 4, STEP_2_WORKFLOWS = 7, STEP_3_WORKFLOWS = 4; + private static final int STEP_1_WORKFLOWS = 4; + private static final int STEP_2_WORKFLOWS = 7; + private static final int STEP_3_WORKFLOWS = 4; private static final int CREATE_TIMEOUT = 15000; private static final int ARCHIVE_TIMEOUT = 15000; @ClassRule - public static NflowServerRule server = new NflowServerRule.Builder() - .prop("nflow.dispatcher.sleep.ms", 25) - .springContextClass(ArchiveConfiguration.class) - .build(); + public static NflowServerRule server = new NflowServerRule.Builder().prop("nflow.dispatcher.sleep.ms", 25) + .springContextClass(ArchiveConfiguration.class).build(); static ArchiveService archiveService; private static DateTime archiveLimit1, archiveLimit2; @@ -49,13 +51,13 @@ public void t00_cleanupExistingArchivableStuff() { } @Test(timeout = CREATE_TIMEOUT) - public void t01_createWorkflows() throws InterruptedException { + public void t01_createWorkflows() { waitUntilWorkflowsFinished(createWorkflows(STEP_1_WORKFLOWS)); archiveLimit1 = DateTime.now(); } @Test(timeout = CREATE_TIMEOUT) - public void t02_createMoreWorkflows() throws InterruptedException { + public void t02_createMoreWorkflows() { waitUntilWorkflowsFinished(createWorkflows(STEP_2_WORKFLOWS)); archiveLimit2 = DateTime.now(); } @@ -80,7 +82,7 @@ public void t05_archiveBeforeTime2Archives() { } @Test(timeout = CREATE_TIMEOUT) - public void t06_createMoreWorkflows() throws InterruptedException { + public void t06_createMoreWorkflows() { waitUntilWorkflowsFinished(createWorkflows(STEP_3_WORKFLOWS)); } @@ -98,7 +100,7 @@ public void t08_archiveAgainBeforeTime2DoesNotArchiveAnything() { private List createWorkflows(int count) { List ids = new ArrayList<>(); - for(int i = 0; i < count; i ++) { + for (int i = 0; i < count; i++) { ids.add(createWorkflow()); } return ids; @@ -108,13 +110,14 @@ private int createWorkflow() { CreateWorkflowInstanceRequest req = new CreateWorkflowInstanceRequest(); req.type = FibonacciWorkflow.WORKFLOW_TYPE; req.requestData = nflowObjectMapper().valueToTree(new FibonacciWorkflow.FiboData(3)); - CreateWorkflowInstanceResponse resp = fromClient(workflowInstanceResource, true).put(req, CreateWorkflowInstanceResponse.class); + CreateWorkflowInstanceResponse resp = fromClient(workflowInstanceResource, true).put(req, + CreateWorkflowInstanceResponse.class); assertThat(resp.id, notNullValue()); return resp.id; } private void waitUntilWorkflowsFinished(List workflowIds) { - for(int workflowId : workflowIds) { + for (int workflowId : workflowIds) { try { getWorkflowInstance(workflowId, "done"); } catch (InterruptedException e) { @@ -128,17 +131,16 @@ private void waitUntilWorkflowsFinished(List workflowIds) { @ComponentScan(basePackageClasses = DemoWorkflow.class) private static class ArchiveConfiguration { @Inject - private ArchiveService archiveService; + private ArchiveService service; @PostConstruct public void linkArchiveServiceToTestClass() { - ArchiveTest.archiveService = archiveService; + archiveService = service; } @PreDestroy public void removeArchiveServiceFromTestClass() { - ArchiveTest.archiveService = null; + archiveService = null; } - } } From 
fdb2112f02a3b6441ed3ad583368e260b13a5377 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Mon, 13 Jul 2015 15:51:58 +0300 Subject: [PATCH 30/51] Support child workflows correctly when fetching archivable workflows. --- .../nflow/engine/internal/dao/ArchiveDao.java | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index 0beb43386..394796026 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -43,16 +43,21 @@ public void ensureValidArchiveTablesExist() { } public List listArchivableWorkflows(DateTime before, int maxRows) { - return jdbc.query("select * from nflow_workflow parent where parent.next_activation is null and parent.modified <= ? " + - "and not exists(" + - " select 1 from nflow_workflow child where child.root_workflow_id = parent.id " + - " and (child.modified > ? or child.next_activation is not null)" + - ")" + - "order by modified asc " + - "limit " + maxRows, + return jdbc.query( + "select w.id id from nflow_workflow w, " + + "(" + + " select parent.id from nflow_workflow parent " + + " where parent.next_activation is null and parent.modified <= ? " + + " and parent.root_workflow_id is null " + + " and not exists(" + + " select 1 from nflow_workflow child where child.root_workflow_id = parent.id " + + " and (child.modified > ? or child.next_activation is not null)" + + " )" + + " order by modified asc " + + " limit " + maxRows + + ") as archivable_parent " + + "where archivable_parent.id = w.id or archivable_parent.id = w.root_workflow_id", new Object[] { toTimestamp(before), toTimestamp(before) }, new ArchivableWorkflowsRowMapper()); - - // TODO unit test for archiving child workflows } @Transactional From 1186601fe10e0234bfd6e2a4b249e3f30ce68bc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Mon, 13 Jul 2015 16:25:45 +0300 Subject: [PATCH 31/51] Small doc fixes and trivial refactorings --- .../nflow/engine/internal/dao/WorkflowInstanceDao.java | 2 +- .../nflow/engine/workflow/instance/WorkflowInstance.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java index 43367cb0e..53f1d2002 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/WorkflowInstanceDao.java @@ -278,7 +278,7 @@ protected void doInTransactionWithoutResult(TransactionStatus status) { updateWorkflowInstance(instance); int parentActionId = insertWorkflowInstanceAction(instance, action); for (WorkflowInstance childTemplate : childWorkflows) { - Integer rootWorkflowId = instance.rootWorkflowId != null ? instance.rootWorkflowId : instance.id; + Integer rootWorkflowId = instance.rootWorkflowId == null ? 
instance.id : instance.rootWorkflowId; WorkflowInstance childWorkflow = new WorkflowInstance.Builder(childTemplate).setRootWorkflowId(rootWorkflowId) .setParentWorkflowId(instance.id).setParentActionId(parentActionId).build(); insertWorkflowInstance(childWorkflow); diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java index 720589e12..fbb60eaf8 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/workflow/instance/WorkflowInstance.java @@ -44,17 +44,17 @@ public static enum WorkflowInstanceStatus { /** * The id of the workflow that created the hierarchy of workflow where this sub workflow belongs to. - * Null for workflows without children and workflows that are root of hierarchy. + * Null for workflows that are the root of hierarchy. */ public final Integer rootWorkflowId; /** - * The id of the workflow that created this sub workflow. Is null for parent workflows. + * The id of the workflow that created this sub workflow. Is null for root workflows. */ public final Integer parentWorkflowId; /** - * The id of the workflow action that created this sub workflow. Is null for parent workflows. + * The id of the workflow action that created this sub workflow. Is null for root workflows. */ public final Integer parentActionId; From f02d09c3153632793915da513713f62cab4a8ed0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Mon, 13 Jul 2015 17:03:56 +0300 Subject: [PATCH 32/51] Fix documentation. Update ConstantWorkflow error state to match current spec. --- nflow-perf-test/README.md | 2 +- .../nflow/performance/workflow/ConstantWorkflow.java | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/nflow-perf-test/README.md b/nflow-perf-test/README.md index 8fa1e1239..e9dbfa941 100644 --- a/nflow-perf-test/README.md +++ b/nflow-perf-test/README.md @@ -20,7 +20,7 @@ java -Dnflow.db.user=nflow -Dnflow.db.password=nflownflow -Dnflow.db.postgresql.url=jdbc:postgresql://:5432/nflow?tcpKeepAlive=true -Dnflow.executor.group=nflow-perf -Dnflow.non_spring_workflows_filename=workflows.txt -Dspring.profiles.active=nflow.db.postgresql -Dtestdata.target.count=1000000 - -jar nflow/nflow-perf-test/target/nflow-perf-tests-*-SNAPSHOT.jar generateTestData + -jar nflow/nflow-perf-test/target/nflow-perf-test-*-SNAPSHOT.jar generateTestData ``` **Setup AWS environment using Ansible.** diff --git a/nflow-perf-test/src/main/java/com/nitorcreations/nflow/performance/workflow/ConstantWorkflow.java b/nflow-perf-test/src/main/java/com/nitorcreations/nflow/performance/workflow/ConstantWorkflow.java index 33e7884d8..ec3c52822 100644 --- a/nflow-perf-test/src/main/java/com/nitorcreations/nflow/performance/workflow/ConstantWorkflow.java +++ b/nflow-perf-test/src/main/java/com/nitorcreations/nflow/performance/workflow/ConstantWorkflow.java @@ -100,8 +100,7 @@ public NextAction slowState(StateExecution execution) { return NextAction.stopInState(ConstantState.end, "Goto end"); } - public NextAction error(StateExecution execution) { + public void error(StateExecution execution) { logger.error("should not happen"); - return null; } } From 7530bbb5740d2aacc825e3beeae583b4e3bd0471 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Mon, 13 Jul 2015 19:24:39 +0300 Subject: [PATCH 33/51] Improved logging
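This commit adds throughput logging around the archiving loop. For illustration, a minimal standalone sketch of the rate computation it introduces, assuming commons-lang3 StopWatch on the classpath (getTime() returns elapsed milliseconds); the demo class, loop bounds, and sleep are hypothetical stand-ins, not code from this patch:

    import org.apache.commons.lang3.time.StopWatch;

    public class ArchiveRateLogDemo {
      public static void main(String[] args) throws InterruptedException {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        int archivedTotal = 0;
        for (int batch = 0; batch < 3; batch++) {
          Thread.sleep(100); // stand-in for archiving one batch of workflows
          archivedTotal += 10;
          // Convert elapsed milliseconds to seconds to derive workflows/second.
          double seconds = stopWatch.getTime() / 1000.0;
          // Note: a very fast first batch can leave the divisor near zero; a later
          // commit in this series clamps it with max(seconds, 0.000001).
          System.out.printf("Archived %d workflows, about %.1f workflows/second%n",
              archivedTotal, archivedTotal / seconds);
        }
      }
    }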
--- .../nflow/engine/service/ArchiveService.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index 3008794e3..fc5f40542 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -7,6 +7,7 @@ import javax.inject.Inject; import javax.inject.Named; +import org.apache.commons.lang3.time.StopWatch; import org.joda.time.DateTime; import org.slf4j.Logger; import org.springframework.util.Assert; @@ -24,7 +25,8 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { Assert.isTrue(batchSize > 0, "batchSize must be greater than 0"); archiveDao.ensureValidArchiveTablesExist(); log.info("Archiving starting. Archiving passive workflows older than {}, in batches of {}.", olderThan, batchSize); - + StopWatch stopWatch = new StopWatch(); + stopWatch.start(); List workflowIds; int archivedWorkflows = 0; do { @@ -33,8 +35,11 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { break; } archiveDao.archiveWorkflows(workflowIds); - log.debug("Archived a batch of workflows. Workflow ids: {}", workflowIds); archivedWorkflows += workflowIds.size(); + + double timeDiff = stopWatch.getTime()/1000.0; + log.debug("Archived {} workflows. {} workflows / second. Workflow ids: {}. ", + workflowIds.size(), archivedWorkflows / timeDiff, workflowIds); } while (!workflowIds.isEmpty()); log.info("Archiving finished. Archived {} workflows.", archivedWorkflows); From 6e18de714aca5166263c5f1dede8454bae98a82c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Mon, 13 Jul 2015 22:17:34 +0300 Subject: [PATCH 34/51] Add JavaDoc to ArchiveService. --- .../nflow/engine/service/ArchiveService.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index fc5f40542..55ba8fbe2 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -20,6 +20,16 @@ public class ArchiveService { @Inject private ArchiveDao archiveDao; + /** + * Archive old and passive workflows. Copies workflow instance, workflow instance actions and state variables to + * corresponding archive tables and removes them production tables. Archives workflows that do not have + * nextActivation and whose modified time is earlier than olderThan parameter. + * + * @param olderThan + * @param batchSize number of workflow hierarchies to archive in single transactions. Typical value is 1-20. This + * parameter mostly affects on archival performance. 
+ * @return number of archived workflows + */ public int archiveWorkflows(DateTime olderThan, int batchSize) { Assert.notNull(olderThan, "olderThan must not be null"); Assert.isTrue(batchSize > 0, "batchSize must be greater than 0"); From e34f5105bb39adea8c4fb9b6c914020df43a4ea2 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Tue, 14 Jul 2015 09:54:09 +0300 Subject: [PATCH 35/51] format and update javadoc --- .../nflow/engine/service/ArchiveService.java | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index 55ba8fbe2..6b73a186b 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -21,14 +21,14 @@ public class ArchiveService { private ArchiveDao archiveDao; /** - * Archive old and passive workflows. Copies workflow instance, workflow instance actions and state variables to - * corresponding archive tables and removes them production tables. Archives workflows that do not have - * nextActivation and whose modified time is earlier than olderThan parameter. + * Archive old (whose modified time is earlier than olderThan parameter) and passive (that do not have + * nextActivation) workflows. Copies workflow instances, workflow instance actions and state variables to + * corresponding archive tables and removes them from production tables. * - * @param olderThan - * @param batchSize number of workflow hierarchies to archive in single transactions. Typical value is 1-20. This - * parameter mostly affects on archival performance. - * @return number of archived workflows + * @param olderThan Passive workflow instances whose modified time is before this will be archived. + * @param batchSize Number of workflow hierarchies to archive in a single transaction. Typical value is 1-20. This parameter + * mostly affects on archiving performance. + * @return Total number of archived workflows. */ public int archiveWorkflows(DateTime olderThan, int batchSize) { Assert.notNull(olderThan, "olderThan must not be null"); @@ -47,9 +47,9 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { archiveDao.archiveWorkflows(workflowIds); archivedWorkflows += workflowIds.size(); - double timeDiff = stopWatch.getTime()/1000.0; - log.debug("Archived {} workflows. {} workflows / second. Workflow ids: {}. ", - workflowIds.size(), archivedWorkflows / timeDiff, workflowIds); + double timeDiff = stopWatch.getTime() / 1000.0; + log.debug("Archived {} workflows. {} workflows / second. Workflow ids: {}. ", workflowIds.size(), archivedWorkflows + / timeDiff, workflowIds); } while (!workflowIds.isEmpty()); log.info("Archiving finished. 
Archived {} workflows.", archivedWorkflows); From 0e87a929f0d467d807a776d1c67c56d31fc8c226 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Tue, 14 Jul 2015 10:40:42 +0300 Subject: [PATCH 36/51] Include executor.hostname changes from master --- .../src/main/resources/scripts/db/postgresql.create.ddl.sql | 2 +- .../main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql | 2 ++ .../scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql | 3 ++- .../resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql | 2 ++ .../scripts/db/update-1.3.0-x/postgresql.update.ddl.sql | 2 ++ 5 files changed, 9 insertions(+), 2 deletions(-) diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index 51ff36a82..6f5733679 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -144,4 +144,4 @@ create table if not exists nflow_archive_workflow_state ( state_value text not null, primary key (workflow_id, action_id, state_key), foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade -); \ No newline at end of file +); diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql index 61655f6cd..41708e2ec 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/h2.update.ddl.sql @@ -11,6 +11,8 @@ alter table nflow_executor alter column host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; +alter table nflow_executor alter column host varchar(253) not null; + -- archiving drop index nflow_workflow_next_activation; create index if not exists nflow_workflow_next_activation on nflow_workflow(next_activation, modified); diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql index 8efc76392..ec6bed15c 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.legacy.update.ddl.sql @@ -11,6 +11,8 @@ alter table nflow_executor modify host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; +alter table nflow_executor modify host varchar(253) not null; + -- archiving alter table drop nflow_workflow index nflow_workflow; @@ -59,4 +61,3 @@ create table if not exists nflow_archive_workflow_state ( primary key (workflow_id, action_id, state_key), foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade ); - diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql index 0be81a4ab..527e9009b 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/mysql.update.ddl.sql @@ -11,6 +11,8 @@ alter table nflow_executor modify host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign 
key (root_workflow_id) references nflow_workflow (id) on delete cascade; +alter table nflow_executor modify host varchar(253) not null; + --archiving alter table drop nflow_workflow index nflow_workflow; create index nflow_workflow_activation on nflow_workflow(next_activation, modified); diff --git a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql index 952f319d1..4bafc8301 100644 --- a/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/update-1.3.0-x/postgresql.update.ddl.sql @@ -11,6 +11,8 @@ alter table nflow_executor alter host varchar(253) not null; alter table nflow_workflow add constraint fk_workflow_root foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; +alter table nflow_executor alter host varchar(253) not null; + -- archiving create or replace function update_modified() returns trigger language plpgsql as ' From f51635d359b59a65e669969b3fb6767a317b2590 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Wed, 19 Aug 2015 16:59:20 +0300 Subject: [PATCH 37/51] changes from review --- .../nflow/engine/internal/dao/ArchiveDao.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index 394796026..0ce3459e6 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -57,7 +57,7 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { " limit " + maxRows + ") as archivable_parent " + "where archivable_parent.id = w.id or archivable_parent.id = w.root_workflow_id", - new Object[] { toTimestamp(before), toTimestamp(before) }, new ArchivableWorkflowsRowMapper()); + new ArchivableWorkflowsRowMapper(), toTimestamp(before), toTimestamp(before)); } @Transactional @@ -98,11 +98,11 @@ private void deleteWorkflows(String workflowIdParams) { private String columnsFromMetadata(String tableName) { List columnNames = jdbc.query("select * from " + tableName + " where 1 = 0", columnNamesExtractor); - return join(columnNames.toArray(), ","); + return join(columnNames, ","); } private String params(List workflowIds) { - return "(" + join(workflowIds.toArray(), ",") + ")"; + return "(" + join(workflowIds, ",") + ")"; } static class ArchivableWorkflowsRowMapper implements RowMapper { From 8d95ddd069e4fafef0257b3f9128aabe6bc0ba2b Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Wed, 19 Aug 2015 16:59:42 +0300 Subject: [PATCH 38/51] do not index next_activation on archive tables, it is always null --- nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql | 2 +- .../src/main/resources/scripts/db/mysql.create.ddl.sql | 4 ++-- .../src/main/resources/scripts/db/mysql.legacy.create.ddl.sql | 4 ++-- .../src/main/resources/scripts/db/postgresql.create.ddl.sql | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql index 97fe451ee..bee570f0f 100644 --- a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql @@ -102,7 
+102,7 @@ create table if not exists nflow_archive_workflow ( create unique index if not exists nflow_archive_workflow_uniq on nflow_archive_workflow (type, external_id, executor_group); -create index if not exists nflow_archive_workflow_next_activation on nflow_archive_workflow(next_activation, modified); +create index if not exists nflow_archive_workflow_modified on nflow_archive_workflow(modified); create table if not exists nflow_archive_workflow_action ( id int not null primary key, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index 3a68e66a1..34b52f9a2 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -100,8 +100,8 @@ create table if not exists nflow_archive_workflow ( constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_activation; -create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); +drop index nflow_archive_workflow_modified; +create index nflow_archive_workflow_modified on nflow_archive_workflow(modified); create table if not exists nflow_archive_workflow_action ( id int not null primary key, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index 2af4de2e4..adfe89eae 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -110,8 +110,8 @@ create table if not exists nflow_archive_workflow ( constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_activation; -create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); +drop index nflow_archive_workflow_modified; +create index nflow_archive_workflow_modified on nflow_archive_workflow(modified); create table if not exists nflow_archive_workflow_action ( id int not null primary key, diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index 6f5733679..c37b011b9 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -120,8 +120,8 @@ create table if not exists nflow_archive_workflow ( constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_activation; -create index nflow_archive_workflow_activation on nflow_archive_workflow(next_activation, modified); +drop index nflow_archive_workflow_modified; +create index nflow_archive_workflow_modified on nflow_archive_workflow(modified); create table if not exists nflow_archive_workflow_action ( id integer primary key, From 441483d093c1a9bdc99e2fc44671c76e30dcd92b Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Wed, 19 Aug 2015 17:22:20 +0300 Subject: [PATCH 39/51] remove indices from archive tables --- nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql | 2 -- .../src/main/resources/scripts/db/mysql.create.ddl.sql | 3 --- .../src/main/resources/scripts/db/mysql.legacy.create.ddl.sql | 3 --- .../src/main/resources/scripts/db/postgresql.create.ddl.sql | 3 --- 4 files changed, 11 deletions(-) diff 
--git a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql index bee570f0f..e3436a9fc 100644 --- a/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/h2.create.ddl.sql @@ -102,8 +102,6 @@ create table if not exists nflow_archive_workflow ( create unique index if not exists nflow_archive_workflow_uniq on nflow_archive_workflow (type, external_id, executor_group); -create index if not exists nflow_archive_workflow_modified on nflow_archive_workflow(modified); - create table if not exists nflow_archive_workflow_action ( id int not null primary key, workflow_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql index 34b52f9a2..bf86acfd7 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.create.ddl.sql @@ -100,9 +100,6 @@ create table if not exists nflow_archive_workflow ( constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_modified; -create index nflow_archive_workflow_modified on nflow_archive_workflow(modified); - create table if not exists nflow_archive_workflow_action ( id int not null primary key, workflow_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql index adfe89eae..869f7debf 100644 --- a/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/mysql.legacy.create.ddl.sql @@ -110,9 +110,6 @@ create table if not exists nflow_archive_workflow ( constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_modified; -create index nflow_archive_workflow_modified on nflow_archive_workflow(modified); - create table if not exists nflow_archive_workflow_action ( id int not null primary key, workflow_id int not null, diff --git a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql index c37b011b9..36a319542 100644 --- a/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/postgresql.create.ddl.sql @@ -120,9 +120,6 @@ create table if not exists nflow_archive_workflow ( constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) ); -drop index nflow_archive_workflow_modified; -create index nflow_archive_workflow_modified on nflow_archive_workflow(modified); - create table if not exists nflow_archive_workflow_action ( id integer primary key, workflow_id int not null, From 15fe5728872695d8b1fa626c484ad305b023f984 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Thu, 20 Aug 2015 21:47:44 +0300 Subject: [PATCH 40/51] Add rest service to start archive process --- .../jetty/config/NflowJettyConfiguration.java | 7 ++- .../nflow/rest/v1/ArchiveResource.java | 35 +++++++++++++ .../nflow/rest/v1/msg/ArchiveRequest.java | 22 +++++++++ .../nflow/rest/v1/ArchiveResourceTest.java | 49 +++++++++++++++++++ 4 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java create mode 100644 
nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveRequest.java create mode 100644 nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java diff --git a/nflow-jetty/src/main/java/com/nitorcreations/nflow/jetty/config/NflowJettyConfiguration.java b/nflow-jetty/src/main/java/com/nitorcreations/nflow/jetty/config/NflowJettyConfiguration.java index c1fe6f5d2..5ac4ac8ab 100644 --- a/nflow-jetty/src/main/java/com/nitorcreations/nflow/jetty/config/NflowJettyConfiguration.java +++ b/nflow-jetty/src/main/java/com/nitorcreations/nflow/jetty/config/NflowJettyConfiguration.java @@ -38,6 +38,7 @@ import com.nitorcreations.nflow.rest.config.DateTimeParamConverterProvider; import com.nitorcreations.nflow.rest.config.NotFoundExceptionMapper; import com.nitorcreations.nflow.rest.config.RestConfiguration; +import com.nitorcreations.nflow.rest.v1.ArchiveResource; import com.nitorcreations.nflow.rest.v1.StatisticsResource; import com.nitorcreations.nflow.rest.v1.WorkflowDefinitionResource; import com.nitorcreations.nflow.rest.v1.WorkflowExecutorResource; @@ -55,13 +56,15 @@ public class NflowJettyConfiguration { @Bean public Server jaxRsServer(WorkflowInstanceResource workflowInstanceResource, WorkflowDefinitionResource workflowDefinitionResource, WorkflowExecutorResource workflowExecutorResource, - StatisticsResource statisticsResource, @Named(REST_OBJECT_MAPPER) ObjectMapper nflowRestObjectMapper) { + StatisticsResource statisticsResource, ArchiveResource archiveResource, + @Named(REST_OBJECT_MAPPER) ObjectMapper nflowRestObjectMapper) { JAXRSServerFactoryBean factory = RuntimeDelegate.getInstance().createEndpoint(jaxRsApiApplication(), JAXRSServerFactoryBean.class); factory.setServiceBeans(Arrays.< Object >asList( workflowInstanceResource, workflowDefinitionResource, workflowExecutorResource, - statisticsResource + statisticsResource, + archiveResource )); factory.setAddress('/' + factory.getAddress()); factory.setProviders(asList( diff --git a/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java new file mode 100644 index 000000000..4246106f1 --- /dev/null +++ b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java @@ -0,0 +1,35 @@ +package com.nitorcreations.nflow.rest.v1; + +import static javax.ws.rs.core.MediaType.APPLICATION_JSON; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; + +import javax.inject.Inject; +import javax.ws.rs.Consumes; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Response; + +import org.springframework.stereotype.Component; + +import com.nitorcreations.nflow.engine.service.ArchiveService; +import com.nitorcreations.nflow.rest.v1.msg.ArchiveRequest; + +@Path("/v1/archive") +@Consumes(APPLICATION_JSON) +@Produces(APPLICATION_JSON) +@Api("Archiving") +@Component +public class ArchiveResource { + + @Inject + private ArchiveService archiveService; + + @POST + @ApiOperation("Archive workflows") + public Response archiveWorkflows(ArchiveRequest request) { + Integer archivedWorkflows = archiveService.archiveWorkflows(request.olderThan, request.batchSize); + return Response.ok().header("X-Archived-Workflows", archivedWorkflows).build(); + } +} diff --git a/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveRequest.java 
b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveRequest.java new file mode 100644 index 000000000..b4740178d --- /dev/null +++ b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveRequest.java @@ -0,0 +1,22 @@ +package com.nitorcreations.nflow.rest.v1.msg; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; + +import javax.validation.constraints.NotNull; + +import org.joda.time.DateTime; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + +@ApiModel(description = "Request to start archiving process") +@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD", justification = "jackson reads dto fields") +public class ArchiveRequest { + + @NotNull + @ApiModelProperty(value = "Passive workflow instances whose modified time is before this will be archived.", required = true) + public DateTime olderThan; + + @ApiModelProperty("Number of workflow hierarchies to archive in a single transaction.") + public int batchSize = 10; +} diff --git a/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java b/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java new file mode 100644 index 000000000..a65c0f82f --- /dev/null +++ b/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java @@ -0,0 +1,49 @@ +package com.nitorcreations.nflow.rest.v1; + +import static org.hamcrest.Matchers.is; +import static org.joda.time.DateTime.now; +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; + +import org.joda.time.DateTime; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import com.nitorcreations.nflow.engine.service.ArchiveService; +import com.nitorcreations.nflow.rest.v1.msg.ArchiveRequest; + +@RunWith(MockitoJUnitRunner.class) +public class ArchiveResourceTest { + + @InjectMocks + private final ArchiveResource resource = new ArchiveResource(); + @Mock + private ArchiveService service; + @Mock + private Response expected; + + DateTime olderThan = now().minusYears(1); + int batchSize = 10; + int archived = 100; + + @Test + public void archiveDelegatesToArchiveService() { + when(service.archiveWorkflows(olderThan, batchSize)).thenReturn(archived); + + ArchiveRequest request = new ArchiveRequest(); + request.olderThan = olderThan; + request.batchSize = batchSize; + Response response = resource.archiveWorkflows(request); + + verify(service).archiveWorkflows(olderThan, batchSize); + assertThat(response.getStatus(), is(Status.OK.getStatusCode())); + assertThat(response.getHeaderString("X-Archived-Workflows"), is("100")); + } +} From 69d90510a4be3cc923d4bbbc940d2b3e47d43c0f Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Fri, 21 Aug 2015 11:25:43 +0300 Subject: [PATCH 41/51] Archive service should return json instead of custom header --- .../nflow/rest/v1/ArchiveResource.java | 9 +++++---- .../nflow/rest/v1/msg/ArchiveResponse.java | 14 ++++++++++++++ 2 files changed, 19 insertions(+), 4 deletions(-) create mode 100644 nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveResponse.java diff --git a/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java 
b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java index 4246106f1..1be9a2f46 100644 --- a/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java +++ b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/ArchiveResource.java @@ -9,12 +9,12 @@ import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; -import javax.ws.rs.core.Response; import org.springframework.stereotype.Component; import com.nitorcreations.nflow.engine.service.ArchiveService; import com.nitorcreations.nflow.rest.v1.msg.ArchiveRequest; +import com.nitorcreations.nflow.rest.v1.msg.ArchiveResponse; @Path("/v1/archive") @Consumes(APPLICATION_JSON) @@ -28,8 +28,9 @@ public class ArchiveResource { @POST @ApiOperation("Archive workflows") - public Response archiveWorkflows(ArchiveRequest request) { - Integer archivedWorkflows = archiveService.archiveWorkflows(request.olderThan, request.batchSize); - return Response.ok().header("X-Archived-Workflows", archivedWorkflows).build(); + public ArchiveResponse archiveWorkflows(ArchiveRequest request) { + ArchiveResponse response = new ArchiveResponse(); + response.archivedWorkflows = archiveService.archiveWorkflows(request.olderThan, request.batchSize); + return response; } } diff --git a/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveResponse.java b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveResponse.java new file mode 100644 index 000000000..ee4d36844 --- /dev/null +++ b/nflow-rest-api/src/main/java/com/nitorcreations/nflow/rest/v1/msg/ArchiveResponse.java @@ -0,0 +1,14 @@ +package com.nitorcreations.nflow.rest.v1.msg; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + +@ApiModel(description = "Archiving result response") +@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD", justification = "jackson reads dto fields") +public class ArchiveResponse { + + @ApiModelProperty("Total number of archived workflows") + public int archivedWorkflows; + +} From 3fbefed956c26b5c25a4ba4c24cf4c4b327a9558 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Fri, 21 Aug 2015 11:29:03 +0300 Subject: [PATCH 42/51] fix to compile --- .../nflow/engine/internal/dao/ArchiveDao.java | 3 --- .../nitorcreations/nflow/rest/v1/ArchiveResourceTest.java | 7 +++---- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index 0ce3459e6..7425aeb3e 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -2,7 +2,6 @@ import static com.nitorcreations.nflow.engine.internal.dao.DaoUtil.toTimestamp; import static com.nitorcreations.nflow.engine.internal.dao.DaoUtil.ColumnNamesExtractor.columnNamesExtractor; -import static com.nitorcreations.nflow.engine.internal.storage.db.DatabaseConfiguration.NFLOW_DATABASE_INITIALIZER; import static org.apache.commons.lang3.StringUtils.join; import java.sql.ResultSet; @@ -13,7 +12,6 @@ import javax.inject.Named; import org.joda.time.DateTime; -import org.springframework.context.annotation.DependsOn; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import 
org.springframework.transaction.annotation.Transactional; @@ -21,7 +19,6 @@ import com.nitorcreations.nflow.engine.internal.config.NFlow; @Named -@DependsOn(NFLOW_DATABASE_INITIALIZER) public class ArchiveDao { private JdbcTemplate jdbc; private TableMetadataChecker tableMetadataChecker; diff --git a/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java b/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java index a65c0f82f..cb5965f5b 100644 --- a/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java +++ b/nflow-rest-api/src/test/java/com/nitorcreations/nflow/rest/v1/ArchiveResourceTest.java @@ -7,7 +7,6 @@ import static org.mockito.Mockito.when; import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; import org.joda.time.DateTime; import org.junit.Test; @@ -18,6 +17,7 @@ import com.nitorcreations.nflow.engine.service.ArchiveService; import com.nitorcreations.nflow.rest.v1.msg.ArchiveRequest; +import com.nitorcreations.nflow.rest.v1.msg.ArchiveResponse; @RunWith(MockitoJUnitRunner.class) public class ArchiveResourceTest { @@ -40,10 +40,9 @@ public void archiveDelegatesToArchiveService() { ArchiveRequest request = new ArchiveRequest(); request.olderThan = olderThan; request.batchSize = batchSize; - Response response = resource.archiveWorkflows(request); + ArchiveResponse response = resource.archiveWorkflows(request); verify(service).archiveWorkflows(olderThan, batchSize); - assertThat(response.getStatus(), is(Status.OK.getStatusCode())); - assertThat(response.getHeaderString("X-Archived-Workflows"), is("100")); + assertThat(response.archivedWorkflows, is(archived)); } } From a228ca3b632350ce586401500b3064205a48f9ef Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Fri, 21 Aug 2015 17:43:56 +0300 Subject: [PATCH 43/51] add where clause to avoid updating rows that do not need to be updated --- .../nitorcreations/nflow/engine/internal/dao/ArchiveDao.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index 7425aeb3e..ea0cc7455 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -88,7 +88,7 @@ private void archiveStateTable(String workflowIdParams) { private void deleteWorkflows(String workflowIdParams) { jdbc.update("delete from nflow_workflow_state where workflow_id in " + workflowIdParams); jdbc.update("update nflow_workflow set root_workflow_id=null, parent_workflow_id=null, parent_action_id=null " + - "where id in " + workflowIdParams); + "where id in " + workflowIdParams + " and (root_workflow_id is not null or parent_workflow_id is not null)"); jdbc.update("delete from nflow_workflow_action where workflow_id in " + workflowIdParams); jdbc.update("delete from nflow_workflow where id in " + workflowIdParams); } From 1fe5baef473d34a4853572114d6d05d32ec44c2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Tue, 29 Sep 2015 18:26:48 +0300 Subject: [PATCH 44/51] Log statistics every minute during the archiving process. 
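This commit introduces the internal PeriodicLogger utility so the archiving loop can report progress roughly once per minute instead of once per batch. A minimal usage sketch; the demo class and call site below are hypothetical, and note that PeriodicLogger is documented as not thread safe:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    import com.nitorcreations.nflow.engine.internal.util.PeriodicLogger;

    public class PeriodicLoggerDemo {
      public static void main(String[] args) {
        Logger log = LoggerFactory.getLogger(PeriodicLoggerDemo.class);
        // Writes through log.info at most about once per 60-second wall-clock period.
        PeriodicLogger periodicLogger = new PeriodicLogger(log, 60);
        for (int i = 0; i < 1_000_000; i++) {
          // Only the first call landing in a new period is actually logged;
          // the remaining calls in the same period are dropped.
          periodicLogger.log("processed {} items so far", i);
        }
      }
    }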
--- .../engine/internal/util/PeriodicLogger.java | 34 ++++++++++ .../nflow/engine/service/ArchiveService.java | 4 ++ .../internal/util/PeriodicLoggerTest.java | 68 +++++++++++++++++++ 3 files changed, 106 insertions(+) create mode 100644 nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java create mode 100644 nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLoggerTest.java diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java new file mode 100644 index 000000000..2a841756a --- /dev/null +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java @@ -0,0 +1,34 @@ +package com.nitorcreations.nflow.engine.internal.util; + + +import org.joda.time.DateTime; +import org.slf4j.Logger; + +/** + * PeriodicLogger logs once per given periodInSeconds period. + * Typically used in a loop where you don't want to log at every iteration, but want + * to get a log row e.g. once per minute. + * Not thread safe. + */ +public class PeriodicLogger { + private Long previousLogging; + private final int periodInSeconds; + private final Logger logger; + + public PeriodicLogger(Logger logger, int periodInSeconds) { + this.logger = logger; + this.periodInSeconds = periodInSeconds; + } + + public void log(String message, Object ... parameters) { + long now = periodNumber(); + if(previousLogging == null || previousLogging != now) { + logger.info(message, parameters); + } + previousLogging = now; + } + + private long periodNumber() { + return DateTime.now().getMillis() / 1000 / periodInSeconds; + } +} diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java index 6b73a186b..0e193a974 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java @@ -7,6 +7,7 @@ import javax.inject.Inject; import javax.inject.Named; +import com.nitorcreations.nflow.engine.internal.util.PeriodicLogger; import org.apache.commons.lang3.time.StopWatch; import org.joda.time.DateTime; import org.slf4j.Logger; @@ -38,6 +39,7 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { StopWatch stopWatch = new StopWatch(); stopWatch.start(); List workflowIds; + PeriodicLogger periodicLogger = new PeriodicLogger(log, 60); int archivedWorkflows = 0; do { workflowIds = archiveDao.listArchivableWorkflows(olderThan, batchSize); @@ -50,6 +52,8 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) { double timeDiff = stopWatch.getTime() / 1000.0; log.debug("Archived {} workflows. {} workflows / second. Workflow ids: {}. ", workflowIds.size(), archivedWorkflows / timeDiff, workflowIds); + periodicLogger.log("Archived {} workflows. Archiving about {} workflows / second.", workflowIds.size(), archivedWorkflows + / timeDiff); } while (!workflowIds.isEmpty()); log.info("Archiving finished. 
Archived {} workflows.", archivedWorkflows); diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLoggerTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLoggerTest.java new file mode 100644 index 000000000..4b176c64f --- /dev/null +++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLoggerTest.java @@ -0,0 +1,68 @@ +package com.nitorcreations.nflow.engine.internal.util; + +import org.joda.time.DateTimeUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.slf4j.Logger; + +import static org.joda.time.DateTimeUtils.setCurrentMillisFixed; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +@RunWith(MockitoJUnitRunner.class) +public class PeriodicLoggerTest { + @Mock + Logger logger; + + PeriodicLogger periodicLogger; + Object[] params = new Object[]{new Object(), 1}; + final long now = 1443540008000L; + + @Before + public void setup() { + periodicLogger = new PeriodicLogger(logger, 60); + setCurrentMillisFixed(now); + } + + @After + public void teardown() { + DateTimeUtils.setCurrentMillisSystem(); + } + + @Test + public void periodicLoggerLogsAtFirstLogCall() { + periodicLogger.log("foo {}", params); + verify(logger, times(1)).info("foo {}", params); + verifyNoMoreInteractions(logger); + } + + @Test + public void periodicLoggerDoenstLogMoreThanOneTimeInPeriod() { + periodicLogger.log("foo {}", params); + periodicLogger.log("foo {}", params); + periodicLogger.log("bar {}", params); + periodicLogger.log("baz"); + setCurrentMillisFixed(now + 59 * 1000); + verify(logger, times(1)).info("foo {}", params); + verifyNoMoreInteractions(logger); + } + + @Test + public void periodicLoggerLogsAgainWhenPeriodChanges() { + periodicLogger.log("foo1 {}", params); + verify(logger, times(1)).info("foo1 {}", params); + setCurrentMillisFixed(now + 60 * 1000); + periodicLogger.log("foo2 {}", params); + periodicLogger.log("foo2 {}", params); + verify(logger, times(1)).info("foo2 {}", params); + setCurrentMillisFixed(now + 110 * 1000); + periodicLogger.log("foo3 {}", params); + verifyNoMoreInteractions(logger); + } +} From 6a7d874263d1b73f87f327797a4d8ab645747b13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Thu, 15 Oct 2015 11:20:44 +0300 Subject: [PATCH 45/51] Add archiving tables for Oracle --- .../scripts/db/oracle.create.ddl.sql | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/nflow-engine/src/main/resources/scripts/db/oracle.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/oracle.create.ddl.sql index c0e93d7b2..f940cb706 100644 --- a/nflow-engine/src/main/resources/scripts/db/oracle.create.ddl.sql +++ b/nflow-engine/src/main/resources/scripts/db/oracle.create.ddl.sql @@ -119,3 +119,55 @@ begin :new.modified := current_timestamp; end; / + +-- Archive tables +-- - no default values +-- - no triggers +-- - no auto increments +-- - same indexes and constraints as production tables +-- - remove recursive foreign keys + +create table nflow_archive_workflow ( + id integer primary key, + status varchar(32) not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + 
state varchar(64) not null, + state_text varchar(128), + next_activation timestamp, + external_next_activation timestamp, + executor_id int, + retries int not null default 0, + created timestamp not null, + modified timestamp not null, + executor_group varchar(64) not null, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) +); + +create table nflow_archive_workflow_action ( + id integer primary key, + workflow_id int not null, + executor_id int not null, + type varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp not null, + execution_end timestamp not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade, + constraint nflow_archive_workflow_action_uniq unique (workflow_id, id) +); + +create table nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value clob not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); + From 03808210a5ce846c7525e45b9d093e8df592c971 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?= Date: Thu, 15 Oct 2015 11:36:06 +0300 Subject: [PATCH 46/51] Increase timeout in CreditApplicationWorkflowTest --- .../nflow/tests/CreditApplicationWorkflowTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java index 7cedfcb30..4b36d5e85 100644 --- a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java +++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java @@ -49,7 +49,7 @@ public void t01_createCreditApplicationWorkflow() { assertThat(resp.id, notNullValue()); } - @Test(timeout = 5000) + @Test(timeout = 10000) public void t02_checkAcceptCreditApplicationReached() throws InterruptedException { ListWorkflowInstanceResponse response; do { From f7406cdcfca5956265df3c6a84c858b0a8b73740 Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Fri, 30 Oct 2015 13:05:25 +0200 Subject: [PATCH 47/51] improve logging and tests --- .../nflow/engine/internal/dao/ArchiveDao.java | 9 ++++---- .../nflow/engine/service/ArchiveService.java | 22 ++++++++++--------- .../engine/internal/dao/ArchiveDaoTest.java | 6 ++--- .../engine/service/ArchiveServiceTest.java | 2 ++ 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java index ea0cc7455..e07163251 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java @@ -58,18 +58,19 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { } @Transactional - public void archiveWorkflows(List workflowIds) { + public int archiveWorkflows(List workflowIds) { String workflowIdParams = params(workflowIds); - archiveWorkflowTable(workflowIdParams); + int archivedWorkflows = archiveWorkflowTable(workflowIdParams); archiveActionTable(workflowIdParams); archiveStateTable(workflowIdParams); deleteWorkflows(workflowIdParams); + return 
From 03808210a5ce846c7525e45b9d093e8df592c971 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juha=20Syrj=C3=A4l=C3=A4?=
Date: Thu, 15 Oct 2015 11:36:06 +0300
Subject: [PATCH 46/51] Increase timeout in CreditApplicationWorkflowTest

---
 .../nflow/tests/CreditApplicationWorkflowTest.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java
index 7cedfcb30..4b36d5e85 100644
--- a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java
+++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/CreditApplicationWorkflowTest.java
@@ -49,7 +49,7 @@ public void t01_createCreditApplicationWorkflow() {
     assertThat(resp.id, notNullValue());
   }
 
-  @Test(timeout = 5000)
+  @Test(timeout = 10000)
   public void t02_checkAcceptCreditApplicationReached() throws InterruptedException {
     ListWorkflowInstanceResponse response;
     do {

From f7406cdcfca5956265df3c6a84c858b0a8b73740 Mon Sep 17 00:00:00 2001
From: Edvard Fonsell
Date: Fri, 30 Oct 2015 13:05:25 +0200
Subject: [PATCH 47/51] improve logging and tests

---
 .../nflow/engine/internal/dao/ArchiveDao.java |  9 ++++----
 .../nflow/engine/service/ArchiveService.java  | 22 ++++++++++---------
 .../engine/internal/dao/ArchiveDaoTest.java   |  6 ++---
 .../engine/service/ArchiveServiceTest.java    |  2 ++
 4 files changed, 22 insertions(+), 17 deletions(-)

diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
index ea0cc7455..e07163251 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDao.java
@@ -58,18 +58,19 @@ public List<Integer> listArchivableWorkflows(DateTime before, int maxRows) {
   }
 
   @Transactional
-  public void archiveWorkflows(List<Integer> workflowIds) {
+  public int archiveWorkflows(List<Integer> workflowIds) {
     String workflowIdParams = params(workflowIds);
 
-    archiveWorkflowTable(workflowIdParams);
+    int archivedWorkflows = archiveWorkflowTable(workflowIdParams);
     archiveActionTable(workflowIdParams);
     archiveStateTable(workflowIdParams);
     deleteWorkflows(workflowIdParams);
+    return archivedWorkflows;
   }
 
-  private void archiveWorkflowTable(String workflowIdParams) {
+  private int archiveWorkflowTable(String workflowIdParams) {
     String columns = columnsFromMetadata("nflow_workflow");
-    jdbc.update("insert into nflow_archive_workflow(" + columns + ") " +
+    return jdbc.update("insert into nflow_archive_workflow(" + columns + ") " +
         "select " + columns + " from nflow_workflow where id in " + workflowIdParams);
   }
 
diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
index 0e193a974..19b441b8f 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
@@ -1,5 +1,6 @@
 package com.nitorcreations.nflow.engine.service;
 
+import static java.lang.Math.max;
 import static org.slf4j.LoggerFactory.getLogger;
 
 import java.util.List;
@@ -7,13 +8,13 @@
 import javax.inject.Inject;
 import javax.inject.Named;
 
-import com.nitorcreations.nflow.engine.internal.util.PeriodicLogger;
 import org.apache.commons.lang3.time.StopWatch;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.springframework.util.Assert;
 
 import com.nitorcreations.nflow.engine.internal.dao.ArchiveDao;
+import com.nitorcreations.nflow.engine.internal.util.PeriodicLogger;
 
 @Named
 public class ArchiveService {
@@ -27,7 +28,7 @@ public class ArchiveService {
    * corresponding archive tables and removes them from production tables.
    *
    * @param olderThan Passive workflow instances whose modified time is before this will be archived.
-   * @param batchSize Number of workflow hierarchies to archive in a single transaction. Typical value is 1-20. This parameter
+   * @param batchSize Number of workflow hierarchies to archive in a single transaction. Typical value is 1-20. This parameter
    *          mostly affects archiving performance.
    * @return Total number of archived workflows.
    */
@@ -41,22 +42,23 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) {
     List<Integer> workflowIds;
     PeriodicLogger periodicLogger = new PeriodicLogger(log, 60);
     int archivedWorkflows = 0;
+    int archivedWorkflowsTotal = 0;
     do {
       workflowIds = archiveDao.listArchivableWorkflows(olderThan, batchSize);
       if (workflowIds.isEmpty()) {
         break;
       }
-      archiveDao.archiveWorkflows(workflowIds);
-      archivedWorkflows += workflowIds.size();
+      archivedWorkflows = archiveDao.archiveWorkflows(workflowIds);
+      archivedWorkflowsTotal += archivedWorkflows;
 
-      double timeDiff = stopWatch.getTime() / 1000.0;
-      log.debug("Archived {} workflows. {} workflows / second. Workflow ids: {}. ", workflowIds.size(), archivedWorkflows
+      double timeDiff = max(stopWatch.getTime() / 1000.0, 0.000001);
+      log.debug("Archived {} workflows. {} workflows / second. Workflow ids: {}. ", archivedWorkflows, archivedWorkflowsTotal
           / timeDiff, workflowIds);
-      periodicLogger.log("Archived {} workflows. Archiving about {} workflows / second.", workflowIds.size(), archivedWorkflows
-          / timeDiff);
+      periodicLogger.log("Archived {} workflows. Archiving about {} workflows / second.", archivedWorkflows,
+          archivedWorkflowsTotal / timeDiff);
     } while (!workflowIds.isEmpty());
-    log.info("Archiving finished. Archived {} workflows.", archivedWorkflows);
-    return archivedWorkflows;
+    log.info("Archiving finished. Archived {} workflows in {} seconds.", archivedWorkflowsTotal, stopWatch.getTime() / 1000);
+    return archivedWorkflowsTotal;
   }
 }
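ArchiveService above is the public entry point for archiving; a minimal, hypothetical caller sketch follows before the test changes of this patch (the job class, the injection wiring and the 30-day retention are illustrative assumptions, not part of this patch series):

import javax.inject.Inject;
import javax.inject.Named;

import org.joda.time.DateTime;

import com.nitorcreations.nflow.engine.service.ArchiveService;

@Named
public class NightlyArchiveJob {

  @Inject
  private ArchiveService archiveService;

  // Run e.g. once per night from a scheduler: archives passive workflow instances
  // not modified in the last 30 days, moving 10 workflow hierarchies per transaction.
  public int archiveOldWorkflows() {
    return archiveService.archiveWorkflows(DateTime.now().minusDays(30), 10);
  }
}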
diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java
index eb4ff57bc..e7f81ef22 100644
--- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java
+++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/internal/dao/ArchiveDaoTest.java
@@ -93,7 +93,7 @@ public void archivingSimpleWorkflowsWorks() {
     archivableWorkflows.add(storePassiveWorkflow(archiveTime2));
 
     int activeWorkflowCountBefore = rowCount("select 1 from nflow_workflow");
-    archiveDao.archiveWorkflows(archivableWorkflows);
+    assertEquals(archivableWorkflows.size(), archiveDao.archiveWorkflows(archivableWorkflows));
     int activeWorkflowCountAfter = rowCount("select 1 from nflow_workflow");
 
     assertActiveWorkflowsRemoved(archivableWorkflows);
@@ -120,7 +120,7 @@ public void archivingWorkflowsWithActionsWorks() {
     archivableWorkflows.addAll(asList(archivable1, archivable2));
 
     int activeActionCountBefore = rowCount("select 1 from nflow_workflow_action");
-    archiveDao.archiveWorkflows(archivableWorkflows);
+    assertEquals(archivableWorkflows.size(), archiveDao.archiveWorkflows(archivableWorkflows));
     int activeActionCountAfter = rowCount("select 1 from nflow_workflow_action");
 
     assertActiveWorkflowsRemoved(archivableWorkflows);
@@ -162,7 +162,7 @@ public void archivingWorkflowsWithActionsAndStatesWorks() {
     archivableWorkflows.addAll(asList(archivable1, archivable2));
 
     int variablesCountBefore = rowCount("select 1 from nflow_workflow_state");
-    archiveDao.archiveWorkflows(archivableWorkflows);
+    assertEquals(archivableWorkflows.size(), archiveDao.archiveWorkflows(archivableWorkflows));
    int variablesCountAfter = rowCount("select 1 from nflow_workflow_state");
 
     assertActiveWorkflowsRemoved(archivableWorkflows);
diff --git a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java
index 4eba197b3..05f1544c3 100644
--- a/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java
+++ b/nflow-engine/src/test/java/com/nitorcreations/nflow/engine/service/ArchiveServiceTest.java
@@ -45,6 +45,8 @@ public void withZeroWorkflowsInFirstBatchCausesNothingToArchive() {
   @Test
   public void archivingContinuesUntilEmptyListOfArchivableIsReturned() {
     doReturn(dataList).doReturn(dataList).doReturn(dataList).doReturn(emptyList).when(dao).listArchivableWorkflows(limit, 10);
+    when(dao.archiveWorkflows(dataList)).thenReturn(dataList.size());
+    when(dao.archiveWorkflows(emptyList)).thenReturn(emptyList.size());
     int archived = service.archiveWorkflows(limit, 10);
     assertEquals(dataList.size() * 3, archived);
     verify(dao).ensureValidArchiveTablesExist();

From 8158d6636402ebecdd5f4819092cf5c420e4e9e9 Mon Sep 17 00:00:00 2001
From: Edvard Fonsell
Date: Fri, 30 Oct 2015 15:24:55 +0200
Subject: [PATCH 48/51] increase timeout

---
 .../java/com/nitorcreations/nflow/tests/ChildWorkflowTest.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ChildWorkflowTest.java b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ChildWorkflowTest.java
index 6c0777bf7..3e34e51b1 100644
--- 
a/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ChildWorkflowTest.java +++ b/nflow-tests/src/test/java/com/nitorcreations/nflow/tests/ChildWorkflowTest.java @@ -38,7 +38,7 @@ public void t01_startFibonacciWorkflow() { workflowId = resp.id; } - @Test(timeout = 15000) + @Test(timeout = 30000) public void t02_checkFibonacciWorkflowComputesCorrectResult() throws InterruptedException { ListWorkflowInstanceResponse response = getWorkflowInstance(workflowId, FibonacciWorkflow.State.done.name()); assertTrue(response.stateVariables.containsKey("result")); From 9b3e1afd579e46a37192b5538c7701bf739bfe8d Mon Sep 17 00:00:00 2001 From: Edvard Fonsell Date: Fri, 6 Nov 2015 14:57:51 +0200 Subject: [PATCH 49/51] use DateTimeUtils.currentTimeMillis() instead of DateTime.now(), format --- .../engine/internal/util/PeriodicLogger.java | 40 +++++++++---------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java index 2a841756a..3634a5799 100644 --- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java +++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/util/PeriodicLogger.java @@ -1,34 +1,32 @@ package com.nitorcreations.nflow.engine.internal.util; +import static org.joda.time.DateTimeUtils.currentTimeMillis; -import org.joda.time.DateTime; import org.slf4j.Logger; /** - * PeriodicLogger logs once per given periodInSeconds period. - * Typically used in a loop where you don't want to log at every iteration, but want - * to get a log row e.g. once per minute. - * Not thread safe. + * PeriodicLogger logs once per given periodInSeconds period. Typically used in a loop where you don't want to log at every + * iteration, but want to get a log row e.g. once per minute. Not thread safe. */ public class PeriodicLogger { - private Long previousLogging; - private final int periodInSeconds; - private final Logger logger; + private Long previousLogging; + private final int periodInSeconds; + private final Logger logger; - public PeriodicLogger(Logger logger, int periodInSeconds) { - this.logger = logger; - this.periodInSeconds = periodInSeconds; - } + public PeriodicLogger(Logger logger, int periodInSeconds) { + this.logger = logger; + this.periodInSeconds = periodInSeconds; + } - public void log(String message, Object ... parameters) { - long now = periodNumber(); - if(previousLogging == null || previousLogging != now) { - logger.info(message, parameters); - } - previousLogging = now; + public void log(String message, Object... 
parameters) {
+    long now = periodNumber();
+    if (previousLogging == null || previousLogging != now) {
+      logger.info(message, parameters);
     }
+    previousLogging = now;
+  }
 
-    private long periodNumber() {
-        return DateTime.now().getMillis() / 1000 / periodInSeconds;
-    }
+  private long periodNumber() {
+    return currentTimeMillis() / 1000 / periodInSeconds;
+  }
 }

From 43bbb08d2911d8d94a143730e633cabf189c15cb Mon Sep 17 00:00:00 2001
From: Edvard Fonsell
Date: Fri, 6 Nov 2015 14:58:41 +0200
Subject: [PATCH 50/51] hardcode while condition

---
 .../com/nitorcreations/nflow/engine/service/ArchiveService.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
index 19b441b8f..61d176b5c 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/service/ArchiveService.java
@@ -56,7 +56,7 @@ public int archiveWorkflows(DateTime olderThan, int batchSize) {
           / timeDiff, workflowIds);
       periodicLogger.log("Archived {} workflows. Archiving about {} workflows / second.", archivedWorkflows,
           archivedWorkflowsTotal / timeDiff);
-    } while (!workflowIds.isEmpty());
+    } while (true);
     log.info("Archiving finished. Archived {} workflows in {} seconds.", archivedWorkflowsTotal, stopWatch.getTime() / 1000);
     return archivedWorkflowsTotal;

From 4a11221dd92ba544995e3ea9c21887732ab6956c Mon Sep 17 00:00:00 2001
From: Edvard Fonsell
Date: Fri, 6 Nov 2015 15:00:10 +0200
Subject: [PATCH 51/51] LinkedList to ArrayList

---
 .../nitorcreations/nflow/engine/internal/dao/DaoUtil.java | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java
index 17f46eebf..37da61b8b 100644
--- a/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java
+++ b/nflow-engine/src/main/java/com/nitorcreations/nflow/engine/internal/dao/DaoUtil.java
@@ -4,7 +4,7 @@
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Timestamp;
-import java.util.LinkedList;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.joda.time.DateTime;
@@ -47,9 +47,10 @@ private ColumnNamesExtractor() {
 
     @Override
     public List<String> extractData(ResultSet rs) throws SQLException, DataAccessException {
-      List<String> columnNames = new LinkedList<>();
       ResultSetMetaData metadata = rs.getMetaData();
-      for (int col = 1; col <= metadata.getColumnCount(); col++) {
+      int columnCount = metadata.getColumnCount();
+      List<String> columnNames = new ArrayList<>(columnCount);
+      for (int col = 1; col <= columnCount; col++) {
         columnNames.add(metadata.getColumnName(col));
       }
       return columnNames;