diff --git a/docs/api.yaml b/docs/api.yaml
index d0f4c1b40..5afa04f5d 100644
--- a/docs/api.yaml
+++ b/docs/api.yaml
@@ -46,6 +46,7 @@ info:
- [RESTORE](/central-api-entity-management/#restoring-a-deleted-entity) endpoint for Entities.
- Entities that have been soft-deleted for 30 days will automatically be purged.
- [Entities Odata](/central-api-odata-endpoints/#id3) now returns `__system/deletedAt`. It can also be used in $filter, $sort and $select query parameters.
+ - [Integrity](/central-api-openrosa-endpoints/#openrosa-dataset-integrity-api) endpoint for the Entity list.
## ODK Central v2024.3
@@ -9800,6 +9801,12 @@ paths:
* The Manifest will only output information for files the server actually has in its possession. Any missing expected files will be omitted, as we cannot provide a `hash` or `downloadUrl` for them.
* For Attachments that are linked to a Dataset, the value of `hash` is calculated using the MD5 of the last updated timestamp of the Dataset, instead of the content of the Dataset.
+
+ [Offline Entities support](https://forum.getodk.org/t/openrosa-spec-proposal-support-offline-entities/48052):
+
+ * If an attachment is linked to a Dataset, then `type="entityList"` attribute is added to the `mediaFile` element.
+
+ * `integrityUrl` is also returned for the attachments that are linked to a Dataset.
operationId: OpenRosa Form Manifest API
parameters:
- name: projectId
@@ -9844,6 +9851,12 @@ paths:
md5:a6fdc426037143cf71cced68e2532e3c
https://your.odk.server/v1/projects/7/forms/basic/attachments/question2.jpg
+
+ people.csv
+ md5:9fd39ac868eccdc0c134b3b7a6a25eb7
+ https://your.odk.server/v1/projects/7/forms/basic/attachments/people.csv
+ https://your.odk.server/v1/projects/7/datasets/people/integrity
+
403:
description: Forbidden
@@ -9857,6 +9870,79 @@ paths:
The authenticated actor does not have rights to perform that action.
+ /v1/projects/{projectId}/datasets/{name}/integrity?id={UUIDs}:
+ get:
+ tags:
+ - OpenRosa Endpoints
+ summary: OpenRosa Dataset Integrity API
+ description: |-
+ _(introduced: version 2025.1)_
+
+ This is the fully standards-compliant implementation of the Entities Integrity API as described in [OpenRosa spec proposal: support offline Entities](https://forum.getodk.org/t/openrosa-spec-proposal-support-offline-entities/48052).
+
+ This returns the `deleted` flag of the Entities requested through the `id` query parameter. If no `id` is provided then all Entities are returned.
+ operationId: OpenRosa Dataset Integrity API
+ parameters:
+ - name: projectId
+ in: path
+ description: The numeric ID of the Project
+ required: true
+ schema:
+ type: number
+ example: "7"
+ - name: name
+ in: path
+ description: The `name` of the dataset being referenced.
+ required: true
+ schema:
+ type: string
+ example: people
+ - name: id
+ in: query
+ description: The comma separated UUIDs of the Entities
+ required: false
+ schema:
+ type: string
+ example: 6fdfa3b6-64fb-46cf-b98c-c92b57f914b1,97717278-2bf8-4565-88b2-711c88d66e75
+ - name: X-OpenRosa-Version
+ in: header
+ description: e.g. 1.0
+ schema:
+ type: string
+ example: "1.0"
+ responses:
+ 200:
+ description: OK
+ headers:
+ X-OpenRosa-Version:
+ schema:
+ type: string
+ content:
+ text/xml:
+ example: |
+
+
+
+
+ true
+
+
+ false
+
+
+
+ 403:
+ description: Forbidden
+ headers:
+ X-OpenRosa-Version:
+ schema:
+ type: string
+ content:
+ text/xml:
+ example: |
+
+ The authenticated actor does not have rights to perform that action.
+
/v1/test/{token}/projects/{projectId}/forms/{xmlFormId}/draft/formList:
get:
tags:
diff --git a/lib/external/sentry.js b/lib/external/sentry.js
index fdfd33936..efceba674 100644
--- a/lib/external/sentry.js
+++ b/lib/external/sentry.js
@@ -109,8 +109,7 @@ const init = (config) => {
},
captureException(err) {
process.stderr.write('attempted to log Sentry exception in development:\n');
- process.stderr.write(inspect(err));
- process.stderr.write('\n');
+ process.stderr.write(inspect(err) + '\n');
},
configureScope: noop
};
diff --git a/lib/formats/openrosa.js b/lib/formats/openrosa.js
index 0bac8f0af..677388f2e 100644
--- a/lib/formats/openrosa.js
+++ b/lib/formats/openrosa.js
@@ -12,6 +12,7 @@
const { mergeRight } = require('ramda');
const { parse, render } = require('mustache');
+const { attachmentToDatasetName } = require('../util/util');
////////////////////////////////////////////////////////////////////////////////
// SETUP
@@ -66,6 +67,9 @@ const formManifestTemplate = template(200, `{{name}}
md5:{{openRosaHash}}
{{{domain}}}{{{basePath}}}/attachments/{{urlName}}
+ {{#integrityUrl}}
+ {{{integrityUrl}}}
+ {{/integrityUrl}}
{{/hasSource}}
{{/attachments}}
@@ -77,7 +81,10 @@ const formManifest = (data) => formManifestTemplate(mergeRight(data, {
attachment.with({
hasSource: attachment.blobId || attachment.datasetId,
urlName: encodeURIComponent(attachment.name),
- isDataset: attachment.datasetId != null
+ isDataset: attachment.datasetId != null,
+ integrityUrl: attachment.datasetId ?
+ `${data.domain}${data.projectPath}/datasets/${encodeURIComponent(attachmentToDatasetName(attachment.name))}/integrity`
+ : null
}))
}));
@@ -87,5 +94,16 @@ const openRosaErrorTemplate = openRosaMessageBase('error');
parse(openRosaErrorTemplate);
const openRosaError = (message) => render(openRosaErrorTemplate, { message });
-module.exports = { createdMessage, formList, formManifest, openRosaError };
+const entityListTemplate = template(200, `
+
+
+ {{#entities}}
+
+ {{deleted}}
+
+ {{/entities}}
+
+ `);
+const entityList = (data) => entityListTemplate(data);
+module.exports = { createdMessage, formList, formManifest, openRosaError, entityList };
diff --git a/lib/http/endpoint.js b/lib/http/endpoint.js
index f372d5053..4d30157d4 100644
--- a/lib/http/endpoint.js
+++ b/lib/http/endpoint.js
@@ -246,7 +246,7 @@ const defaultErrorWriter = (error, request, response) => {
writeProblemJson(response, error);
} else {
debugger; // trip debugger if attached.
- process.stderr.write(inspect(error));
+ process.stderr.write(inspect(error) + '\n');
response.status(500).type('application/json').send({
message: 'Internal Server Error',
});
diff --git a/lib/model/migrations/20180727-03-add-form-attachments-table.js b/lib/model/migrations/20180727-03-add-form-attachments-table.js
index 33ebf322d..7334d567f 100644
--- a/lib/model/migrations/20180727-03-add-form-attachments-table.js
+++ b/lib/model/migrations/20180727-03-add-form-attachments-table.js
@@ -31,7 +31,7 @@ const up = (knex) =>
Promise.all(forms.map((form) => expectedFormAttachments(form.xml)
.then((expected) => {
if (uniq(pluck('name', expected)).length < expected.length) {
- process.stderr.write(`WARNING: a form ${form.xmlFormId} contains an attachment filename collision. It will not correctly support form attachments.`);
+ process.stderr.write(`WARNING: a form ${form.xmlFormId} contains an attachment filename collision. It will not correctly support form attachments.\n`);
return Promise.resolve();
}
return knex.insert(Object.assign({ formId: form.id }, expected))
diff --git a/lib/model/query/datasets.js b/lib/model/query/datasets.js
index 0bc1c79f7..4e8d870c9 100644
--- a/lib/model/query/datasets.js
+++ b/lib/model/query/datasets.js
@@ -426,21 +426,21 @@ const getPublishedBySimilarName = (projectId, name) => ({ maybeOne }) => {
////////////////////////////////////////////////////////////////////////////////
// DATASET METADATA GETTERS
+const _getLinkedForms = (datasetName, projectId) => sql`
+SELECT DISTINCT f."xmlFormId", coalesce(current_def.name, f."xmlFormId") "name" FROM form_attachments fa
+JOIN form_defs fd ON fd.id = fa."formDefId" AND fd."publishedAt" IS NOT NULL
+JOIN forms f ON f.id = fd."formId" AND f."deletedAt" IS NULL
+JOIN form_defs current_def ON f."currentDefId" = current_def.id
+JOIN datasets ds ON ds.id = fa."datasetId"
+WHERE ds.name = ${datasetName}
+ AND ds."projectId" = ${projectId}
+ AND ds."publishedAt" IS NOT NULL
+`;
+
// Gets the dataset information, properties (including which forms each property comes from),
// and which forms consume the dataset via CSV attachment.
const getMetadata = (dataset) => async ({ all, Datasets }) => {
- const _getLinkedForms = (datasetName, projectId) => sql`
- SELECT DISTINCT f."xmlFormId", coalesce(current_def.name, f."xmlFormId") "name" FROM form_attachments fa
- JOIN form_defs fd ON fd.id = fa."formDefId" AND fd."publishedAt" IS NOT NULL
- JOIN forms f ON f.id = fd."formId" AND f."deletedAt" IS NULL
- JOIN form_defs current_def ON f."currentDefId" = current_def.id
- JOIN datasets ds ON ds.id = fa."datasetId"
- WHERE ds.name = ${datasetName}
- AND ds."projectId" = ${projectId}
- AND ds."publishedAt" IS NOT NULL
- `;
-
const _getSourceForms = (datasetName, projectId) => sql`
SELECT DISTINCT f."xmlFormId", coalesce(fd.name, f."xmlFormId") "name" FROM datasets ds
JOIN dataset_form_defs dfd ON ds.id = dfd."datasetId"
@@ -489,7 +489,6 @@ const getMetadata = (dataset) => async ({ all, Datasets }) => {
};
};
-
////////////////////////////////////////////////////////////////////////////
// DATASET PROPERTY GETTERS
@@ -665,6 +664,28 @@ const getLastUpdateTimestamp = (datasetId) => ({ maybeOne }) =>
.then((t) => t.orNull())
.then((t) => (t ? t.loggedAt : null));
+
+const canReadForOpenRosa = (auth, datasetName, projectId) => ({ oneFirst }) => oneFirst(sql`
+ WITH linked_forms AS (
+ ${_getLinkedForms(datasetName, projectId)}
+ )
+ SELECT count(1) FROM linked_forms
+ INNER JOIN (
+ SELECT forms."xmlFormId" FROM forms
+ INNER JOIN projects ON projects.id=forms."projectId"
+ INNER JOIN (
+ SELECT "acteeId" FROM assignments
+ INNER JOIN (
+ SELECT id FROM roles WHERE verbs ? 'form.read' OR verbs ? 'open_form.read'
+ ) AS role ON role.id=assignments."roleId"
+ WHERE "actorId"=${auth.actor.map((actor) => actor.id).orElse(-1)}
+ ) AS assignment ON assignment."acteeId" IN ('*', 'form', projects."acteeId", forms."acteeId")
+ WHERE forms.state != 'closed'
+ GROUP BY forms."xmlFormId"
+ ) AS users_forms ON users_forms."xmlFormId" = linked_forms."xmlFormId"
+ `)
+ .then(count => count > 0);
+
module.exports = {
createPublishedDataset, createPublishedProperty,
createOrMerge, publishIfExists,
@@ -674,5 +695,5 @@ module.exports = {
getProperties, getFieldsByFormDefId,
getDiff, update, countUnprocessedSubmissions,
getUnprocessedSubmissions,
- getLastUpdateTimestamp
+ getLastUpdateTimestamp, canReadForOpenRosa
};
diff --git a/lib/model/query/entities.js b/lib/model/query/entities.js
index 657de0a2b..8e0fc0c84 100644
--- a/lib/model/query/entities.js
+++ b/lib/model/query/entities.js
@@ -964,6 +964,34 @@ const purge = (force = false, projectId = null, datasetName = null, entityUuid =
SELECT COUNT(*) FROM deleted_entities`);
};
+////////////////////////////////////////////////////////////////////////////////
+// INTEGRITY CHECK
+
+const idFilter = (options) => {
+ const query = options.ifArg('id', ids => sql`uuid IN (${sql.join(ids.split(',').map(id => sql`${id.trim()}`), sql`, `)})`);
+ return query.sql ? query : sql`TRUE`;
+};
+
+const _getAllEntitiesState = (datasetId, options) => sql`
+ SELECT uuid, "deletedAt" IS NOT NULL as deleted
+ FROM entities
+ WHERE "datasetId" = ${datasetId}
+ AND ${idFilter(options)}
+ UNION
+ SELECT uuid, deleted FROM (
+ SELECT jsonb_array_elements_text(details -> 'entityUuids') AS uuid, TRUE as deleted
+ FROM audits
+ JOIN datasets ON datasets."acteeId" = audits."acteeId"
+ WHERE action = 'entity.purge'
+ AND datasets.id = ${datasetId}
+ ) purged
+ WHERE ${idFilter(options)}
+ -- union with not approved
+`;
+
+const getEntitiesState = (datasetId, options = QueryOptions.none) =>
+ ({ all }) => all(_getAllEntitiesState(datasetId, options));
+
module.exports = {
createNew, _processSubmissionEvent,
createSource,
@@ -980,5 +1008,5 @@ module.exports = {
countByDatasetId, getById, getDef,
getAll, getAllDefs, del,
createEntitiesFromPendingSubmissions,
- resolveConflict, restore, purge
+ resolveConflict, restore, purge, getEntitiesState
};
diff --git a/lib/resources/datasets.js b/lib/resources/datasets.js
index e107e2b6b..f81102a41 100644
--- a/lib/resources/datasets.js
+++ b/lib/resources/datasets.js
@@ -8,7 +8,7 @@
// except according to the terms contained in the LICENSE file.
const sanitize = require('sanitize-filename');
-const { getOrNotFound } = require('../util/promise');
+const { getOrNotFound, reject } = require('../util/promise');
const { streamEntityCsv } = require('../data/entity');
const { validateDatasetName, validatePropertyName } = require('../data/dataset');
const { contentDisposition, success, withEtag } = require('../util/http');
@@ -16,6 +16,7 @@ const { md5sum } = require('../util/crypto');
const { Dataset } = require('../model/frames');
const Problem = require('../util/problem');
const { QueryOptions } = require('../util/db');
+const { entityList } = require('../formats/openrosa');
module.exports = (service, endpoint) => {
service.get('/projects/:id/datasets', endpoint(({ Projects, Datasets }, { auth, params, queryOptions }) =>
@@ -102,4 +103,20 @@ module.exports = (service, endpoint) => {
return withEtag(serverEtag, csv);
}));
+
+ service.get('/projects/:projectId/datasets/:name/integrity', endpoint.openRosa(async ({ Datasets, Entities }, { params, auth, queryOptions }) => {
+ const dataset = await Datasets.get(params.projectId, params.name, true).then(getOrNotFound);
+
+ // Anyone with the verb `entity.list` or anyone with read access on a Form
+ // that consumes this dataset can call this endpoint.
+ const canAccessEntityList = await auth.can('entity.list', dataset);
+ if (!canAccessEntityList) {
+ await Datasets.canReadForOpenRosa(auth, params.name, params.projectId)
+ .then(canAccess => canAccess || reject(Problem.user.insufficientRights()));
+ }
+
+ const entities = await Entities.getEntitiesState(dataset.id, queryOptions.allowArgs('id'));
+
+ return entityList({ entities });
+ }));
};
diff --git a/lib/resources/forms.js b/lib/resources/forms.js
index a37b9d16f..d007665e4 100644
--- a/lib/resources/forms.js
+++ b/lib/resources/forms.js
@@ -19,7 +19,7 @@ const { sanitizeFieldsForOdata, setVersion } = require('../data/schema');
const { getOrNotFound, reject, resolve, rejectIf } = require('../util/promise');
const { success } = require('../util/http');
const { formList, formManifest } = require('../formats/openrosa');
-const { noargs, isPresent, isBlank } = require('../util/util');
+const { noargs, isPresent, isBlank, attachmentToDatasetName } = require('../util/util');
const { streamEntityCsvAttachment } = require('../data/entity');
const { md5sum } = require('../util/crypto');
@@ -226,7 +226,7 @@ module.exports = (service, endpoint) => {
.then(getOrNotFound)
.then((form) => auth.canOrReject('form.update', form))
.then((form) => Promise.all([
- Datasets.get(params.projectId, params.name.replace(/\.csv$/i, ''), true)
+ Datasets.get(params.projectId, attachmentToDatasetName(params.name), true)
.then(getOrNotFound)
.then((dataset) => auth.canOrReject('entity.list', dataset)),
FormAttachments.getByFormDefIdAndName(form.draftDefId, params.name).then(getOrNotFound)
@@ -293,7 +293,11 @@ module.exports = (service, endpoint) => {
.then((form) => canReadForm(auth, form))
.then((form) => FormAttachments.getAllByFormDefIdForOpenRosa(form.def.id)
.then((attachments) =>
- formManifest({ attachments, basePath: path.resolve(originalUrl, '..'), domain: env.domain })))));
+ formManifest({ attachments,
+ basePath: path.resolve(originalUrl, '..'),
+ domain: env.domain,
+ projectPath: originalUrl.match(/^\/v1\/(.*\/)?projects\/\d+/)[0] }
+ )))));
////////////////////////////////////////
// READ-ONLY ATTACHMENT ENDPOINTS
diff --git a/lib/util/db.js b/lib/util/db.js
index dc85da51f..5e75bc876 100644
--- a/lib/util/db.js
+++ b/lib/util/db.js
@@ -583,7 +583,7 @@ const postgresErrorToProblem = (x) => {
}
debugger; // automatically trip the debugger if it's attached.
- process.stderr.write(inspect(error));
+ process.stderr.write(inspect(error) + '\n');
return reject(error);
};
diff --git a/lib/util/util.js b/lib/util/util.js
index b4c86d6b7..22e5d0c96 100644
--- a/lib/util/util.js
+++ b/lib/util/util.js
@@ -80,12 +80,13 @@ function utf8ToBase64(string) {
// so let's just make our own.
const construct = (Type) => (x, y) => new Type(x, y);
+const attachmentToDatasetName = (attachmentName) => attachmentName.replace(/\.csv$/i, '');
module.exports = {
noop, noargs,
isBlank, isPresent, blankStringToNull, sanitizeOdataIdentifier,
printPairs, without, pickAll,
base64ToUtf8, utf8ToBase64,
- construct
+ construct, attachmentToDatasetName
};
diff --git a/package-lock.json b/package-lock.json
index 54f2938f6..f71fdff04 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -68,7 +68,8 @@
"streamtest": "~1.2",
"supertest": "^6.3.3",
"tmp": "~0.2",
- "undici": "^7.3.0"
+ "undici": "^7.3.0",
+ "xml2js": "^0.5.0"
},
"engines": {
"node": "22"
diff --git a/package.json b/package.json
index c93291640..42ae72f9e 100644
--- a/package.json
+++ b/package.json
@@ -69,6 +69,7 @@
"streamtest": "~1.2",
"supertest": "^6.3.3",
"tmp": "~0.2",
- "undici": "^7.3.0"
+ "undici": "^7.3.0",
+ "xml2js": "^0.5.0"
}
}
diff --git a/test/integration/api/datasets.js b/test/integration/api/datasets.js
index b956833ce..e5c73ce6d 100644
--- a/test/integration/api/datasets.js
+++ b/test/integration/api/datasets.js
@@ -10,10 +10,34 @@ const { sql } = require('slonik');
const { QueryOptions } = require('../../../lib/util/db');
const { createConflict } = require('../../util/scenarios');
const { omit } = require('ramda');
+const xml2js = require('xml2js');
const { exhaust } = require(appRoot + '/lib/worker/worker');
const Option = require(appRoot + '/lib/util/option');
+const testEntities = (test) => testService(async (service, container) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.post(`/v1/projects/1/datasets`)
+ .send({ name: 'people' });
+
+ const uuids = [
+ '12345678-1234-4123-8234-123456789aaa',
+ '12345678-1234-4123-8234-123456789abc'
+ ];
+
+ for (const _uuid of uuids) {
+ await asAlice.post('/v1/projects/1/datasets/people/entities')
+ .send({
+ uuid: _uuid,
+ label: 'John Doe'
+ })
+ .expect(200);
+ }
+
+ await test(service, container);
+});
+
describe('datasets and entities', () => {
describe('creating datasets and properties via the API', () => {
@@ -239,7 +263,7 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789aaa,Willow,,5,Alice,0,,1\n'
+ '12345678-1234-4123-8234-123456789aaa,Willow,,5,Alice,0,,1\n'
);
}));
@@ -356,7 +380,7 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,height,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789aaa,redwood,120,,5,Alice,0,,1\n'
+ '12345678-1234-4123-8234-123456789aaa,redwood,120,,5,Alice,0,,1\n'
);
}));
@@ -487,7 +511,7 @@ describe('datasets and entities', () => {
logs[0].actorId.should.equal(5);
logs[0].actee.should.be.a.Dataset();
logs[0].actee.name.should.equal('trees');
- logs[0].details.properties.should.eql([ 'circumference' ]);
+ logs[0].details.properties.should.eql(['circumference']);
});
}));
@@ -926,8 +950,8 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,first_name,age,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789aaa,Jane (30),Jane,30,,5,Alice,0,,1\n' +
- '12345678-1234-4123-8234-123456789abc,Alice (88),Alice,88,,5,Alice,0,,1\n'
+ '12345678-1234-4123-8234-123456789aaa,Jane (30),Jane,30,,5,Alice,0,,1\n' +
+ '12345678-1234-4123-8234-123456789abc,Alice (88),Alice,88,,5,Alice,0,,1\n'
);
}));
@@ -958,7 +982,7 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,first_name,the.age,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789abc,Alice (88),Alice,88,,5,Alice,0,,1\n'
+ '12345678-1234-4123-8234-123456789abc,Alice (88),Alice,88,,5,Alice,0,,1\n'
);
}));
@@ -1014,8 +1038,8 @@ describe('datasets and entities', () => {
const withOutTs = text.replace(/\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/g, '');
withOutTs.should.be.eql(
'__id,label,f_q1,e_q2,a_q3,c_q4,b_q1,d_q2,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789ccc,one,w,x,y,z,,,,5,Alice,0,,1\n'+
- '12345678-1234-4123-8234-123456789bbb,two,,,c,d,a,b,,5,Alice,0,,1\n'+
+ '12345678-1234-4123-8234-123456789ccc,one,w,x,y,z,,,,5,Alice,0,,1\n' +
+ '12345678-1234-4123-8234-123456789bbb,two,,,c,d,a,b,,5,Alice,0,,1\n' +
'12345678-1234-4123-8234-123456789aaa,one,,,y,z,w,x,,5,Alice,0,,1\n'
);
}));
@@ -1088,7 +1112,7 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,first_name,age,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-111111111aaa,Robert Doe (expired),Robert,,,5,Alice,1,,2\n'
+ '12345678-1234-4123-8234-111111111aaa,Robert Doe (expired),Robert,,,5,Alice,1,,2\n'
);
}));
@@ -1133,7 +1157,7 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,first_name,age,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789abc,Alicia (85),Alicia,85,,5,Alice,1,,2\n'
+ '12345678-1234-4123-8234-123456789abc,Alicia (85),Alicia,85,,5,Alice,1,,2\n'
);
}));
@@ -1170,7 +1194,7 @@ describe('datasets and entities', () => {
const withOutTs = result.replace(isoRegex, '');
withOutTs.should.be.eql(
'__id,label,first_name,age,__createdAt,__creatorId,__creatorName,__updates,__updatedAt,__version\n' +
- '12345678-1234-4123-8234-123456789abc,Alicia (85),Alicia,85,,5,Alice,2,,3\n'
+ '12345678-1234-4123-8234-123456789abc,Alicia (85),Alicia,85,,5,Alice,2,,3\n'
);
}));
@@ -1982,6 +2006,7 @@ describe('datasets and entities', () => {
goodone.csv
md5:${etag.replace(/"/g, '')}
${domain}/v1/projects/1/forms/withAttachments/attachments/goodone.csv
+ ${domain}/v1/projects/1/datasets/goodone/integrity
`);
}));
@@ -2066,6 +2091,7 @@ describe('datasets and entities', () => {
goodone.csv
md5:${etag.replace(/"/g, '')}
${domain}/v1/projects/1/forms/withAttachments/attachments/goodone.csv
+ ${domain}/v1/projects/1/datasets/goodone/integrity
`);
}));
@@ -2834,7 +2860,7 @@ describe('datasets and entities', () => {
.expect(200)
.then(({ text }) => {
text.should.equal('name,label,__version,first_name,the.age\n' +
- '12345678-1234-4123-8234-123456789abc,Alice (88),1,Alice,88\n');
+ '12345678-1234-4123-8234-123456789abc,Alice (88),1,Alice,88\n');
});
}));
@@ -2941,6 +2967,7 @@ describe('datasets and entities', () => {
people.csv
md5:${etag.replace(/"/g, '')}
${domain}/v1/projects/1/forms/withAttachments/attachments/people.csv
+ ${domain}/v1/projects/1/datasets/people/integrity
`);
});
@@ -4567,7 +4594,7 @@ describe('datasets and entities', () => {
.expect(200)
.then(({ body }) => {
body.name.should.be.eql('people');
- body.properties.map(p => p.name).should.eql([ 'first_name', 'age' ]);
+ body.properties.map(p => p.name).should.eql(['first_name', 'age']);
});
await asAlice.get('/v1/audits?action=dataset.create')
@@ -5960,4 +5987,166 @@ describe('datasets and entities', () => {
}));
});
});
+
+ // OpenRosa endpoint
+ describe('GET /datasets/:name/integrity', () => {
+ it('should return notfound if the dataset does not exist', testEntities(async (service) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.get('/v1/projects/1/datasets/nonexistent/integrity')
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(404);
+ }));
+
+ it('should reject if the user cannot read', testEntities(async (service) => {
+ const asChelsea = await service.login('chelsea');
+
+ await asChelsea.get('/v1/projects/1/datasets/people/integrity')
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(403);
+ }));
+
+ it('should happily return given no entities', testService(async (service) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.post('/v1/projects/1/forms?publish=true')
+ .send(testData.forms.simpleEntity)
+ .expect(200);
+
+ await asAlice.get('/v1/projects/1/datasets/people/integrity')
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(200)
+ .then(async ({ text }) => {
+ const result = await xml2js.parseStringPromise(text, { explicitArray: false });
+ result.data.entities.should.not.have.property('entity');
+ });
+ }));
+
+ it('should return data for app-user with access to consuming Form', testEntities(async (service) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.post('/v1/projects/1/forms?publish=true')
+ .send(testData.forms.withAttachments.replace(/goodone/g, 'people'))
+ .set('Content-Type', 'application/xml')
+ .expect(200);
+
+ const appUser = await asAlice.post('/v1/projects/1/app-users')
+ .send({ displayName: 'test' })
+ .then(({ body }) => body);
+
+ await asAlice.post(`/v1/projects/1/forms/withAttachments/assignments/app-user/${appUser.id}`);
+
+ await service.get(`/v1/key/${appUser.token}/projects/1/datasets/people/integrity`)
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(200)
+ .then(async ({ text }) => {
+ const result = await xml2js.parseStringPromise(text, { explicitArray: false });
+ result.data.entities.entity.length.should.be.eql(2);
+ });
+ }));
+
+ it('should reject for app-user if consuming Form is closed', testEntities(async (service) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.post('/v1/projects/1/forms?publish=true')
+ .send(testData.forms.withAttachments.replace(/goodone/g, 'people'))
+ .set('Content-Type', 'application/xml')
+ .expect(200);
+
+ const appUser = await asAlice.post('/v1/projects/1/app-users')
+ .send({ displayName: 'test' })
+ .then(({ body }) => body);
+
+ await asAlice.post(`/v1/projects/1/forms/withAttachments/assignments/app-user/${appUser.id}`);
+
+ await asAlice.patch('/v1/projects/1/forms/withAttachments')
+ .send({ state: 'closed' })
+ .expect(200);
+
+ await service.get(`/v1/key/${appUser.token}/projects/1/datasets/people/integrity`)
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(403);
+ }));
+
+ it('should return with correct deleted value', testEntities(async (service) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.delete('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')
+ .expect(200);
+
+ await asAlice.get(`/v1/projects/1/datasets/people/integrity`)
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(200)
+ .then(async ({ text }) => {
+ const result = await xml2js.parseStringPromise(text, { explicitArray: false });
+ result.data.entities.entity.length.should.be.eql(2);
+ const [first, second] = result.data.entities.entity;
+ first.$.id.should.be.eql('12345678-1234-4123-8234-123456789aaa');
+ first.deleted.should.be.eql('false');
+ second.$.id.should.be.eql('12345678-1234-4123-8234-123456789abc');
+ second.deleted.should.be.eql('true');
+ });
+ }));
+
+ it('should return purged entities as well', testEntities(async (service, { Entities }) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.delete('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')
+ .expect(200);
+
+ await Entities.purge(true);
+
+ await asAlice.get(`/v1/projects/1/datasets/people/integrity`)
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(200)
+ .then(async ({ text }) => {
+ const result = await xml2js.parseStringPromise(text, { explicitArray: false });
+ result.data.entities.entity.length.should.be.eql(2);
+ const [first, second] = result.data.entities.entity;
+ first.$.id.should.be.eql('12345678-1234-4123-8234-123456789aaa');
+ first.deleted.should.be.eql('false');
+ second.$.id.should.be.eql('12345678-1234-4123-8234-123456789abc');
+ second.deleted.should.be.eql('true');
+ });
+ }));
+
+ it('should return only queried entities', testEntities(async (service) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.delete('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')
+ .expect(200);
+
+ await asAlice.get(`/v1/projects/1/datasets/people/integrity?id=12345678-1234-4123-8234-123456789abc`)
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(200)
+ .then(async ({ text }) => {
+ const result = await xml2js.parseStringPromise(text, { explicitArray: false });
+ const { entity } = result.data.entities;
+ entity.$.id.should.be.eql('12345678-1234-4123-8234-123456789abc');
+ entity.deleted.should.be.eql('true');
+ });
+ }));
+
+ it('should return only queried purged entities', testEntities(async (service, { Entities }) => {
+ const asAlice = await service.login('alice');
+
+ await asAlice.delete('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')
+ .expect(200);
+
+ await asAlice.delete('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789aaa')
+ .expect(200);
+
+ await Entities.purge(true);
+
+ await asAlice.get(`/v1/projects/1/datasets/people/integrity?id=12345678-1234-4123-8234-123456789abc`)
+ .set('X-OpenRosa-Version', '1.0')
+ .expect(200)
+ .then(async ({ text }) => {
+ const result = await xml2js.parseStringPromise(text, { explicitArray: false });
+ const { entity } = result.data.entities;
+ entity.$.id.should.be.eql('12345678-1234-4123-8234-123456789abc');
+ entity.deleted.should.be.eql('true');
+ });
+ }));
+ });
});
diff --git a/test/integration/api/sessions.js b/test/integration/api/sessions.js
index eca35ef13..9bc8dffea 100644
--- a/test/integration/api/sessions.js
+++ b/test/integration/api/sessions.js
@@ -14,6 +14,26 @@ describe('api: /sessions', () => {
body.should.be.a.Session();
})));
+ // These demonstrate a strange feature of bcrypt - a valid password can be
+ // repeated multiple times and still validate successfully. An alternative
+ // to these tests would be to check for NUL characters in supplied passwords
+ // and reject them before passing the values to bcrypt.
+ describe('weird bcrypt implementation details', () => {
+ [
+ [ 'repeated once', 'chelsea\0chelsea' ], // eslint-disable-line no-multi-spaces
+ [ 'repeated twice', 'chelsea\0chelsea\0chelsea' ], // eslint-disable-line no-multi-spaces
+ [ 'repeated until truncation', 'chelsea\0chelsea\0chelsea\0chelsea\0chelsea\0chelsea\0chelsea\0chelsea\0chelsea\0' ],
+ ].forEach(([ description, password ]) => {
+ it(`should treat a password ${description} as the singular version of the same`, testService((service) =>
+ service.post('/v1/sessions')
+ .send({ email: 'chelsea@getodk.org', password })
+ .expect(200)
+ .then(({ body }) => {
+ body.should.be.a.Session();
+ })));
+ });
+ });
+
it('should treat email addresses case insensitively', testService((service) =>
service.post('/v1/sessions')
.send({ email: 'cHeLsEa@getodk.OrG', password: 'chelsea' })
diff --git a/test/integration/setup.js b/test/integration/setup.js
index 8da404df2..c5c2529a6 100644
--- a/test/integration/setup.js
+++ b/test/integration/setup.js
@@ -149,12 +149,14 @@ const baseContainer = withDefaults({ db, mail, env, xlsform, enketo, Sentry, odk
// called to get a service context per request. we do some work to hijack the
// transaction system so that each test runs in a single transaction that then
// gets rolled back for a clean slate on the next test.
-const testService = (test) => () => new Promise((resolve, reject) => {
- baseContainer.transacting((container) => {
- const rollback = (f) => (x) => container.run(sql`rollback`).then(() => f(x));
- return test(augment(request(service(container))), container).then(rollback(resolve), rollback(reject));
- });//.catch(Promise.resolve.bind(Promise)); // TODO/SL probably restore
-});
+const testService = (test) => function() {
+ return new Promise((resolve, reject) => {
+ baseContainer.transacting((container) => {
+ const rollback = (f) => (x) => container.run(sql`rollback`).then(() => f(x));
+ return test.call(this, augment(request(service(container))), container).then(rollback(resolve), rollback(reject));
+ });//.catch(Promise.resolve.bind(Promise)); // TODO/SL probably restore
+ });
+};
// for some tests we explicitly need to make concurrent requests, in which case
// the transaction butchering we do for testService will not work. for these cases,
@@ -166,12 +168,14 @@ const testServiceFullTrx = (test) => function() {
// for some tests we just want a container, without any of the webservice stuffs between.
// this is that, with the same transaction trickery as a normal test.
-const testContainer = (test) => () => new Promise((resolve, reject) => {
- baseContainer.transacting((container) => {
- const rollback = (f) => (x) => container.run(sql`rollback`).then(() => f(x));
- return test(container).then(rollback(resolve), rollback(reject));
- });//.catch(Promise.resolve.bind(Promise));
-});
+const testContainer = (test) => function () {
+ return new Promise((resolve, reject) => {
+ baseContainer.transacting((container) => {
+ const rollback = (f) => (x) => container.run(sql`rollback`).then(() => f(x));
+ return test.call(this, container).then(rollback(resolve), rollback(reject));
+ });//.catch(Promise.resolve.bind(Promise));
+ });
+};
// complete the square of options:
const testContainerFullTrx = (test) => function() {
@@ -182,16 +186,18 @@ const testContainerFullTrx = (test) => function() {
// called to get a container context per task. ditto all // from testService.
// here instead our weird hijack work involves injecting our own constructed
// container into the task context so it just picks it up and uses it.
-const testTask = (test) => () => new Promise((resolve, reject) => {
- baseContainer.transacting((container) => {
- task._container = container.with({ task: true });
- const rollback = (f) => (x) => {
- delete task._container;
- return container.run(sql`rollback`).then(() => f(x));
- };
- return test(task._container).then(rollback(resolve), rollback(reject));
- });//.catch(Promise.resolve.bind(Promise));
-});
+const testTask = (test) => function() {
+ return new Promise((resolve, reject) => {
+ baseContainer.transacting((container) => {
+ task._container = container.with({ task: true });
+ const rollback = (f) => (x) => {
+ delete task._container;
+ return container.run(sql`rollback`).then(() => f(x));
+ };
+ return test.call(this, task._container).then(rollback(resolve), rollback(reject));
+ });//.catch(Promise.resolve.bind(Promise));
+ });
+};
// See testServiceFullTrx()
// eslint-disable-next-line space-before-function-paren, func-names
diff --git a/test/unit/util/crypto.js b/test/unit/util/crypto.js
index a4b6fb215..4887225ae 100644
--- a/test/unit/util/crypto.js
+++ b/test/unit/util/crypto.js
@@ -1,3 +1,4 @@
+const assert = require('node:assert/strict');
const { KeyObject } = require('node:crypto');
const appRoot = require('app-root-path');
const { readFileSync } = require('fs');
@@ -294,14 +295,10 @@ describe('util/crypto', () => {
const aesKey = getSubmissionKey(priv, encAesKey);
const ivs = getSubmissionIvs(instanceId, aesKey);
- let thrown = false;
- try { // i know about should.throws but i can't get it to assert the specific error type.
- getSubmissionCleartext(aesKey, ivs(1), unpaddedCiphertext);
- } catch (ex) {
- ex.message.should.equal('Could not perform decryption. Double check your passphrase and your data and try again.');
- thrown = true;
- }
- thrown.should.equal(true);
+ assert.throws(
+ () => getSubmissionCleartext(aesKey, ivs(1), unpaddedCiphertext),
+ { message: 'Could not perform decryption. Double check your passphrase and your data and try again.' },
+ );
});
});
});