diff --git a/src/sbvr-api/sbvr-utils.ts b/src/sbvr-api/sbvr-utils.ts index 8886f03ae..a9ddcc07c 100644 --- a/src/sbvr-api/sbvr-utils.ts +++ b/src/sbvr-api/sbvr-utils.ts @@ -133,7 +133,8 @@ export interface ApiKey extends Actor { } export interface Response { - statusCode: number; + id?: string | undefined; + status: number; headers?: | { [headerName: string]: any; @@ -1022,15 +1023,15 @@ export const runURI = async ( throw response; } - const { body: responseBody, statusCode, headers } = response as Response; + const { body: responseBody, status, headers } = response as Response; - if (statusCode != null && statusCode >= 400) { + if (status != null && status >= 400) { const ErrorClass = - statusCodeToError[statusCode as keyof typeof statusCodeToError]; + statusCodeToError[status as keyof typeof statusCodeToError]; if (ErrorClass != null) { throw new ErrorClass(undefined, responseBody, headers); } - throw new HttpError(statusCode, undefined, responseBody, headers); + throw new HttpError(status, undefined, responseBody, headers); } return responseBody as AnyObject | undefined; @@ -1069,7 +1070,7 @@ export const getAffectedIds = async ( args: HookArgs & { tx: Db.Tx; }, -): Promise => { +): Promise => { const { request } = args; if (request.affectedIds) { return request.affectedIds; @@ -1094,7 +1095,7 @@ const $getAffectedIds = async ({ tx, }: HookArgs & { tx: Db.Tx; -}): Promise => { +}): Promise => { if (!['PATCH', 'DELETE'].includes(request.method)) { // We can only find the affected ids in advance for requests that modify existing records, if they // can insert new records (POST/PUT) then we're unable to find the ids until the request has actually run @@ -1108,6 +1109,7 @@ const $getAffectedIds = async ({ const parsedRequest: uriParser.ParsedODataRequest & Partial> = await uriParser.parseOData({ + id: request.id, method: request.method, url: `/${request.vocabulary}${request.url}`, }); @@ -1158,6 +1160,44 @@ const runODataRequest = (req: Express.Request, vocabulary: 
string) => { api[vocabulary].logger.log('Parsing', req.method, req.url); } + if (req.url === `/${vocabulary}/$batch`) { + const { requests } = req.body as { requests: uriParser.UnparsedRequest[] }; + const ids = new Set( + requests + .map((request) => request.id) + .filter((id) => typeof id === 'string') as string[], + ); + if (ids.size !== requests.length) { + throw new BadRequestError( + 'All requests in a batch request must have unique string ids', + ); + } + + const methods = new Set( + requests.map((request) => request.method), + ); + if (methods.has(undefined)) { + throw new BadRequestError( + 'Requests of a batch request must have a "method"', + ); + } + + const urls = new Set( + requests.map((request) => request.url), + ); + if (urls.has(undefined)) { + throw new BadRequestError( + 'Requests of a batch request must have a "url"', + ); + } + if (urls.has(`/${vocabulary}/$batch`)) { + throw new BadRequestError('Batch requests cannot contain batch requests'); + } + + // TODO: make sure req.body.requests is valid structure/typing for req.batch + req.batch = requests; + } + // Get the hooks for the current method/vocabulary as we know it, // in order to run PREPARSE hooks, before parsing gets us more info const { versions } = models[vocabulary]; @@ -1205,17 +1245,22 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { await runHooks('PREPARSE', reqHooks, { req, tx: req.tx }); let requests: uriParser.UnparsedRequest[]; // Check if it is a single request or a batch + // console.error('+++++++++++++++++++', req.url, req.batch); if (req.batch != null && req.batch.length > 0) { requests = req.batch; } else { const { method, url, body } = req; - requests = [{ method, url, data: body }]; + requests = [{ method, url, body }]; } + // console.error('+++++++++++++++++++', req.url, requests); const prepareRequest = async ( parsedRequest: uriParser.ParsedODataRequest & Partial>, ): Promise => { + // if (process.env.something) { + //
console.error('parsedRequest', parsedRequest); + // } parsedRequest.engine = db.engine; parsedRequest.translateVersions = [...versions]; // Mark that the engine/translateVersions is required now that we've set it @@ -1226,6 +1271,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { // Add/check the relevant permissions try { $request.hooks = []; + // console.error('a'); for (const version of versions) { // We get the hooks list between each `runHooks` so that any resource renames will be used // when getting hooks for later versions @@ -1247,6 +1293,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { request: $request, tx: req.tx, }); + // console.error('b', version); const { resourceRenames } = models[version]; if (resourceRenames) { const resourceName = resolveSynonym($request); @@ -1257,7 +1304,10 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { } } } + // console.error('$request', $request.id); const translatedRequest = await uriParser.translateUri($request); + // console.error('translatedRequest', translatedRequest.id); + // console.error('c'); return await compileRequest(translatedRequest); } catch (err: any) { rollbackRequestHooks(reqHooks); @@ -1266,9 +1316,11 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { } }; + // console.error('1'); // Parse the OData requests const results = await mappingFn(requests, async (requestPart) => { const parsedRequest = await uriParser.parseOData(requestPart); + // console.error('2'); let request: uriParser.ODataRequest | uriParser.ODataRequest[]; if (Array.isArray(parsedRequest)) { @@ -1276,7 +1328,6 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { } else { request = await prepareRequest(parsedRequest); } - // Run the request in its own transaction return await runTransaction( req, request, @@ -1293,7 +1344,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { } }); if 
(Array.isArray(request)) { - const changeSetResults = new Map(); + const changeSetResults = new Map(); const changeSetRunner = runChangeSet(req, tx); for (const r of request) { await changeSetRunner(changeSetResults, r); @@ -1314,7 +1365,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => { if ( !Array.isArray(result) && result.body == null && - result.statusCode == null + result.status == null ) { console.error('No status or body set', req.url, responses); return new InternalRequestError(); @@ -1352,10 +1403,10 @@ export const handleODataRequest: Express.Handler = async (req, res, next) => { // Otherwise its a multipart request and we reply with the appropriate multipart response } else { - (res.status(200) as any).sendMulti( + res.status(200).json( responses.map((response) => { if (response instanceof HttpError) { - response = httpErrorToResponse(response); + return httpErrorToResponse(response); } else { return response; } @@ -1394,9 +1445,9 @@ export const handleHttpErrors = ( return false; }; const handleResponse = (res: Express.Response, response: Response): void => { - const { body, headers, statusCode } = response as Response; + const { body, headers, status } = response as Response; res.set(headers); - res.status(statusCode); + res.status(status); if (!body) { res.end(); } else { @@ -1406,9 +1457,9 @@ const handleResponse = (res: Express.Response, response: Response): void => { const httpErrorToResponse = ( err: HttpError, -): RequiredField => { +): RequiredField => { return { - statusCode: err.status, + status: err.status, body: err.getResponseBody(), headers: err.headers, }; @@ -1514,7 +1565,7 @@ const runRequest = async ( const runChangeSet = (req: Express.Request, tx: Db.Tx) => async ( - changeSetResults: Map, + changeSetResults: Map, request: uriParser.ODataRequest, ): Promise => { request = updateBinds(changeSetResults, request); @@ -1532,7 +1583,7 @@ const runChangeSet = // deferred untill the request they reference is run and 
returns an insert ID. // This function compiles the sql query of a request which has been deferred const updateBinds = ( - changeSetResults: Map, + changeSetResults: Map, request: uriParser.ODataRequest, ) => { if (request._defer) { @@ -1700,7 +1751,8 @@ const respondGet = async ( ); const response = { - statusCode: 200, + id: request.id, + status: 200, body: { d }, headers: { 'content-type': 'application/json' }, }; @@ -1715,14 +1767,15 @@ const respondGet = async ( } else { if (request.resourceName === '$metadata') { return { - statusCode: 200, + id: request.id, + status: 200, body: models[vocab].odataMetadata, headers: { 'content-type': 'xml' }, }; } else { // TODO: request.resourceName can be '$serviceroot' or a resource and we should return an odata xml document based on that return { - statusCode: 404, + status: 404, }; } } @@ -1778,7 +1831,7 @@ const respondPost = async ( } const response = { - statusCode: 201, + status: 201, body: result.d[0], headers: { 'content-type': 'application/json', @@ -1826,7 +1879,7 @@ const respondPut = async ( tx: Db.Tx, ): Promise => { const response = { - statusCode: 200, + status: 200, }; await runHooks('PRERESPOND', request.hooks, { req, diff --git a/src/sbvr-api/uri-parser.ts b/src/sbvr-api/uri-parser.ts index 28ed74988..c47426f46 100644 --- a/src/sbvr-api/uri-parser.ts +++ b/src/sbvr-api/uri-parser.ts @@ -29,9 +29,10 @@ import * as sbvrUtils from './sbvr-utils'; export type OdataBinds = ODataBinds; export interface UnparsedRequest { + id?: string; method: string; url: string; - data?: any; + body?: any; headers?: { [header: string]: string }; changeSet?: UnparsedRequest[]; _isChangeSet?: boolean; @@ -47,7 +48,7 @@ export interface ParsedODataRequest { odataQuery: ODataQuery; odataBinds: OdataBinds; custom: AnyObject; - id?: number | undefined; + id?: string | undefined; _defer?: boolean; } export interface ODataRequest extends ParsedODataRequest { @@ -60,8 +61,8 @@ export interface ODataRequest extends ParsedODataRequest { 
modifiedFields?: ReturnType< AbstractSQLCompiler.EngineInstance['getModifiedFields'] >; - affectedIds?: number[]; - pendingAffectedIds?: Promise; + affectedIds?: string[]; + pendingAffectedIds?: Promise; hooks?: Array<[string, InstantiatedHooks]>; engine: AbstractSQLCompiler.Engines; } @@ -292,12 +293,13 @@ export async function parseOData( const odata = memoizedParseOdata(url); return { + id: b.id, method: b.method as SupportedMethod, url, vocabulary: apiRoot, resourceName: odata.tree.resource, originalResourceName: odata.tree.resource, - values: b.data ?? {}, + values: b.body ?? {}, odataQuery: odata.tree, odataBinds: odata.binds, custom: {}, @@ -362,7 +364,7 @@ const parseODataChangeset = ( originalResourceName: odata.tree.resource, odataBinds: odata.binds, odataQuery: odata.tree, - values: b.data ?? {}, + values: b.body ?? {}, custom: {}, id: contentId, _defer: defer, diff --git a/test/06-batch.test.ts b/test/06-batch.test.ts new file mode 100644 index 000000000..d888d6739 --- /dev/null +++ b/test/06-batch.test.ts @@ -0,0 +1,317 @@ +const configPath = __dirname + '/fixtures/06-batch/config'; +const hooksPath = __dirname + '/fixtures/06-batch/translations/hooks'; +import { testInit, testDeInit, testLocalServer } from './lib/test-init'; +import { faker } from '@faker-js/faker'; +import { expect } from 'chai'; +import * as supertest from 'supertest'; + +// TODO: figure out how to not persist the results across describes +describe('06 batch tests', function () { + let pineServer: Awaited>; + before(async () => { + pineServer = await testInit({ + configPath, + hooksPath, + deleteDb: true, + }); + // setup faker so that test date uniqueness is set for all test cases + faker.seed(); + }); + + after(async () => { + testDeInit(pineServer); + }); + + describe('Basic', () => { + it('check /ping route is OK', async () => { + await supertest(testLocalServer).get('/ping').expect(200, 'OK'); + }); + }); + + describe('test non-atomic batch requests', () => { + it('should 
create two students', async () => { + await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: '0', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100000, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: '1', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100001, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(200); + const res = await supertest(testLocalServer) + .get('/university/student') + .expect(200); + expect(res.body) + .to.be.an('object') + .that.has.ownProperty('d') + .to.be.an('array') + .of.length(2); + }); + + // TODO: Is it okay that the effects of earlier completed requests persist? + // i.e. successfully post student before the failed one: database updates + it('should not complete following requests if an earlier request fails', async () => { + await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: '0', + method: 'POST', + url: '/university/student', + body: { + matrix_number: null, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: '1', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(200); + const res = await supertest(testLocalServer) + .get('/university/student') + .expect(200); + expect(res.body) + .to.be.an('object') + .that.has.ownProperty('d') + .to.be.an('array') + .of.length(2); + }); + + it('should fail if any request does not have a string id', async () => { + const res1 = await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: '0', + method: 'POST', + url: '/university/student', + body: { + 
matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100004, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(400); + expect(res1.body).to.equal( + 'All requests in a batch request must have unique string ids', + ); + const res2 = await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: 0, + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: 'hello', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100004, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(400); + expect(res2.body).to.equal( + 'All requests in a batch request must have unique string ids', + ); + }); + + it('should fail if not all requests have a unique id', async () => { + const res = await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: '0', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: '0', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100004, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(400); + expect(res.body).to.equal( + 'All requests in a batch request must have unique string ids', + ); + }); + + it('should fail if any of the requests is a batch request', async () => { + const res = await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: 
'0', + method: 'POST', + url: '/university/$batch', + body: { + matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: '1', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100004, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(400); + expect(res.body).to.equal('Batch requests cannot contain batch requests'); + }); + + it('should fail if any of the requests does not have a method or url property', async () => { + const res1 = await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: '0', + method: 'POST', + body: { + matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: '1', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100004, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(400); + expect(res1.body).to.equal( + 'Requests of a batch request must have a "url"', + ); + const res2 = await supertest(testLocalServer) + .post('/university/$batch') + .send({ + requests: [ + { + id: '0', + url: '/university/student', + body: { + matrix_number: 100003, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'foo', + }, + }, + { + id: '1', + method: 'POST', + url: '/university/student', + body: { + matrix_number: 100004, + name: faker.name.firstName(), + last_name: faker.name.lastName(), + studies_at__campus: 'bar', + }, + }, + ], + }) + .expect(400); + expect(res2.body).to.equal( + 'Requests of a batch request must have a "method"', + ); + }); + }); + + describe('test atomic batch requests', () => { + // TODO + }); +}); diff --git a/test/fixtures/06-batch/config.ts b/test/fixtures/06-batch/config.ts new file mode 100644 
index 000000000..c3ba3aa74 --- /dev/null +++ b/test/fixtures/06-batch/config.ts @@ -0,0 +1,42 @@ +import { AbstractSqlQuery } from '@balena/abstract-sql-compiler'; +import { getAbstractSqlModelFromFile } from '../../../src/bin/utils'; +import type { ConfigLoader } from '../../../src/server-glue/module'; + +const apiRoot = 'university'; +const modelName = 'university'; +const modelFile = __dirname + '/university.sbvr'; + +import { v1AbstractSqlModel, v1Translations } from './translations/v1'; + +export const abstractSql = getAbstractSqlModelFromFile(modelFile); + +abstractSql.tables['student'].fields.push({ + fieldName: 'computed field', + dataType: 'Text', + required: false, + computed: ['EmbeddedText', 'latest_computed_field'] as AbstractSqlQuery, +}); + +export default { + models: [ + { + modelName, + abstractSql, + apiRoot, + }, + { + apiRoot: 'v1', + modelName: 'v1', + abstractSql: v1AbstractSqlModel, + translateTo: 'university', + translations: v1Translations, + }, + ], + users: [ + { + username: 'guest', + password: ' ', + permissions: ['resource.all'], + }, + ], +} as ConfigLoader.Config; diff --git a/test/fixtures/06-batch/translations/hooks.ts b/test/fixtures/06-batch/translations/hooks.ts new file mode 100644 index 000000000..cc91e7ed6 --- /dev/null +++ b/test/fixtures/06-batch/translations/hooks.ts @@ -0,0 +1 @@ +import('./v1/hooks'); diff --git a/test/fixtures/06-batch/translations/v1/hooks.ts b/test/fixtures/06-batch/translations/v1/hooks.ts new file mode 100644 index 000000000..4d78f1b0e --- /dev/null +++ b/test/fixtures/06-batch/translations/v1/hooks.ts @@ -0,0 +1,16 @@ +import { sbvrUtils } from '../../../../../src/server-glue/module'; + +const addHook = ( + methods: Array[0]>, + resource: string, + hook: sbvrUtils.Hooks, +) => { + methods.map((method) => sbvrUtils.addPureHook(method, 'v1', resource, hook)); +}; + +addHook(['PUT', 'POST', 'PATCH'], 'student', { + async POSTPARSE({ request }) { + request.values.last_name = request.values.lastname; + 
delete request.values.lastname; + }, +}); diff --git a/test/fixtures/06-batch/translations/v1/index.ts b/test/fixtures/06-batch/translations/v1/index.ts new file mode 100644 index 000000000..a359cb8c7 --- /dev/null +++ b/test/fixtures/06-batch/translations/v1/index.ts @@ -0,0 +1,24 @@ +import { ConfigLoader } from '../../../../../src/server-glue/module'; +import { getAbstractSqlModelFromFile } from '../../../../../src/bin/utils'; +import { AbstractSqlQuery } from '@balena/abstract-sql-compiler'; + +export const toVersion = 'university'; + +export const v1AbstractSqlModel = getAbstractSqlModelFromFile( + __dirname + '/university.sbvr', +); + +v1AbstractSqlModel.tables['student'].fields.push({ + fieldName: 'computed field', + dataType: 'Text', + required: false, + computed: ['EmbeddedText', 'v1_computed_field'] as AbstractSqlQuery, +}); + +v1AbstractSqlModel.relationships['version'] = { v1: {} }; + +export const v1Translations: ConfigLoader.Model['translations'] = { + student: { + lastname: 'last name', + }, +}; diff --git a/test/fixtures/06-batch/translations/v1/university.sbvr b/test/fixtures/06-batch/translations/v1/university.sbvr new file mode 100644 index 000000000..625b24773 --- /dev/null +++ b/test/fixtures/06-batch/translations/v1/university.sbvr @@ -0,0 +1,28 @@ +Vocabulary: university + +Term: name + Concept Type: Short Text (Type) + +Term: lastname + Concept Type: Short Text (Type) + +Term: matrix number + Concept Type: Integer (Type) + +Term: campus + Concept Type: Short Text (Type) + +Term: student + +Fact Type: student has matrix number + Necessity: each student has exactly one matrix number + Necessity: each matrix number is of exactly one student + +Fact Type: student has name + Necessity: each student has exactly one name + +Fact Type: student has lastname + Necessity: each student has exactly one lastname + +Fact Type: student studies at campus + Necessity: each student studies at exactly one campus \ No newline at end of file diff --git 
a/test/fixtures/06-batch/university.sbvr b/test/fixtures/06-batch/university.sbvr new file mode 100644 index 000000000..eeeda27d1 --- /dev/null +++ b/test/fixtures/06-batch/university.sbvr @@ -0,0 +1,28 @@ +Vocabulary: university + +Term: name + Concept Type: Short Text (Type) + +Term: last name + Concept Type: Short Text (Type) + +Term: matrix number + Concept Type: Integer (Type) + +Term: campus + Concept Type: Short Text (Type) + +Term: student + +Fact Type: student has matrix number + Necessity: each student has exactly one matrix number + Necessity: each matrix number is of exactly one student + +Fact Type: student has name + Necessity: each student has exactly one name + +Fact Type: student has last name + Necessity: each student has exactly one last name + +Fact Type: student studies at campus + Necessity: each student studies at exactly one campus \ No newline at end of file