diff --git a/src/sbvr-api/sbvr-utils.ts b/src/sbvr-api/sbvr-utils.ts
index 8886f03ae..848c39262 100644
--- a/src/sbvr-api/sbvr-utils.ts
+++ b/src/sbvr-api/sbvr-utils.ts
@@ -95,6 +95,8 @@ import * as odataResponse from './odata-response';
 import { env } from '../server-glue/module';
 import { translateAbstractSqlModel } from './translations';
 
+const validBatchMethods = new Set(['PUT', 'POST', 'PATCH', 'DELETE', 'GET']);
+
 const LF2AbstractSQLTranslator = LF2AbstractSQL.createTranslator(sbvrTypes);
 const LF2AbstractSQLTranslatorVersion = `${LF2AbstractSQLVersion}+${sbvrTypesVersion}`;
 
@@ -133,7 +135,8 @@ export interface ApiKey extends Actor {
 }
 
 export interface Response {
-  statusCode: number;
+  id?: string | undefined;
+  status: number;
   headers?:
     | {
         [headerName: string]: any;
@@ -1022,15 +1025,15 @@ export const runURI = async (
     throw response;
   }
 
-  const { body: responseBody, statusCode, headers } = response as Response;
+  const { body: responseBody, status, headers } = response as Response;
 
-  if (statusCode != null && statusCode >= 400) {
+  if (status != null && status >= 400) {
     const ErrorClass =
-      statusCodeToError[statusCode as keyof typeof statusCodeToError];
+      statusCodeToError[status as keyof typeof statusCodeToError];
     if (ErrorClass != null) {
       throw new ErrorClass(undefined, responseBody, headers);
     }
-    throw new HttpError(statusCode, undefined, responseBody, headers);
+    throw new HttpError(status, undefined, responseBody, headers);
   }
 
   return responseBody as AnyObject | undefined;
@@ -1069,7 +1072,7 @@ export const getAffectedIds = async (
   args: HookArgs & {
     tx: Db.Tx;
   },
-): Promise<number[]> => {
+): Promise<string[]> => {
   const { request } = args;
   if (request.affectedIds) {
     return request.affectedIds;
@@ -1094,7 +1097,7 @@ const $getAffectedIds = async ({
   tx,
 }: HookArgs & {
   tx: Db.Tx;
-}): Promise<number[]> => {
+}): Promise<string[]> => {
   if (!['PATCH', 'DELETE'].includes(request.method)) {
     // We can only find the affected ids in advance for requests that modify existing records, if they
     // can insert new records (POST/PUT) then we're unable to find the ids until the request has actually run
@@ -1108,6 +1111,7 @@ const $getAffectedIds = async ({
   const parsedRequest: uriParser.ParsedODataRequest &
     Partial> = await uriParser.parseOData({
+      id: request.id,
       method: request.method,
       url: `/${request.vocabulary}${request.url}`,
     });
@@ -1158,6 +1162,65 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
     api[vocabulary].logger.log('Parsing', req.method, req.url);
   }
 
+  if (req.url === `/${vocabulary}/$batch`) {
+    const { requests } = req.body as { requests: uriParser.UnparsedRequest[] };
+    req.batch = requests;
+    const ids = new Set(
+      requests
+        .map((request) => request.id)
+        .filter((id) => typeof id === 'string') as string[],
+    );
+    if (ids.size !== requests.length) {
+      throw new BadRequestError(
+        'All requests in a batch request must have unique string ids',
+      );
+    }
+
+    for (const request of requests) {
+      if (request.method == null) {
+        throw new BadRequestError(
+          'Requests of a batch request must have a "method"',
+        );
+      }
+      const upperCaseMethod = request.method.toUpperCase();
+      if (!validBatchMethods.has(upperCaseMethod)) {
+        throw new BadRequestError(
+          `Requests of a batch request must have a method matching one of the following: ${Array.from(
+            validBatchMethods,
+          ).join(', ')}`,
+        );
+      }
+      if (
+        request.body !== undefined &&
+        (upperCaseMethod === 'GET' || upperCaseMethod === 'DELETE')
+      ) {
+        throw new BadRequestError(
+          'GET and DELETE requests of a batch request must not have a body',
+        );
+      }
+    }
+
+    const urls = new Set(
+      requests.map((request) => request.url),
+    );
+    if (urls.has(undefined)) {
+      throw new BadRequestError(
+        'Requests of a batch request must have a "url"',
+      );
+    }
+    if (urls.has(`/${vocabulary}/$batch`)) {
+      throw new BadRequestError('Batch requests cannot contain batch requests');
+    }
+    const urlModels = new Set(
+      Array.from(urls.values()).map((url: string) => url.split('/')[1]),
+    );
+    if (urlModels.size > 1) {
+      throw new BadRequestError(
+        'Batch requests must consist of requests for only one model',
+      );
+    }
+  }
+
   // Get the hooks for the current method/vocabulary as we know it,
   // in order to run PREPARSE hooks, before parsing gets us more info
   const { versions } = models[vocabulary];
@@ -1209,7 +1272,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
     requests = req.batch;
   } else {
     const { method, url, body } = req;
-    requests = [{ method, url, data: body }];
+    requests = [{ method, url, body }];
   }
 
   const prepareRequest = async (
@@ -1268,7 +1331,10 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
   // Parse the OData requests
   const results = await mappingFn(requests, async (requestPart) => {
-    const parsedRequest = await uriParser.parseOData(requestPart);
+    const parsedRequest = await uriParser.parseOData(
+      requestPart,
+      req.headers,
+    );
 
     let request: uriParser.ODataRequest | uriParser.ODataRequest[];
     if (Array.isArray(parsedRequest)) {
@@ -1276,7 +1342,6 @@
     } else {
       request = await prepareRequest(parsedRequest);
     }
-    // Run the request in its own transaction
     return await runTransaction(
       req,
       request,
@@ -1293,7 +1358,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
          }
        });
        if (Array.isArray(request)) {
-         const changeSetResults = new Map<number, Response>();
+         const changeSetResults = new Map<string, Response>();
          const changeSetRunner = runChangeSet(req, tx);
          for (const r of request) {
            await changeSetRunner(changeSetResults, r);
@@ -1314,7 +1379,7 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
     if (
       !Array.isArray(result) &&
       result.body == null &&
-      result.statusCode == null
+      result.status == null
     ) {
       console.error('No status or body set', req.url, responses);
       return new InternalRequestError();
@@ -1352,15 +1417,15 @@ export const handleODataRequest: Express.Handler = async (req, res, next) => {
       // Otherwise its a multipart request and we reply with the appropriate multipart response
     } else {
-      (res.status(200) as any).sendMulti(
-        responses.map((response) => {
+      res.status(200).json({
+        responses: responses.map((response) => {
           if (response instanceof HttpError) {
-            response = httpErrorToResponse(response);
+            return httpErrorToResponse(response);
           } else {
             return response;
           }
         }),
-      );
+      });
     }
   } catch (e: any) {
     if (handleHttpErrors(req, res, e)) {
@@ -1387,16 +1452,16 @@ export const handleHttpErrors = (
     for (const handleErrorFn of handleErrorFns) {
       handleErrorFn(req, err);
     }
-    const response = httpErrorToResponse(err);
+    const response = httpErrorToResponse(err, req);
     handleResponse(res, response);
     return true;
   }
   return false;
 };
 
 const handleResponse = (res: Express.Response, response: Response): void => {
-  const { body, headers, statusCode } = response as Response;
+  const { body, headers, status } = response as Response;
   res.set(headers);
-  res.status(statusCode);
+  res.status(status);
   if (!body) {
     res.end();
   } else {
@@ -1406,10 +1471,15 @@ const handleResponse = (res: Express.Response, response: Response): void => {
 
 const httpErrorToResponse = (
   err: HttpError,
-): RequiredField<Response, 'statusCode' | 'body'> => {
+  req?: Express.Request,
+): RequiredField<Response, 'status' | 'body'> => {
+  const message = err.getResponseBody();
   return {
-    statusCode: err.status,
-    body: err.getResponseBody(),
+    status: err.status,
+    body:
+      req?.batch != null && req.batch.length > 0
+        ? { responses: [], message }
+        : message,
     headers: err.headers,
   };
 };
@@ -1514,7 +1584,7 @@ const runRequest = async (
 const runChangeSet =
   (req: Express.Request, tx: Db.Tx) =>
   async (
-    changeSetResults: Map<number, Response>,
+    changeSetResults: Map<string, Response>,
     request: uriParser.ODataRequest,
   ): Promise<void> => {
     request = updateBinds(changeSetResults, request);
@@ -1532,7 +1602,7 @@ const runChangeSet =
 // deferred untill the request they reference is run and returns an insert ID.
 // This function compiles the sql query of a request which has been deferred
 const updateBinds = (
-  changeSetResults: Map<number, Response>,
+  changeSetResults: Map<string, Response>,
   request: uriParser.ODataRequest,
 ) => {
   if (request._defer) {
@@ -1700,7 +1770,8 @@ const respondGet = async (
   );
 
   const response = {
-    statusCode: 200,
+    id: request.id,
+    status: 200,
     body: { d },
     headers: { 'content-type': 'application/json' },
   };
@@ -1715,14 +1786,15 @@
   } else {
     if (request.resourceName === '$metadata') {
       return {
-        statusCode: 200,
+        id: request.id,
+        status: 200,
         body: models[vocab].odataMetadata,
         headers: { 'content-type': 'xml' },
       };
     } else {
       // TODO: request.resourceName can be '$serviceroot' or a resource and we should return an odata xml document based on that
       return {
-        statusCode: 404,
+        status: 404,
       };
     }
   }
@@ -1778,7 +1850,7 @@ const respondPost = async (
   }
 
   const response = {
-    statusCode: 201,
+    status: 201,
     body: result.d[0],
     headers: {
       'content-type': 'application/json',
@@ -1826,7 +1898,7 @@ const respondPut = async (
   tx: Db.Tx,
 ): Promise<Response> => {
   const response = {
-    statusCode: 200,
+    status: 200,
   };
   await runHooks('PRERESPOND', request.hooks, {
     req,
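The `$batch` support added to `sbvr-utils.ts` above accepts a plain JSON body of the shape `{ requests: [...] }` on `POST /<model>/$batch` and, per the `res.status(200).json({ responses: ... })` change in `handleODataRequest`, replies with `{ responses: [...] }`. A minimal client-side sketch of a payload that satisfies the validation rules (unique string `id`s, an allowed method, a `url`, no body on GET/DELETE, one model per batch) is shown below; the endpoint path, field names, and error messages come from this diff, while the `fetch` wrapper, `baseUrl`, and example record values are illustrative assumptions:

```typescript
// Sketch only: exercising the JSON $batch endpoint introduced in this diff.
// `baseUrl` and the example student fields are assumptions for illustration.
interface BatchSubRequest {
  id: string; // must be a unique string within the batch
  method: 'GET' | 'PUT' | 'POST' | 'PATCH' | 'DELETE';
  url: string; // every url must target the same model
  body?: unknown; // must be omitted for GET and DELETE
}

async function sendBatch(baseUrl: string, requests: BatchSubRequest[]) {
  const res = await fetch(`${baseUrl}/university/$batch`, {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ requests }),
  });
  // On success the handler replies 200 with `{ responses: [...] }`; a rejected
  // batch comes back as an error status with a `message` in the body instead.
  return (await res.json()) as { responses: Array<Record<string, unknown>> };
}

void sendBatch('http://localhost:1337', [
  {
    id: '0',
    method: 'POST',
    url: '/university/student',
    body: {
      matrix_number: 100000,
      name: 'Grace',
      last_name: 'Hopper',
      studies_at__campus: 'foo',
    },
  },
  { id: '1', method: 'GET', url: '/university/student' },
]);
```

Note that the batch is handled non-atomically (see the "test non-atomic batch requests" suite below): earlier sub-requests can persist even when a later one fails, and sub-requests after a failure are not executed, so callers should inspect every entry in `responses`.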
diff --git a/src/sbvr-api/uri-parser.ts b/src/sbvr-api/uri-parser.ts
index 28ed74988..27801b7c7 100644
--- a/src/sbvr-api/uri-parser.ts
+++ b/src/sbvr-api/uri-parser.ts
@@ -29,15 +29,19 @@ import * as sbvrUtils from './sbvr-utils';
 export type OdataBinds = ODataBinds;
 
 export interface UnparsedRequest {
+  id?: string;
   method: string;
   url: string;
-  data?: any;
+  body?: any;
   headers?: { [header: string]: string };
   changeSet?: UnparsedRequest[];
   _isChangeSet?: boolean;
 }
 
 export interface ParsedODataRequest {
+  headers?: {
+    [key: string]: string | string[] | undefined;
+  };
   method: SupportedMethod;
   url: string;
   vocabulary: string;
@@ -47,7 +51,7 @@ export interface ParsedODataRequest {
   odataQuery: ODataQuery;
   odataBinds: OdataBinds;
   custom: AnyObject;
-  id?: number | undefined;
+  id?: string | undefined;
   _defer?: boolean;
 }
 export interface ODataRequest extends ParsedODataRequest {
@@ -60,8 +64,8 @@ export interface ODataRequest extends ParsedODataRequest {
   modifiedFields?: ReturnType<
     AbstractSQLCompiler.EngineInstance['getModifiedFields']
   >;
-  affectedIds?: number[];
-  pendingAffectedIds?: Promise<number[]>;
+  affectedIds?: string[];
+  pendingAffectedIds?: Promise<string[]>;
   hooks?: Array<[string, InstantiatedHooks]>;
   engine: AbstractSQLCompiler.Engines;
 }
@@ -263,15 +267,27 @@ export const metadataEndpoints = ['$metadata', '$serviceroot'];
 
 export async function parseOData(
   b: UnparsedRequest & { _isChangeSet?: false },
+  headers?: {
+    [key: string]: string | string[] | undefined;
+  },
 ): Promise<ParsedODataRequest>;
 export async function parseOData(
   b: UnparsedRequest & { _isChangeSet: true },
+  headers?: {
+    [key: string]: string | string[] | undefined;
+  },
 ): Promise<ParsedODataRequest[]>;
 export async function parseOData(
   b: UnparsedRequest,
+  headers?: {
+    [key: string]: string | string[] | undefined;
+  },
 ): Promise<ParsedODataRequest | ParsedODataRequest[]>;
 export async function parseOData(
   b: UnparsedRequest,
+  headers?: {
+    [key: string]: string | string[] | undefined;
+  },
 ): Promise<ParsedODataRequest | ParsedODataRequest[]> {
   try {
     if (b._isChangeSet && b.changeSet != null) {
@@ -292,12 +308,14 @@ export async function parseOData(
     const odata = memoizedParseOdata(url);
 
     return {
+      id: b.id,
+      headers: { ...headers, ...b.headers } ?? headers,
       method: b.method as SupportedMethod,
       url,
       vocabulary: apiRoot,
       resourceName: odata.tree.resource,
       originalResourceName: odata.tree.resource,
-      values: b.data ?? {},
+      values: b.body ?? {},
       odataQuery: odata.tree,
       odataBinds: odata.binds,
       custom: {},
@@ -362,7 +380,7 @@ const parseODataChangeset = (
     originalResourceName: odata.tree.resource,
     odataBinds: odata.binds,
     odataQuery: odata.tree,
-    values: b.data ?? {},
+    values: b.body ?? {},
     custom: {},
     id: contentId,
     _defer: defer,
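`parseOData` now threads the outer request's headers down to each parsed sub-request: the batch-level headers passed in from `runODataRequest` (`req.headers`) are merged with a sub-request's own `headers`, with the per-request values taking precedence, and the sub-request's `body` (formerly `data`) becomes `values`. A small sketch of that precedence, mirroring the `{ ...headers, ...b.headers }` expression in the diff; the header names here are made up for illustration:

```typescript
// Mirrors the merge in parseOData: per-request headers override batch-level ones.
const batchHeaders = { authorization: 'Bearer outer-token', 'x-trace-id': 'batch-1' };
const subRequestHeaders = { 'x-trace-id': 'request-7' };

// Note: the `?? headers` fallback in the diff can never fire, because an object
// literal is never nullish; the spread alone determines the result.
const effectiveHeaders = { ...batchHeaders, ...subRequestHeaders };
// => { authorization: 'Bearer outer-token', 'x-trace-id': 'request-7' }
console.log(effectiveHeaders);
```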
diff --git a/test/06-batch.test.ts b/test/06-batch.test.ts
new file mode 100644
index 000000000..a6080e360
--- /dev/null
+++ b/test/06-batch.test.ts
@@ -0,0 +1,494 @@
+const configPath = __dirname + '/fixtures/06-batch/config';
+const hooksPath = __dirname + '/fixtures/06-batch/translations/hooks';
+import { testInit, testDeInit, testLocalServer } from './lib/test-init';
+import { faker } from '@faker-js/faker';
+import { expect } from 'chai';
+import * as supertest from 'supertest';
+
+const validBatchMethods = ['PUT', 'POST', 'PATCH', 'DELETE', 'GET'];
+
+// TODO: figure out how to not persist the results across describes
+describe('06 batch tests', function () {
+  let pineServer: Awaited<ReturnType<typeof testInit>>;
+  before(async () => {
+    pineServer = await testInit({
+      configPath,
+      hooksPath,
+      deleteDb: true,
+    });
+    // setup faker so that test data uniqueness is set for all test cases
+    faker.seed();
+  });
+
+  after(async () => {
+    testDeInit(pineServer);
+  });
+
+  describe('Basic', () => {
+    it('check /ping route is OK', async () => {
+      await supertest(testLocalServer).get('/ping').expect(200, 'OK');
+    });
+  });
+
+  describe('test non-atomic batch requests', () => {
+    it('should create two students', async () => {
+      await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100000,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100001,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(200);
+      const res = await supertest(testLocalServer)
+        .get('/university/student')
+        .expect(200);
+      expect(res.body)
+        .to.be.an('object')
+        .that.has.ownProperty('d')
+        .to.be.an('array')
+        .of.length(2);
+    });
+
+    it('successful request should have `responses` in its body', async () => {
+      const res = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'GET',
+              url: '/university/student',
+            },
+          ],
+        })
+        .expect(200);
+      expect(res.body)
+        .to.be.an('object')
+        .that.has.ownProperty('responses')
+        .to.be.an('array')
+        .of.length(1);
+      expect(res.body.responses[0].body)
+        .to.be.an('object')
+        .that.has.ownProperty('d')
+        .to.be.an('array')
+        .of.length(2);
+    });
+
+    // TODO: It seems we default `continue-on-error` to `false`, but the docs specify `true`. Do we want to keep this behavior?
+    it('should not complete following requests if an earlier request fails', async () => {
+      await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: null,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(200);
+      const res = await supertest(testLocalServer)
+        .get('/university/student')
+        .expect(200);
+      expect(res.body)
+        .to.be.an('object')
+        .that.has.ownProperty('d')
+        .to.be.an('array')
+        .of.length(2);
+    });
+
+    it('should fail if any request does not have a string id', async () => {
+      const res1 = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res1.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          'All requests in a batch request must have unique string ids',
+        );
+      const res2 = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: 0,
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: 'hello',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res2.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          'All requests in a batch request must have unique string ids',
+        );
+    });
+
+    it('should fail if not all requests have a unique id', async () => {
+      const res = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          'All requests in a batch request must have unique string ids',
+        );
+    });
+
+    it('should fail if any of the requests is a batch request', async () => {
+      const res = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/$batch',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res.body)
+        .to.haveOwnProperty('message')
+        .that.equals('Batch requests cannot contain batch requests');
+    });
+
+    it('should fail if any of the requests does not have a url property', async () => {
+      const res = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res.body)
+        .to.haveOwnProperty('message')
+        .that.equals('Requests of a batch request must have a "url"');
+    });
+
+    it('should fail if any of the requests does not have a valid value for method', async () => {
+      const res1 = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res1.body)
+        .to.haveOwnProperty('message')
+        .that.equals('Requests of a batch request must have a "method"');
+      const res2 = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'MERGE',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res2.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          `Requests of a batch request must have a method matching one of the following: ${validBatchMethods.join(
+            ', ',
+          )}`,
+        );
+    });
+
+    it('should fail if any of the requests have method GET or DELETE and have a body', async () => {
+      const res1 = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'GET',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res1.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          'GET and DELETE requests of a batch request must not have a body',
+        );
+      const res2 = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'DELETE',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res2.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          'GET and DELETE requests of a batch request must not have a body',
+        );
+    });
+
+    it('should fail if trying to query cross-model in one batch', async () => {
+      const res = await supertest(testLocalServer)
+        .post('/university/$batch')
+        .send({
+          requests: [
+            {
+              id: '0',
+              method: 'POST',
+              url: '/v1/student',
+              body: {
+                matrix_number: 100003,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'foo',
+              },
+            },
+            {
+              id: '1',
+              method: 'POST',
+              url: '/university/student',
+              body: {
+                matrix_number: 100004,
+                name: faker.name.firstName(),
+                last_name: faker.name.lastName(),
+                studies_at__campus: 'bar',
+              },
+            },
+          ],
+        })
+        .expect(400);
+      expect(res.body)
+        .to.haveOwnProperty('message')
+        .that.equals(
+          'Batch requests must consist of requests for only one model',
+        );
+    });
+  });
+});
diff --git a/test/fixtures/06-batch/config.ts b/test/fixtures/06-batch/config.ts
new file mode 100644
index 000000000..c3ba3aa74
--- /dev/null
+++ b/test/fixtures/06-batch/config.ts
@@ -0,0 +1,42 @@
+import { AbstractSqlQuery } from '@balena/abstract-sql-compiler';
+import { getAbstractSqlModelFromFile } from '../../../src/bin/utils';
+import type { ConfigLoader } from '../../../src/server-glue/module';
+
+const apiRoot = 'university';
+const modelName = 'university';
+const modelFile = __dirname + '/university.sbvr';
+
+import { v1AbstractSqlModel, v1Translations } from './translations/v1';
+
+export const abstractSql = getAbstractSqlModelFromFile(modelFile);
+
+abstractSql.tables['student'].fields.push({
+  fieldName: 'computed field',
+  dataType: 'Text',
+  required: false,
+  computed: ['EmbeddedText', 'latest_computed_field'] as AbstractSqlQuery,
+});
+
+export default {
+  models: [
+    {
+      modelName,
+      abstractSql,
+      apiRoot,
+    },
+    {
+      apiRoot: 'v1',
+      modelName: 'v1',
+      abstractSql: v1AbstractSqlModel,
+      translateTo: 'university',
+      translations: v1Translations,
+    },
+  ],
+  users: [
+    {
+      username: 'guest',
+      password: ' ',
+      permissions: ['resource.all'],
+    },
+  ],
+} as ConfigLoader.Config;
diff --git a/test/fixtures/06-batch/translations/hooks.ts b/test/fixtures/06-batch/translations/hooks.ts
new file mode 100644
index 000000000..cc91e7ed6
--- /dev/null
+++ b/test/fixtures/06-batch/translations/hooks.ts
@@ -0,0 +1 @@
+import('./v1/hooks');
diff --git a/test/fixtures/06-batch/translations/v1/hooks.ts b/test/fixtures/06-batch/translations/v1/hooks.ts
new file mode 100644
index 000000000..4d78f1b0e
--- /dev/null
+++ b/test/fixtures/06-batch/translations/v1/hooks.ts
@@ -0,0 +1,16 @@
+import { sbvrUtils } from '../../../../../src/server-glue/module';
+
+const addHook = (
+  methods: Array<Parameters<typeof sbvrUtils.addPureHook>[0]>,
+  resource: string,
+  hook: sbvrUtils.Hooks,
+) => {
+  methods.map((method) => sbvrUtils.addPureHook(method, 'v1', resource, hook));
+};
+
+addHook(['PUT', 'POST', 'PATCH'], 'student', {
+  async POSTPARSE({ request }) {
+    request.values.last_name = request.values.lastname;
+    delete request.values.lastname;
+  },
+});
diff --git a/test/fixtures/06-batch/translations/v1/index.ts b/test/fixtures/06-batch/translations/v1/index.ts
new file mode 100644
index 000000000..a359cb8c7
--- /dev/null
+++ b/test/fixtures/06-batch/translations/v1/index.ts
@@ -0,0 +1,24 @@
+import { ConfigLoader } from '../../../../../src/server-glue/module';
+import { getAbstractSqlModelFromFile } from '../../../../../src/bin/utils';
+import { AbstractSqlQuery } from '@balena/abstract-sql-compiler';
+
+export const toVersion = 'university';
+
+export const v1AbstractSqlModel = getAbstractSqlModelFromFile(
+  __dirname + '/university.sbvr',
+);
+
+v1AbstractSqlModel.tables['student'].fields.push({
+  fieldName: 'computed field',
+  dataType: 'Text',
+  required: false,
+  computed: ['EmbeddedText', 'v1_computed_field'] as AbstractSqlQuery,
+});
+
+v1AbstractSqlModel.relationships['version'] = { v1: {} };
+
+export const v1Translations: ConfigLoader.Model['translations'] = {
+  student: {
+    lastname: 'last name',
+  },
+};
diff --git a/test/fixtures/06-batch/translations/v1/university.sbvr b/test/fixtures/06-batch/translations/v1/university.sbvr
new file mode 100644
index 000000000..625b24773
--- /dev/null
+++ b/test/fixtures/06-batch/translations/v1/university.sbvr
@@ -0,0 +1,28 @@
+Vocabulary: university
+
+Term: name
+  Concept Type: Short Text (Type)
+
+Term: lastname
+  Concept Type: Short Text (Type)
+
+Term: matrix number
+  Concept Type: Integer (Type)
+
+Term: campus
+  Concept Type: Short Text (Type)
+
+Term: student
+
+Fact Type: student has matrix number
+  Necessity: each student has exactly one matrix number
+  Necessity: each matrix number is of exactly one student
+
+Fact Type: student has name
+  Necessity: each student has exactly one name
+
+Fact Type: student has lastname
+  Necessity: each student has exactly one lastname
+
+Fact Type: student studies at campus
+  Necessity: each student studies at exactly one campus
\ No newline at end of file
diff --git a/test/fixtures/06-batch/university.sbvr b/test/fixtures/06-batch/university.sbvr
new file mode 100644
index 000000000..eeeda27d1
--- /dev/null
+++ b/test/fixtures/06-batch/university.sbvr
@@ -0,0 +1,28 @@
+Vocabulary: university
+
+Term: name
+  Concept Type: Short Text (Type)
+
+Term: last name
+  Concept Type: Short Text (Type)
+
+Term: matrix number
+  Concept Type: Integer (Type)
+
+Term: campus
+  Concept Type: Short Text (Type)
+
+Term: student
+
+Fact Type: student has matrix number
+  Necessity: each student has exactly one matrix number
+  Necessity: each matrix number is of exactly one student
+
+Fact Type: student has name
+  Necessity: each student has exactly one name
+
+Fact Type: student has last name
+  Necessity: each student has exactly one last name
+
+Fact Type: student studies at campus
+  Necessity: each student studies at exactly one campus
\ No newline at end of file
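Because every sub-request carries a unique string `id`, and `respondGet` now echoes `request.id` on its responses, a client can correlate batch results with the requests that produced them. The sketch below shows one way to do that, including the whole-batch error shape `{ responses: [], message }` that `httpErrorToResponse` produces when a batch request is rejected outright; the response field names come from this diff, while the helper itself is hypothetical:

```typescript
// Hypothetical helper: index the entries of a $batch response by sub-request id.
interface BatchEntry {
  id?: string;
  status?: number;
  body?: unknown;
  headers?: Record<string, string>;
}

interface BatchResponseBody {
  responses: BatchEntry[];
  message?: string;
}

function indexBatchResponses(payload: BatchResponseBody): Map<string, BatchEntry> {
  if (payload.responses.length === 0 && payload.message != null) {
    // Whole-batch failure: httpErrorToResponse returns { responses: [], message }.
    throw new Error(payload.message);
  }
  const byId = new Map<string, BatchEntry>();
  for (const entry of payload.responses) {
    if (entry.id != null) {
      byId.set(entry.id, entry);
    }
  }
  return byId;
}
```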