From 82dbe7e3ff05a0ca96f49e75ffa104079b7e68c6 Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Fri, 12 Jan 2024 10:41:56 +0100 Subject: [PATCH 01/28] feat: b59-rdf type declarations --- .changeset/lemon-crabs-fly.md | 5 ++ .changeset/rotten-frogs-teach.md | 5 ++ package-lock.json | 92 +++++++++++++++++++---- packages/env/index.ts | 6 +- packages/rdf/fs.js | 13 +++- packages/rdf/imports.js | 4 + packages/rdf/lib/PatternMatcher.js | 26 ++++++- packages/rdf/lib/append.js | 30 +++++++- packages/rdf/lib/membership.js | 43 ++++++++++- packages/rdf/lib/metadata/applyOptions.js | 6 ++ packages/rdf/lib/voidStats.js | 51 ++++++++++++- packages/rdf/mapMatch.js | 27 ++++++- packages/rdf/open.js | 19 ++++- packages/rdf/package.json | 11 ++- packages/rdf/setGraph.js | 12 ++- packages/rdf/tsconfig.json | 17 +++++ 16 files changed, 329 insertions(+), 38 deletions(-) create mode 100644 .changeset/lemon-crabs-fly.md create mode 100644 .changeset/rotten-frogs-teach.md create mode 100644 packages/rdf/tsconfig.json diff --git a/.changeset/lemon-crabs-fly.md b/.changeset/lemon-crabs-fly.md new file mode 100644 index 00000000..5d831079 --- /dev/null +++ b/.changeset/lemon-crabs-fly.md @@ -0,0 +1,5 @@ +--- +"barnard59-rdf": minor +--- + +Bundle TypeScript type declarations diff --git a/.changeset/rotten-frogs-teach.md b/.changeset/rotten-frogs-teach.md new file mode 100644 index 00000000..d45301c7 --- /dev/null +++ b/.changeset/rotten-frogs-teach.md @@ -0,0 +1,5 @@ +--- +"barnard59-env": patch +--- + +Exported environment did not include fs functionality diff --git a/package-lock.json b/package-lock.json index e91ff40b..cda0981c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6219,6 +6219,15 @@ "@types/node": "*" } }, + "node_modules/@types/file-fetch": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/@types/file-fetch/-/file-fetch-1.6.6.tgz", + "integrity": 
"sha512-DvVqgVRx5+Fcy9VfOA0+bNuDSmcyN9ahN3Wms83szdqYQeDQIerTxdaoVo4Ghnnm1kdUsYRVy23LMfzhkOuIZA==", + "dev": true, + "dependencies": { + "@types/node-fetch": "*" + } + }, "node_modules/@types/glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", @@ -6288,8 +6297,7 @@ }, "node_modules/@types/jsonld": { "version": "1.5.9", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@types/lodash": { "version": "4.14.202", @@ -6297,6 +6305,12 @@ "integrity": "sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ==", "dev": true }, + "node_modules/@types/mime-types": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz", + "integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==", + "dev": true + }, "node_modules/@types/minimatch": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", @@ -6322,6 +6336,30 @@ "undici-types": "~5.26.4" } }, + "node_modules/@types/node-fetch": { + "version": "2.6.10", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.10.tgz", + "integrity": "sha512-PPpPK6F9ALFTn59Ka3BaL+qGuipRfxNE8qVgkp0bVixeiR2c2/L+IVOiBdu9JhhT22sWnQEp6YyHGI2b2+CMcA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/@types/normalize-package-data": { "version": "2.4.1", "dev": true, @@ -6347,6 +6385,12 @@ 
"integrity": "sha512-Zj9XGVMO/mPq/dG3vuTvOr/AgbEfAwW3t+vIZ8x6QQ7MmUQQPXpVVl0UljXupQm1nbUA8qgP9AlEioMngAhuCg==", "dev": true }, + "node_modules/@types/proto-fetch": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/proto-fetch/-/proto-fetch-1.0.5.tgz", + "integrity": "sha512-Y6O2gsr5mRGiKO3ZcU8ir+hcmk7NTbwfu4f/YIcupk49dlLqyP4NkGsrPKoZIuPh9yAyMX+eXvJsGd0g12DRXg==", + "dev": true + }, "node_modules/@types/rdf-dataset-ext": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/@types/rdf-dataset-ext/-/rdf-dataset-ext-1.0.6.tgz", @@ -6357,6 +6401,16 @@ "rdf-js": "^4.0.2" } }, + "node_modules/@types/rdf-transform-triple-to-quad": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/rdf-transform-triple-to-quad/-/rdf-transform-triple-to-quad-2.0.5.tgz", + "integrity": "sha512-IIDwZzEuPThkv+XKofohbVUZfqmIOaR7JpqrsEchDJJdjIQPXLiMC+XzZI/J65FA8pKNPTGmeR7c/9GIQq/5Ew==", + "dev": true, + "dependencies": { + "@rdfjs/types": ">=1.0.0", + "@types/readable-stream": "*" + } + }, "node_modules/@types/rdf-validate-shacl": { "version": "0.4.6", "resolved": "https://registry.npmjs.org/@types/rdf-validate-shacl/-/rdf-validate-shacl-0.4.6.tgz", @@ -6400,11 +6454,21 @@ "@types/rdfjs__term-set": "*" } }, + "node_modules/@types/rdfjs__fetch": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/rdfjs__fetch/-/rdfjs__fetch-3.0.5.tgz", + "integrity": "sha512-ATJbavxPTQk6qYRxVdNBQd8Odv7jSYlxtLRYiX0LjHJateShbQ3Vw6jBwcVkKXFYtmdXaGPeSUA+ns49VchHmw==", + "dev": true, + "dependencies": { + "@rdfjs/types": ">=1.0.0", + "@types/node": "*", + "@types/rdfjs__fetch-lite": "*" + } + }, "node_modules/@types/rdfjs__fetch-lite": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/rdfjs__fetch-lite/-/rdfjs__fetch-lite-3.0.7.tgz", - "integrity": "sha512-xqxtNe01Z3kkzXQMbb8IhqzyMbP073sTFkegjlABOyTIwHQtAM5lgQiAunrtGpAnLdsD1Qp2qunxvB+WssFl2A==", - "peer": true, + "version": "3.0.8", + "resolved": 
"https://registry.npmjs.org/@types/rdfjs__fetch-lite/-/rdfjs__fetch-lite-3.0.8.tgz", + "integrity": "sha512-CU77/POKYNlD0kLfLb1vZI0PXPl1ubmjGqlh/B7dVUW6IlgwTepg+OWDpH76osSX9IlzMfJ1TB0SZqTqwh9kTA==", "dependencies": { "@rdfjs/types": "*", "@types/node": "*", @@ -6414,7 +6478,6 @@ "node_modules/@types/rdfjs__formats-common": { "version": "3.1.0", "license": "MIT", - "peer": true, "dependencies": { "@types/rdfjs__parser-jsonld": "*", "@types/rdfjs__parser-n3": "*", @@ -6428,7 +6491,6 @@ "node_modules/@types/rdfjs__formats-common/node_modules/rdfxml-streaming-parser": { "version": "2.2.3", "license": "MIT", - "peer": true, "dependencies": { "@rdfjs/types": "*", "@rubensworks/saxes": "^6.0.1", @@ -6443,7 +6505,6 @@ "node_modules/@types/rdfjs__formats-common/node_modules/readable-stream": { "version": "4.4.2", "license": "MIT", - "peer": true, "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", @@ -6465,7 +6526,6 @@ "node_modules/@types/rdfjs__parser-jsonld": { "version": "2.1.1", "license": "MIT", - "peer": true, "dependencies": { "@types/jsonld": "*", "rdf-js": "^4.0.2" @@ -6474,7 +6534,6 @@ "node_modules/@types/rdfjs__parser-n3": { "version": "2.0.1", "license": "MIT", - "peer": true, "dependencies": { "rdf-js": "^4.0.2" } @@ -6482,7 +6541,6 @@ "node_modules/@types/rdfjs__serializer-jsonld": { "version": "2.0.0", "license": "MIT", - "peer": true, "dependencies": { "rdf-js": "^4.0.2" } @@ -6490,7 +6548,6 @@ "node_modules/@types/rdfjs__serializer-ntriples": { "version": "2.0.1", "license": "MIT", - "peer": true, "dependencies": { "rdf-js": "^4.0.2" } @@ -6498,7 +6555,6 @@ "node_modules/@types/rdfjs__sink-map": { "version": "2.0.1", "license": "MIT", - "peer": true, "dependencies": { "@rdfjs/types": "*" } @@ -27046,7 +27102,7 @@ "chai": "^4.3.10", "get-stream": "^6.0.1", "into-stream": "^7.0.0", - "isstream": "^0.1.2", + "is-stream": "^3.0.0", "rimraf": "^3.0.2", "sinon": "^17.0.0" }, @@ -27707,6 +27763,12 @@ }, "devDependencies": { "@rdfjs/to-ntriples": 
"^2.0.0", + "@types/file-fetch": "^1.6.6", + "@types/mime-types": "^2.1.4", + "@types/proto-fetch": "^1.0.5", + "@types/rdf-transform-triple-to-quad": "^2.0.5", + "@types/rdfjs__fetch": "^3.0.5", + "@types/rdfjs__fetch-lite": "^3.0.8", "assert-throws-async": "^3.0.0", "chai": "^4.3.7", "express-as-promise": "^1.2.0", diff --git a/packages/env/index.ts b/packages/env/index.ts index 718a1dbf..c0f94a63 100644 --- a/packages/env/index.ts +++ b/packages/env/index.ts @@ -1,8 +1,8 @@ -import { create } from '@zazuko/env-node' -import type { DefaultEnv, DerivedEnvironment } from '@zazuko/env' +import { create, NodeEnv } from '@zazuko/env-node' +import type { DerivedEnvironment } from '@zazuko/env' import NamespacesFactory from './lib/Namespaces.js' import ConstantsFactory from './lib/Constants.js' -export type Environment = DerivedEnvironment +export type Environment = DerivedEnvironment export default create(NamespacesFactory, ConstantsFactory) diff --git a/packages/rdf/fs.js b/packages/rdf/fs.js index 7b174870..6fd64452 100644 --- a/packages/rdf/fs.js +++ b/packages/rdf/fs.js @@ -1,5 +1,9 @@ import { Transform } from 'readable-stream' +/** + * @this {import('barnard59-core').Context} + * @return {Transform} + */ export function parse() { const { env } = this @@ -7,16 +11,19 @@ export function parse() { objectMode: true, transform: async function (path, encoding, callback) { try { + /** + * @type {import('@rdfjs/types').Stream} + */ const fileStream = env.fromFile(path) let failed = false - fileStream.on('data', quad => this.push(quad)) - fileStream.on('error', (e) => { + fileStream.on('data', /** @type {import('@rdfjs/types').Quad} */ quad => this.push(quad)) + fileStream.on('error', (/** @type {any} */ e) => { callback(e) failed = true }) fileStream.on('end', () => !failed && callback()) - } catch (e) { + } catch (/** @type {any} */ e) { callback(e) } }, diff --git a/packages/rdf/imports.js b/packages/rdf/imports.js index c3606ab1..a4ad0ee7 100644 --- 
a/packages/rdf/imports.js +++ b/packages/rdf/imports.js @@ -1,5 +1,9 @@ import transform from 'rdf-transform-graph-imports' +/** + * @this {import('barnard59-core').Context} + * @return {import('stream').Transform} + */ export default function () { return transform(this.env) } diff --git a/packages/rdf/lib/PatternMatcher.js b/packages/rdf/lib/PatternMatcher.js index 0ba4b8be..0ed70afe 100644 --- a/packages/rdf/lib/PatternMatcher.js +++ b/packages/rdf/lib/PatternMatcher.js @@ -1,7 +1,22 @@ import rdf from 'barnard59-env' +/** + * @typedef {'subject' | 'predicate' | 'object' | 'graph'} QuadPart + * @typedef {import('@rdfjs/term-set').default} TermSet + */ + class PatternMatcher { + /** + * @param {object} [options] + * @param {import('@rdfjs/types').Quad_Subject | Iterable} [options.subject] + * @param {import('@rdfjs/types').Quad_Predicate | Iterable} [options.predicate] + * @param {import('@rdfjs/types').Quad_Object | Iterable} [options.object] + * @param {import('@rdfjs/types').Quad_Graph | Iterable} [options.graph] + */ constructor({ subject, predicate, object, graph } = {}) { + /** + * @type {Partial>} + */ this.pattern = {} this.set('subject', subject) @@ -10,16 +25,25 @@ class PatternMatcher { this.set('graph', graph) } + /** + * @param {import('@rdfjs/types').Quad} quad + * @returns {boolean} + */ test(quad) { + // @ts-expect-error return Object.entries(this.pattern).every(([name, values]) => values.has(quad[name])) } + /** + * @param {QuadPart} name + * @param {import('@rdfjs/types').Term | Iterable | undefined} value + */ set(name, value) { if (!value) { return } - if (value[Symbol.iterator]) { + if (Symbol.iterator in value) { this.pattern[name] = rdf.termSet([...value]) } else { this.pattern[name] = rdf.termSet([value]) diff --git a/packages/rdf/lib/append.js b/packages/rdf/lib/append.js index 3ab1daff..13e6e569 100644 --- a/packages/rdf/lib/append.js +++ b/packages/rdf/lib/append.js @@ -3,6 +3,16 @@ import { localFetch } from './localFetch/localFetch.js' 
import { applyOptions } from './metadata/applyOptions.js' class MetadataAppend extends Transform { + /** + * @param {import('barnard59-core').Context} context + * @param {string | undefined} basePath + * @param {string | URL} input + * @param {{ + * dateCreated?: Date, + * dateModified?: Date, + * graph: undefined + * }} options + */ constructor(context, basePath, input, options) { super({ objectMode: true }) this.context = context @@ -11,17 +21,25 @@ class MetadataAppend extends Transform { this.options = options } + /** + * @param {import('@rdfjs/types').Quad} chunk + * @param {string} encoding + * @param {import('stream').TransformCallback} callback + */ _transform(chunk, encoding, callback) { callback(null, chunk) } + /** + * @param {import('stream').TransformCallback} callback + */ async _flush(callback) { try { const { quadStream, metadata } = await localFetch.call(this.context, this.input, this.basePath) for (const quad of await applyOptions(quadStream, metadata, this.options)) { this.push(quad) } - } catch (err) { + } catch (/** @type {any} */ err) { this.destroy(err) } finally { callback() @@ -29,6 +47,16 @@ class MetadataAppend extends Transform { } } +/** + * @this import('barnard59-core').Context + * @param {object} [options] + * @param {string | URL | undefined} [options.input] + * @param {string} [options.basepath] + * @param {Date} [options.dateModified] + * @param {Date} [options.dateCreated] + * @param {*} [options.graph] + * @return {Promise} + */ async function append({ input, basepath, diff --git a/packages/rdf/lib/membership.js b/packages/rdf/lib/membership.js index 70715410..37120698 100644 --- a/packages/rdf/lib/membership.js +++ b/packages/rdf/lib/membership.js @@ -3,6 +3,13 @@ import { Transform } from 'readable-stream' import * as ns from './namespaces.js' class AddRelations extends Transform { + /** + * @param {import('barnard59-core').Context} context + * @param {object} options + * @param {(targetUri: string | 
import('@rdfjs/types').NamedNode) => import('@rdfjs/types').Quad} options.createRelation + * @param {import('@rdfjs/types').Quad[]} options.additionalQuads + * @param {import('@rdfjs/term-set').default} options.classes + */ constructor(context, { createRelation, additionalQuads, @@ -14,28 +21,49 @@ class AddRelations extends Transform { this.additionalQuads = additionalQuads } + /** + * @param {import('@rdfjs/types').Quad} chunk + * @param {string} encoding + * @param {import('stream').TransformCallback} callback + */ _transform(chunk, encoding, callback) { - if (chunk.predicate.equals(ns.rdf.type) && this.classes.has(chunk.object)) { + if (chunk.predicate.equals(ns.rdf.type) && this.classes.has(chunk.object) && chunk.subject.termType === 'NamedNode') { const quad = this.createRelation(chunk.subject) this.push(quad) } callback(null, chunk) } + /** + * @param {import('stream').TransformCallback} callback + */ async _flush(callback) { this.additionalQuads.forEach(quad => this.push(quad)) callback() } } +/** + * @param {string|import('@rdfjs/types').NamedNode} item + * @return {import('@rdfjs/types').NamedNode} + */ const toNamedNode = item => typeof item === 'string' ? 
rdf.namedNode(item) : item +/** + * @this {import('barnard59-core').Context} + * @param {object} options + * @param {string | import('@rdfjs/types').NamedNode} options.targetUri + * @param {string | import('@rdfjs/types').NamedNode} options.targetClass + * @param {string | import('@rdfjs/types').NamedNode} options.property + * @param {(string | import('@rdfjs/types').NamedNode)[]} options.classes + * @return {Transform} + */ function toTarget({ targetUri, targetClass, property, classes = [], -} = {}) { +}) { if (!targetUri) { throw new Error('Needs targetUri as parameter') } @@ -56,12 +84,21 @@ function toTarget({ }) } +/** + * @this {import('barnard59-core').Context} + * @param {object} options + * @param {string | import('@rdfjs/types').NamedNode} options.sourceUri + * @param {string | import('@rdfjs/types').NamedNode} options.sourceClass + * @param {string | import('@rdfjs/types').NamedNode} options.property + * @param {(string | import('@rdfjs/types').NamedNode)[]} options.classes + * @return {Transform} + */ function fromSource({ sourceUri, sourceClass, property, classes = [], -} = {}) { +}) { if (!sourceUri) { throw new Error('Needs sourceUri as parameter') } diff --git a/packages/rdf/lib/metadata/applyOptions.js b/packages/rdf/lib/metadata/applyOptions.js index 2ee5b5ec..071f0b9b 100644 --- a/packages/rdf/lib/metadata/applyOptions.js +++ b/packages/rdf/lib/metadata/applyOptions.js @@ -43,6 +43,12 @@ function resolveNamedDate(value, metadata) { return namedDateLiterals.has(value) ? 
namedDateLiterals.get(value)(metadata) : toDateLiteral(value) } +/** + * @param {import('@rdfjs/types').Stream & import('stream').EventEmitter} quadStream + * @param metadata + * @param options + * @return {Promise} + */ async function applyOptions(quadStream, metadata = {}, options = {}) { let dataset = await fromStream(rdf.dataset(), quadStream) diff --git a/packages/rdf/lib/voidStats.js b/packages/rdf/lib/voidStats.js index 92b5cb7b..80a38864 100644 --- a/packages/rdf/lib/voidStats.js +++ b/packages/rdf/lib/voidStats.js @@ -2,7 +2,22 @@ import rdf from 'barnard59-env' import { Transform } from 'readable-stream' import * as ns from './namespaces.js' +/** + * @typedef {(datasetUri: import('@rdfjs/types').NamedNode, index: number) => import('@rdfjs/types').NamedNode} CreatePartitionUri + */ + class VoidStats extends Transform { + /** + * @param {import('barnard59-core').Context} context + * @param {object} options + * @param {import('@rdfjs/types').NamedNode} options.voidDatasetUri + * @param {(import('@rdfjs/types').NamedNode | undefined)[]} options.classPartitions + * @param {(import('@rdfjs/types').NamedNode | undefined)[]} options.propertyPartitions + * @param {boolean} options.includeTotals + * @param {import('@rdfjs/types').NamedNode | undefined} options.graph + * @param {CreatePartitionUri} options.createClassPartitionUri + * @param {CreatePartitionUri} options.createPropertyPartitionUri + */ constructor(context, { voidDatasetUri, classPartitions, @@ -33,6 +48,11 @@ class VoidStats extends Transform { this.totalEntityCount = 0 } + /** + * @param {import('@rdfjs/types').Quad} chunk + * @param {string} encoding + * @param {(error?: Error | null, chunk?: import('@rdfjs/types').Quad) => void} callback + */ _transform(chunk, encoding, callback) { this.totalTripleCount++ @@ -54,6 +74,9 @@ class VoidStats extends Transform { callback(null, chunk) } + /** + * @param {() => void} callback + */ async _flush(callback) { try { const datasetUri = 
toNamedNode(this.voidDatasetUri) @@ -106,7 +129,7 @@ class VoidStats extends Transform { for (const quad of stats.dataset) { this.push(quad) } - } catch (err) { + } catch (/** @type {any} */ err) { this.destroy(err) } finally { callback() @@ -114,6 +137,20 @@ class VoidStats extends Transform { } } +/** + * @overload + * @param {string | import('@rdfjs/types').NamedNode} item + * @returns {import('@rdfjs/types').NamedNode} + */ +/** + * @overload + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @returns {import('@rdfjs/types').NamedNode | undefined} + */ +/** + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @returns {import('@rdfjs/types').NamedNode | undefined} + */ function toNamedNode(item) { if (item === undefined) { return undefined @@ -121,6 +158,18 @@ function toNamedNode(item) { return typeof item === 'string' ? rdf.namedNode(item) : item } +/** + * @this {import('barnard59-core').Context} + * @param {object} options + * @param {string | import('@rdfjs/types').NamedNode} [options.voidDatasetUri] + * @param {(string | import('@rdfjs/types').NamedNode)[]} [options.classPartitions] + * @param {(string | import('@rdfjs/types').NamedNode)[]} [options.propertyPartitions] + * @param {boolean} [options.includeTotals] + * @param {string | import('@rdfjs/types').NamedNode} [options.graph] + * @param {CreatePartitionUri} [options.createClassPartitionUri] + * @param {CreatePartitionUri} [options.createPropertyPartitionUri] + * @return {Transform} + */ function graphStats({ voidDatasetUri = undefined, classPartitions = [], diff --git a/packages/rdf/mapMatch.js b/packages/rdf/mapMatch.js index 4671c0fd..de95143d 100644 --- a/packages/rdf/mapMatch.js +++ b/packages/rdf/mapMatch.js @@ -1,12 +1,33 @@ -import rdf from 'barnard59-env' import { Transform } from 'readable-stream' import PatternMatcher from './lib/PatternMatcher.js' +/** + * @typedef {(this: { rdf: import('barnard59-env').Environment }, quad: 
import('@rdfjs/types').Quad) => Promise | T} MapCallback + * @template T + */ + +/** + * @this {import('barnard59-core').Context} + * @param {object} options + * @param {MapCallback} options.map + * @param {import('@rdfjs/types').Quad_Subject | Iterable} [options.subject] + * @param {import('@rdfjs/types').Quad_Predicate | Iterable} [options.predicate] + * @param {import('@rdfjs/types').Quad_Object | Iterable} [options.object] + * @param {import('@rdfjs/types').Quad_Graph | Iterable} [options.graph] + * @return {Transform} + * @template T + */ function mapMatch({ map, subject, predicate, object, graph }) { const matcher = new PatternMatcher({ subject, predicate, object, graph }) return new Transform({ objectMode: true, + /** + * @param {import('@rdfjs/types').Quad} quad + * @param {string} encoding + * @param {(error?: Error | null, data?: unknown) => void} callback + * @return {Promise} + */ transform: async (quad, encoding, callback) => { // these are not the quads you're looking for if (!matcher.test(quad)) { @@ -14,8 +35,8 @@ function mapMatch({ map, subject, predicate, object, graph }) { } try { - callback(null, await map.call({ rdf }, quad)) - } catch (err) { + callback(null, await map.call({ rdf: this.env }, quad)) + } catch (/** @type {any} */ err) { callback(err) } }, diff --git a/packages/rdf/open.js b/packages/rdf/open.js index 3c20103c..a5464d3e 100644 --- a/packages/rdf/open.js +++ b/packages/rdf/open.js @@ -1,3 +1,9 @@ +/** + * @this {import('barnard59-core').Context} + * @param {string} pathOrUri + * @param {string} [mediaTypeOverride] + * @return {Promise} + */ export default async function (pathOrUri, mediaTypeOverride) { let url @@ -8,11 +14,18 @@ export default async function (pathOrUri, mediaTypeOverride) { } const response = await this.env.fetch(url) + if (!response.body) { + throw new Error(`Empty response from ${url}`) + } + let parserStream if (mediaTypeOverride) { - parserStream = this.env.formats.parsers.import(mediaTypeOverride, 
response.body, { - baseIRI: response.url, - }) + parserStream = this.env.formats.parsers.import( + mediaTypeOverride, + // @ts-expect-error + response.body, { + baseIRI: response.url, + }) if (!parserStream) { throw new Error(`No parser found for ${mediaTypeOverride}`) diff --git a/packages/rdf/package.json b/packages/rdf/package.json index ab8798ce..d88e451d 100644 --- a/packages/rdf/package.json +++ b/packages/rdf/package.json @@ -5,7 +5,10 @@ "main": "index.js", "type": "module", "scripts": { - "test": "mocha" + "test": "mocha", + "prebuild": "rimraf *.d.ts lib/*.d.ts", + "build": "tsc", + "prepack": "npm run build" }, "publishConfig": { "access": "public" @@ -39,6 +42,12 @@ }, "devDependencies": { "@rdfjs/to-ntriples": "^2.0.0", + "@types/file-fetch": "^1.6.6", + "@types/mime-types": "^2.1.4", + "@types/proto-fetch": "^1.0.5", + "@types/rdf-transform-triple-to-quad": "^2.0.5", + "@types/rdfjs__fetch-lite": "^3.0.8", + "@types/rdfjs__fetch": "^3.0.5", "assert-throws-async": "^3.0.0", "chai": "^4.3.7", "express-as-promise": "^1.2.0", diff --git a/packages/rdf/setGraph.js b/packages/rdf/setGraph.js index 7bdce464..f4a2a783 100644 --- a/packages/rdf/setGraph.js +++ b/packages/rdf/setGraph.js @@ -1,14 +1,18 @@ -import rdf from 'barnard59-env' import TripleToQuadTransform from 'rdf-transform-triple-to-quad' +/** + * @this {import('barnard59-core').Context} + * @param {string | import('@rdfjs/types').Term | undefined} graph + * @return {import('readable-stream').Transform} + */ function setGraph(graph) { - const iri = (graph && graph.value) || (graph && graph.toString()) || '' + const iri = (graph && (typeof graph === 'string' ? 
graph : graph.value)) || '' if (iri === '') { - return new TripleToQuadTransform(rdf.defaultGraph()) + return new TripleToQuadTransform(this.env.defaultGraph()) } - return new TripleToQuadTransform(rdf.namedNode(iri), { factory: rdf }) + return new TripleToQuadTransform(this.env.namedNode(iri), { factory: this.env }) } export default setGraph diff --git a/packages/rdf/tsconfig.json b/packages/rdf/tsconfig.json new file mode 100644 index 00000000..a84667c2 --- /dev/null +++ b/packages/rdf/tsconfig.json @@ -0,0 +1,17 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "emitDeclarationOnly": true + }, + "files": [ + "fs.js", + "imports.js", + "mapMatch.js", + "members.js", + "metadata.js", + "open.js", + "setGraph.js" + ] +} From ea621876cf066b6fbafef90093a5f18534fabddb Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Fri, 12 Jan 2024 10:59:36 +0100 Subject: [PATCH 02/28] refactor: reduce the use environment directly --- packages/rdf/lib/PatternMatcher.js | 11 ++-- packages/rdf/lib/membership.js | 26 ++++----- packages/rdf/lib/metadata/applyOptions.js | 21 +++---- packages/rdf/lib/metadata/datasetClasses.js | 9 ++- .../rdf/lib/metadata/namedDateLiterals.js | 7 +-- packages/rdf/lib/namespaces.js | 5 -- packages/rdf/lib/toNamedNode.js | 24 ++++++++ packages/rdf/lib/voidStats.js | 55 ++++++------------- packages/rdf/mapMatch.js | 2 +- 9 files changed, 75 insertions(+), 85 deletions(-) delete mode 100644 packages/rdf/lib/namespaces.js create mode 100644 packages/rdf/lib/toNamedNode.js diff --git a/packages/rdf/lib/PatternMatcher.js b/packages/rdf/lib/PatternMatcher.js index 0ed70afe..98daf0cf 100644 --- a/packages/rdf/lib/PatternMatcher.js +++ b/packages/rdf/lib/PatternMatcher.js @@ -1,5 +1,3 @@ -import rdf from 'barnard59-env' - /** * @typedef {'subject' | 'predicate' | 'object' | 'graph'} QuadPart * @typedef {import('@rdfjs/term-set').default} TermSet @@ -7,13 +5,16 @@ import rdf from 'barnard59-env' class 
PatternMatcher { /** + * @param {import('barnard59-env').Environment} rdf * @param {object} [options] * @param {import('@rdfjs/types').Quad_Subject | Iterable} [options.subject] * @param {import('@rdfjs/types').Quad_Predicate | Iterable} [options.predicate] * @param {import('@rdfjs/types').Quad_Object | Iterable} [options.object] * @param {import('@rdfjs/types').Quad_Graph | Iterable} [options.graph] */ - constructor({ subject, predicate, object, graph } = {}) { + constructor(rdf, { subject, predicate, object, graph } = {}) { + this.rdf = rdf + /** * @type {Partial>} */ @@ -44,9 +45,9 @@ class PatternMatcher { } if (Symbol.iterator in value) { - this.pattern[name] = rdf.termSet([...value]) + this.pattern[name] = this.rdf.termSet([...value]) } else { - this.pattern[name] = rdf.termSet([value]) + this.pattern[name] = this.rdf.termSet([value]) } } } diff --git a/packages/rdf/lib/membership.js b/packages/rdf/lib/membership.js index 37120698..33e193d4 100644 --- a/packages/rdf/lib/membership.js +++ b/packages/rdf/lib/membership.js @@ -1,6 +1,5 @@ -import rdf from 'barnard59-env' import { Transform } from 'readable-stream' -import * as ns from './namespaces.js' +import _toNamedNode from './toNamedNode.js' class AddRelations extends Transform { /** @@ -16,6 +15,7 @@ class AddRelations extends Transform { classes, }) { super({ objectMode: true }) + this.rdf = context.env this.classes = classes this.createRelation = createRelation this.additionalQuads = additionalQuads @@ -27,7 +27,7 @@ class AddRelations extends Transform { * @param {import('stream').TransformCallback} callback */ _transform(chunk, encoding, callback) { - if (chunk.predicate.equals(ns.rdf.type) && this.classes.has(chunk.object) && chunk.subject.termType === 'NamedNode') { + if (chunk.predicate.equals(this.rdf.ns.rdf.type) && this.classes.has(chunk.object) && chunk.subject.termType === 'NamedNode') { const quad = this.createRelation(chunk.subject) this.push(quad) } @@ -43,12 +43,6 @@ class AddRelations 
extends Transform { } } -/** - * @param {string|import('@rdfjs/types').NamedNode} item - * @return {import('@rdfjs/types').NamedNode} - */ -const toNamedNode = item => typeof item === 'string' ? rdf.namedNode(item) : item - /** * @this {import('barnard59-core').Context} * @param {object} options @@ -77,10 +71,11 @@ function toTarget({ throw new Error('Needs a list of classes to link') } + const toNamedNode = _toNamedNode.bind(null, this.env) return new AddRelations(this, { - createRelation: sourceUri => rdf.quad(toNamedNode(sourceUri), toNamedNode(property), toNamedNode(targetUri)), - additionalQuads: [rdf.quad(toNamedNode(targetUri), ns.rdf.type, toNamedNode(targetClass))], - classes: rdf.termSet(classes.map(toNamedNode)), + createRelation: sourceUri => this.env.quad(toNamedNode(sourceUri), toNamedNode(property), toNamedNode(targetUri)), + additionalQuads: [this.env.quad(toNamedNode(targetUri), this.env.ns.rdf.type, toNamedNode(targetClass))], + classes: this.env.termSet(classes.map(toNamedNode)), }) } @@ -112,10 +107,11 @@ function fromSource({ throw new Error('Needs a list of classes to link') } + const toNamedNode = _toNamedNode.bind(null, this.env) return new AddRelations(this, { - createRelation: targetUri => rdf.quad(toNamedNode(sourceUri), toNamedNode(property), toNamedNode(targetUri)), - additionalQuads: [rdf.quad(toNamedNode(sourceUri), ns.rdf.type, toNamedNode(sourceClass))], - classes: rdf.termSet(classes.map(toNamedNode)), + createRelation: targetUri => this.env.quad(toNamedNode(sourceUri), toNamedNode(property), toNamedNode(targetUri)), + additionalQuads: [this.env.quad(toNamedNode(sourceUri), this.env.ns.rdf.type, toNamedNode(sourceClass))], + classes: this.env.termSet(classes.map(toNamedNode)), }) } diff --git a/packages/rdf/lib/metadata/applyOptions.js b/packages/rdf/lib/metadata/applyOptions.js index 071f0b9b..69a1ea69 100644 --- a/packages/rdf/lib/metadata/applyOptions.js +++ b/packages/rdf/lib/metadata/applyOptions.js @@ -1,14 +1,13 @@ import 
rdf from 'barnard59-env' import fromStream from 'rdf-dataset-ext/fromStream.js' -import * as ns from '../namespaces.js' -import { xsd } from '../namespaces.js' +import toNamedNode from '../toNamedNode.js' import { wellKnownDatasetClasses, wellKnownDatasetClassesWithDcterms } from './datasetClasses.js' import { namedDateLiterals } from './namedDateLiterals.js' function subjectsWithDatasetType(dataset, classes) { const result = rdf.termSet() ;[...dataset] - .filter(quad => (quad.predicate.equals(ns.rdf.type) && classes.has(quad.object))) + .filter(quad => (quad.predicate.equals(rdf.ns.rdf.type) && classes.has(quad.object))) .forEach(quad => { result.add(quad.subject) }) @@ -32,11 +31,7 @@ function updateOrInsert(dataset, datasetClasses, predicate, object) { } function toDateLiteral(item) { - return typeof item === 'string' ? rdf.literal(item, xsd.dateTime) : item -} - -function toNamedNode(item) { - return typeof item === 'string' ? rdf.namedNode(item) : item + return typeof item === 'string' ? 
rdf.literal(item, rdf.ns.xsd.dateTime) : item } function resolveNamedDate(value, metadata) { @@ -56,20 +51,20 @@ async function applyOptions(quadStream, metadata = {}, options = {}) { if (options.dateModified) { const dateModifiedLiteral = resolveNamedDate(options.dateModified, metadata) - dataset = updateOrInsert(dataset, wellKnownDatasetClassesWithDcterms, ns.dcterms.modified, dateModifiedLiteral) - dataset = updateOrInsert(dataset, wellKnownDatasetClasses, ns.schema.dateModified, dateModifiedLiteral) + dataset = updateOrInsert(dataset, wellKnownDatasetClassesWithDcterms, rdf.ns.dcterms.modified, dateModifiedLiteral) + dataset = updateOrInsert(dataset, wellKnownDatasetClasses, rdf.ns.schema.dateModified, dateModifiedLiteral) } // dateCreated if (options.dateCreated) { const dateCreatedLiteral = resolveNamedDate(options.dateCreated, metadata) - dataset = updateOrInsert(dataset, wellKnownDatasetClassesWithDcterms, ns.dcterms.created, dateCreatedLiteral) - dataset = updateOrInsert(dataset, wellKnownDatasetClasses, ns.schema.dateCreated, dateCreatedLiteral) + dataset = updateOrInsert(dataset, wellKnownDatasetClassesWithDcterms, rdf.ns.dcterms.created, dateCreatedLiteral) + dataset = updateOrInsert(dataset, wellKnownDatasetClasses, rdf.ns.schema.dateCreated, dateCreatedLiteral) } // Sets graph if (options.graph) { - return rdf.dataset([...dataset].map(quad => rdf.quad(quad.subject, quad.predicate, quad.object, toNamedNode(options.graph)))) + return rdf.dataset([...dataset].map(quad => rdf.quad(quad.subject, quad.predicate, quad.object, toNamedNode(rdf, options.graph)))) } return dataset diff --git a/packages/rdf/lib/metadata/datasetClasses.js b/packages/rdf/lib/metadata/datasetClasses.js index b53b8647..131b7bff 100644 --- a/packages/rdf/lib/metadata/datasetClasses.js +++ b/packages/rdf/lib/metadata/datasetClasses.js @@ -1,15 +1,14 @@ import rdf from 'barnard59-env' -import * as ns from '../namespaces.js' const wellKnownDatasetClasses = rdf.termSet([ - 
ns.schema.Dataset, - ns.cube.Cube, + rdf.ns.schema.Dataset, + rdf.ns.cube.Cube, ], ) const wellKnownDatasetClassesWithDcterms = rdf.termSet([ - ns._void.Dataset, - ns.dcat.Dataset, + rdf.ns._void.Dataset, + rdf.ns.dcat.Dataset, ], ) diff --git a/packages/rdf/lib/metadata/namedDateLiterals.js b/packages/rdf/lib/metadata/namedDateLiterals.js index 624abc19..a57a9b98 100644 --- a/packages/rdf/lib/metadata/namedDateLiterals.js +++ b/packages/rdf/lib/metadata/namedDateLiterals.js @@ -1,24 +1,23 @@ import rdf from 'barnard59-env' -import { xsd } from '../namespaces.js' const namedDateLiterals = new Map() namedDateLiterals.set('TIME_NOW', metadata => { - return rdf.literal((new Date()).toISOString(), xsd.dateTime) + return rdf.literal((new Date()).toISOString(), rdf.ns.xsd.dateTime) }) namedDateLiterals.set('TIME_FILE_CREATION', metadata => { if (!metadata?.stats?.birthtimeMs) { throw new Error('No metadata.stats.birthtimeMs') } - return rdf.literal((new Date(metadata.stats.birthtimeMs)).toISOString(), xsd.dateTime) + return rdf.literal((new Date(metadata.stats.birthtimeMs)).toISOString(), rdf.ns.xsd.dateTime) }) namedDateLiterals.set('TIME_FILE_MODIFICATION', metadata => { if (!metadata?.stats?.mtimeMs) { throw new Error('No metadata.stats.mtimeMs') } - return rdf.literal((new Date(metadata.stats.mtimeMs)).toISOString(), xsd.dateTime) + return rdf.literal((new Date(metadata.stats.mtimeMs)).toISOString(), rdf.ns.xsd.dateTime) }) export { namedDateLiterals } diff --git a/packages/rdf/lib/namespaces.js b/packages/rdf/lib/namespaces.js deleted file mode 100644 index 985a8016..00000000 --- a/packages/rdf/lib/namespaces.js +++ /dev/null @@ -1,5 +0,0 @@ -import $rdf from 'barnard59-env' - -const { cube, rdf, rdfs, sh, xsd, _void, dcat, schema, dcterms } = $rdf.ns - -export { cube, rdf, rdfs, sh, xsd, _void, dcat, schema, dcterms } diff --git a/packages/rdf/lib/toNamedNode.js b/packages/rdf/lib/toNamedNode.js new file mode 100644 index 00000000..3d89ca47 --- /dev/null +++ 
b/packages/rdf/lib/toNamedNode.js @@ -0,0 +1,24 @@ +/** + * @overload + * @param {import('barnard59-env').Environment} rdf + * @param {string | import('@rdfjs/types').NamedNode} item + * @returns {import('@rdfjs/types').NamedNode} + */ +/** + * @overload + * @param {import('barnard59-env').Environment} rdf + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @returns {import('@rdfjs/types').NamedNode | undefined} + */ + +/** + @param {import('barnard59-env').Environment} rdf + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @returns {import('@rdfjs/types').NamedNode | undefined} + */ +export default function (rdf, item) { + if (item === undefined) { + return undefined + } + return typeof item === 'string' ? rdf.namedNode(item) : item +} diff --git a/packages/rdf/lib/voidStats.js b/packages/rdf/lib/voidStats.js index 80a38864..63f8d6c8 100644 --- a/packages/rdf/lib/voidStats.js +++ b/packages/rdf/lib/voidStats.js @@ -1,6 +1,5 @@ -import rdf from 'barnard59-env' import { Transform } from 'readable-stream' -import * as ns from './namespaces.js' +import _toNamedNode from './toNamedNode.js' /** * @typedef {(datasetUri: import('@rdfjs/types').NamedNode, index: number) => import('@rdfjs/types').NamedNode} CreatePartitionUri @@ -28,6 +27,7 @@ class VoidStats extends Transform { createPropertyPartitionUri, }) { super({ objectMode: true }) + this.rdf = context.env this.voidDatasetUri = voidDatasetUri this.includeTotals = includeTotals this.graph = graph @@ -56,7 +56,7 @@ class VoidStats extends Transform { _transform(chunk, encoding, callback) { this.totalTripleCount++ - if (chunk.predicate.equals(ns.rdf.type)) { + if (chunk.predicate.equals(this.rdf.ns.rdf.type)) { this.totalEntityCount++ for (const [key, value] of this.classPartitionsCounts) { if (chunk.object.equals(key)) { @@ -82,20 +82,20 @@ class VoidStats extends Transform { const datasetUri = toNamedNode(this.voidDatasetUri) const datasetGraph = this.graph ? 
toNamedNode(this.graph) : undefined - const stats = rdf.clownface({ - dataset: rdf.dataset(), + const stats = this.rdf.clownface({ + dataset: this.rdf.dataset(), graph: datasetGraph, }) stats .namedNode(datasetUri) - .addOut(ns.rdf.type, ns._void.Dataset) + .addOut(this.rdf.ns.rdf.type, this.rdf.ns._void.Dataset) if (this.includeTotals) { stats .namedNode(datasetUri) - .addOut(ns._void.triples, this.totalTripleCount) - .addOut(ns._void.entities, this.totalEntityCount) + .addOut(this.rdf.ns._void.triples, this.totalTripleCount) + .addOut(this.rdf.ns._void.entities, this.totalEntityCount) } if (this.classPartitionsCounts.size) { @@ -103,10 +103,10 @@ class VoidStats extends Transform { for (const [currentClass, count] of this.classPartitionsCounts) { stats .namedNode(datasetUri) - .addOut(ns._void.classPartition, this.createClassPartitionUri(datasetUri, index), partition => { + .addOut(this.rdf.ns._void.classPartition, this.createClassPartitionUri(datasetUri, index), partition => { partition - .addOut(ns._void.class, currentClass) - .addOut(ns._void.entities, count) + .addOut(this.rdf.ns._void.class, currentClass) + .addOut(this.rdf.ns._void.entities, count) }) index += 1 } @@ -117,10 +117,10 @@ class VoidStats extends Transform { for (const [currentProperty, count] of this.propertyPartitionsCounts) { stats .namedNode(datasetUri) - .addOut(ns._void.propertyPartition, this.createPropertyPartitionUri(datasetUri, index), partition => { + .addOut(this.rdf.ns._void.propertyPartition, this.createPropertyPartitionUri(datasetUri, index), partition => { partition - .addOut(ns._void.property, currentProperty) - .addOut(ns._void.entities, count) + .addOut(this.rdf.ns._void.property, currentProperty) + .addOut(this.rdf.ns._void.entities, count) }) index += 1 } @@ -137,27 +137,6 @@ class VoidStats extends Transform { } } -/** - * @overload - * @param {string | import('@rdfjs/types').NamedNode} item - * @returns {import('@rdfjs/types').NamedNode} - */ -/** - * @overload - * @param 
{string | import('@rdfjs/types').NamedNode | undefined} item - * @returns {import('@rdfjs/types').NamedNode | undefined} - */ -/** - * @param {string | import('@rdfjs/types').NamedNode | undefined} item - * @returns {import('@rdfjs/types').NamedNode | undefined} - */ -function toNamedNode(item) { - if (item === undefined) { - return undefined - } - return typeof item === 'string' ? rdf.namedNode(item) : item -} - /** * @this {import('barnard59-core').Context} * @param {object} options @@ -176,13 +155,15 @@ function graphStats({ propertyPartitions = [], includeTotals = true, graph = undefined, - createClassPartitionUri = (datasetUri, index) => rdf.namedNode(`${datasetUri.value}/classPartition/${index}`), - createPropertyPartitionUri = (datasetUri, index) => rdf.namedNode(`${datasetUri.value}/propertyPartition/${index}`), + createClassPartitionUri = (datasetUri, index) => this.env.namedNode(`${datasetUri.value}/classPartition/${index}`), + createPropertyPartitionUri = (datasetUri, index) => this.env.namedNode(`${datasetUri.value}/propertyPartition/${index}`), } = {}) { if (!voidDatasetUri) { throw new Error('Needs voidDatasetUri as parameter') } + const toNamedNode = _toNamedNode.bind(null, this.env) + return new VoidStats(this, { voidDatasetUri: toNamedNode(voidDatasetUri), classPartitions: classPartitions.map(toNamedNode), diff --git a/packages/rdf/mapMatch.js b/packages/rdf/mapMatch.js index de95143d..2676679f 100644 --- a/packages/rdf/mapMatch.js +++ b/packages/rdf/mapMatch.js @@ -18,7 +18,7 @@ import PatternMatcher from './lib/PatternMatcher.js' * @template T */ function mapMatch({ map, subject, predicate, object, graph }) { - const matcher = new PatternMatcher({ subject, predicate, object, graph }) + const matcher = new PatternMatcher(this.env, { subject, predicate, object, graph }) return new Transform({ objectMode: true, From 18f6de5c74fc851d121eae14bae641485cfee2e2 Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Fri, 12 Jan 2024 16:42:09 +0100 
Subject: [PATCH 03/28] feat: finish types of all `lib` --- packages/rdf/lib/append.js | 14 ++--- packages/rdf/lib/localFetch/localFetch.js | 55 ++++++++++++++++--- packages/rdf/lib/localFetch/lookupParser.js | 11 +++- packages/rdf/lib/membership.js | 10 +++- packages/rdf/lib/metadata/applyOptions.js | 53 +++++++++++++++--- packages/rdf/lib/metadata/datasetClasses.js | 6 ++ .../rdf/lib/metadata/namedDateLiterals.js | 19 ++++++- packages/rdf/lib/toNamedNode.js | 24 -------- packages/rdf/lib/voidStats.js | 25 ++++++++- packages/rdf/package.json | 3 +- 10 files changed, 163 insertions(+), 57 deletions(-) delete mode 100644 packages/rdf/lib/toNamedNode.js diff --git a/packages/rdf/lib/append.js b/packages/rdf/lib/append.js index 13e6e569..261a71d7 100644 --- a/packages/rdf/lib/append.js +++ b/packages/rdf/lib/append.js @@ -6,11 +6,11 @@ class MetadataAppend extends Transform { /** * @param {import('barnard59-core').Context} context * @param {string | undefined} basePath - * @param {string | URL} input + * @param {string} input * @param {{ - * dateCreated?: Date, - * dateModified?: Date, - * graph: undefined + * dateCreated?: import('./metadata/namedDateLiterals.js').NamedDateLiteral, + * dateModified?: import('./metadata/namedDateLiterals.js').NamedDateLiteral, + * graph?: string | import('@rdfjs/types').NamedNode * }} options */ constructor(context, basePath, input, options) { @@ -50,10 +50,10 @@ class MetadataAppend extends Transform { /** * @this import('barnard59-core').Context * @param {object} [options] - * @param {string | URL | undefined} [options.input] + * @param {string | undefined} [options.input] * @param {string} [options.basepath] - * @param {Date} [options.dateModified] - * @param {Date} [options.dateCreated] + * @param {import('./metadata/namedDateLiterals.js').NamedDateLiteral} [options.dateModified] + * @param {import('./metadata/namedDateLiterals.js').NamedDateLiteral} [options.dateCreated] * @param {*} [options.graph] * @return {Promise} */ diff 
--git a/packages/rdf/lib/localFetch/localFetch.js b/packages/rdf/lib/localFetch/localFetch.js index 53ab635b..f339a1d0 100644 --- a/packages/rdf/lib/localFetch/localFetch.js +++ b/packages/rdf/lib/localFetch/localFetch.js @@ -1,14 +1,29 @@ -import fsp from 'fs/promises' - -import { resolve } from 'path' -import { pathToFileURL } from 'url' +import fsp from 'node:fs/promises' +import { resolve } from 'node:path' +import { pathToFileURL } from 'node:url' import rdfFetch from '@rdfjs/fetch' import fileFetch from 'file-fetch' import { isReadableStream } from 'is-stream' import protoFetch from 'proto-fetch' import { getParserByExtension } from './lookupParser.js' -async function streamWithMetadata(input) { +/** + * @typedef {{ + * type: string; + * value?: string | URL; + * stats?: import('fs').Stats; + * }} Metadata + */ + +/** + * @typedef {{metadata: Metadata, quadStream: import('@rdfjs/types').Stream}} LocalFetchResponse + */ + +/** + * @param {import('@rdfjs/types').Stream} input + * @return {LocalFetchResponse} + */ +function streamWithMetadata(input) { return { quadStream: input, metadata: { @@ -17,6 +32,10 @@ async function streamWithMetadata(input) { } } +/** + * @param {string} input + * @return {Promise} + */ async function fetchHTTPWithMeta(input) { const url = new URL(input, import.meta.url) const res = await rdfFetch(url) @@ -29,6 +48,10 @@ async function fetchHTTPWithMeta(input) { } } +/** + * @param {import('barnard59-env').Environment} env + * @param {string} filePath + */ function guessParserForFile(env, filePath) { const parser = getParserByExtension(env, filePath) if (!parser) { @@ -37,9 +60,17 @@ function guessParserForFile(env, filePath) { return parser } +/** + * @param {import('barnard59-env').Environment} env + * @param {string} input + * @return {Promise} + */ async function fetchFileWithMeta(env, input) { const filePathURL = new URL(input, import.meta.url) const res = await fileFetch(filePathURL.toString()) + /** + * @type {any} + */ const 
stream = res.body const quadStream = await guessParserForFile(env, input).import(stream) return { @@ -52,7 +83,13 @@ async function fetchFileWithMeta(env, input) { } } -// Tries to fetch or read locally one file +/** + * Tries to fetch or read locally one file + * @this import('barnard59-core').Context + * @param {import('stream').Readable | string} input + * @param {string} [basePath] + * @return {Promise} + */ async function localFetch( input, basePath, @@ -61,11 +98,15 @@ async function localFetch( throw new Error('needs input filename or URL') } if (isReadableStream(input)) { - return streamWithMetadata(input, basePath) + return streamWithMetadata(input) } if (typeof input !== 'string') { throw new Error(`needs input filename or URL, got [${typeof input}]`) } + /** + * @type {(input: (string | URL)) => Promise} + */ + // @ts-expect-error const fetch = protoFetch({ file: fetchFileWithMeta.bind(null, this.env), http: fetchHTTPWithMeta, diff --git a/packages/rdf/lib/localFetch/lookupParser.js b/packages/rdf/lib/localFetch/lookupParser.js index 1cc91608..bddbb316 100644 --- a/packages/rdf/lib/localFetch/lookupParser.js +++ b/packages/rdf/lib/localFetch/lookupParser.js @@ -1,8 +1,17 @@ import mime from 'mime-types' +/** + * @param {import('barnard59-env').Environment} env + * @param {string | URL} fileUrl + * @returns {import('@rdfjs/types').Sink | undefined} + */ function getParserByExtension(env, fileUrl) { const mimeType = mime.lookup(fileUrl.toString()) - return env.formats.parsers.get(mimeType) + if (mimeType) { + return env.formats.parsers.get(mimeType) + } + + return undefined } export { diff --git a/packages/rdf/lib/membership.js b/packages/rdf/lib/membership.js index 33e193d4..d5da68a5 100644 --- a/packages/rdf/lib/membership.js +++ b/packages/rdf/lib/membership.js @@ -1,5 +1,5 @@ +import rdf from 'barnard59-env' import { Transform } from 'readable-stream' -import _toNamedNode from './toNamedNode.js' class AddRelations extends Transform { /** @@ -43,6 
+43,12 @@ class AddRelations extends Transform { } } +/** + * @param {string|import('@rdfjs/types').NamedNode} item + * @return {import('@rdfjs/types').NamedNode} + */ +const toNamedNode = item => typeof item === 'string' ? rdf.namedNode(item) : item + /** * @this {import('barnard59-core').Context} * @param {object} options @@ -71,7 +77,6 @@ function toTarget({ throw new Error('Needs a list of classes to link') } - const toNamedNode = _toNamedNode.bind(null, this.env) return new AddRelations(this, { createRelation: sourceUri => this.env.quad(toNamedNode(sourceUri), toNamedNode(property), toNamedNode(targetUri)), additionalQuads: [this.env.quad(toNamedNode(targetUri), this.env.ns.rdf.type, toNamedNode(targetClass))], @@ -107,7 +112,6 @@ function fromSource({ throw new Error('Needs a list of classes to link') } - const toNamedNode = _toNamedNode.bind(null, this.env) return new AddRelations(this, { createRelation: targetUri => this.env.quad(toNamedNode(sourceUri), toNamedNode(property), toNamedNode(targetUri)), additionalQuads: [this.env.quad(toNamedNode(sourceUri), this.env.ns.rdf.type, toNamedNode(sourceClass))], diff --git a/packages/rdf/lib/metadata/applyOptions.js b/packages/rdf/lib/metadata/applyOptions.js index 69a1ea69..b9d9b148 100644 --- a/packages/rdf/lib/metadata/applyOptions.js +++ b/packages/rdf/lib/metadata/applyOptions.js @@ -1,10 +1,16 @@ import rdf from 'barnard59-env' -import fromStream from 'rdf-dataset-ext/fromStream.js' -import toNamedNode from '../toNamedNode.js' import { wellKnownDatasetClasses, wellKnownDatasetClassesWithDcterms } from './datasetClasses.js' import { namedDateLiterals } from './namedDateLiterals.js' +/** + * @param {import('@rdfjs/types').DatasetCore} dataset + * @param {Set} classes + * @return {Set} + */ function subjectsWithDatasetType(dataset, classes) { + /** + * @type {Set} + */ const result = rdf.termSet() ;[...dataset] .filter(quad => (quad.predicate.equals(rdf.ns.rdf.type) && classes.has(quad.object))) @@ -14,6 +20,13 
@@ function subjectsWithDatasetType(dataset, classes) { return result } +/** + * @param {import('@rdfjs/types').DatasetCore} dataset + * @param {Set} datasetClasses + * @param {import('@rdfjs/types').Quad_Predicate} predicate + * @param {import('@rdfjs/types').Quad_Object} object + * @return {import('@rdfjs/types').DatasetCore} + */ function updateOrInsert(dataset, datasetClasses, predicate, object) { const targetSubjects = subjectsWithDatasetType(dataset, datasetClasses) @@ -30,22 +43,44 @@ function updateOrInsert(dataset, datasetClasses, predicate, object) { return dataset } -function toDateLiteral(item) { - return typeof item === 'string' ? rdf.literal(item, rdf.ns.xsd.dateTime) : item +/** + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @return {import('@rdfjs/types').NamedNode | undefined} + */ +function toNamedNode(item) { + return typeof item === 'string' ? rdf.namedNode(item) : item } +/** + * @param {import('@rdfjs/types').Literal | import('./namedDateLiterals.js').NamedDateLiteral} value + * @param {import('./namedDateLiterals.js').Metadata} metadata + * @return {import('@rdfjs/types').Literal} + */ function resolveNamedDate(value, metadata) { - return namedDateLiterals.has(value) ? namedDateLiterals.get(value)(metadata) : toDateLiteral(value) + if (typeof value !== 'string') { + return value + } + + const factory = namedDateLiterals.get(value) + + return factory ? 
factory(metadata) : rdf.literal(value, rdf.ns.xsd.dateTime) } /** * @param {import('@rdfjs/types').Stream & import('stream').EventEmitter} quadStream - * @param metadata - * @param options + * @param {import('./namedDateLiterals.js').Metadata} [metadata] + * @param {{ + * dateModified?: import('./namedDateLiterals.js').NamedDateLiteral; + * dateCreated?: import('./namedDateLiterals.js').NamedDateLiteral; + * graph?: string | import('@rdfjs/types').NamedNode + * }} [options] * @return {Promise} */ async function applyOptions(quadStream, metadata = {}, options = {}) { - let dataset = await fromStream(rdf.dataset(), quadStream) + /** + * @type {import('@rdfjs/types').DatasetCore} + */ + let dataset = await rdf.dataset().import(quadStream) // dateModified if (options.dateModified) { @@ -64,7 +99,7 @@ async function applyOptions(quadStream, metadata = {}, options = {}) { // Sets graph if (options.graph) { - return rdf.dataset([...dataset].map(quad => rdf.quad(quad.subject, quad.predicate, quad.object, toNamedNode(rdf, options.graph)))) + return rdf.dataset([...dataset].map(quad => rdf.quad(quad.subject, quad.predicate, quad.object, toNamedNode(options.graph)))) } return dataset diff --git a/packages/rdf/lib/metadata/datasetClasses.js b/packages/rdf/lib/metadata/datasetClasses.js index 131b7bff..43dcedda 100644 --- a/packages/rdf/lib/metadata/datasetClasses.js +++ b/packages/rdf/lib/metadata/datasetClasses.js @@ -1,11 +1,17 @@ import rdf from 'barnard59-env' +/** + * @type {Set} + */ const wellKnownDatasetClasses = rdf.termSet([ rdf.ns.schema.Dataset, rdf.ns.cube.Cube, ], ) +/** + * @type {Set} + */ const wellKnownDatasetClassesWithDcterms = rdf.termSet([ rdf.ns._void.Dataset, rdf.ns.dcat.Dataset, diff --git a/packages/rdf/lib/metadata/namedDateLiterals.js b/packages/rdf/lib/metadata/namedDateLiterals.js index a57a9b98..5e4322c0 100644 --- a/packages/rdf/lib/metadata/namedDateLiterals.js +++ b/packages/rdf/lib/metadata/namedDateLiterals.js @@ -1,8 +1,25 @@ import rdf 
from 'barnard59-env' +/** + * @typedef {{ + * type?: string; + * stats?: { + * birthtimeMs?: number; + * mtimeMs?: number; + * } + * }} Metadata + */ + +/** + * @typedef {'TIME_NOW' | 'TIME_FILE_CREATION' | 'TIME_FILE_MODIFICATION'} NamedDateLiteral + */ + +/** + * @type {Map import('@rdfjs/types').Literal>} + */ const namedDateLiterals = new Map() -namedDateLiterals.set('TIME_NOW', metadata => { +namedDateLiterals.set('TIME_NOW', () => { return rdf.literal((new Date()).toISOString(), rdf.ns.xsd.dateTime) }) diff --git a/packages/rdf/lib/toNamedNode.js b/packages/rdf/lib/toNamedNode.js deleted file mode 100644 index 3d89ca47..00000000 --- a/packages/rdf/lib/toNamedNode.js +++ /dev/null @@ -1,24 +0,0 @@ -/** - * @overload - * @param {import('barnard59-env').Environment} rdf - * @param {string | import('@rdfjs/types').NamedNode} item - * @returns {import('@rdfjs/types').NamedNode} - */ -/** - * @overload - * @param {import('barnard59-env').Environment} rdf - * @param {string | import('@rdfjs/types').NamedNode | undefined} item - * @returns {import('@rdfjs/types').NamedNode | undefined} - */ - -/** - @param {import('barnard59-env').Environment} rdf - * @param {string | import('@rdfjs/types').NamedNode | undefined} item - * @returns {import('@rdfjs/types').NamedNode | undefined} - */ -export default function (rdf, item) { - if (item === undefined) { - return undefined - } - return typeof item === 'string' ? 
rdf.namedNode(item) : item -} diff --git a/packages/rdf/lib/voidStats.js b/packages/rdf/lib/voidStats.js index 63f8d6c8..d851428c 100644 --- a/packages/rdf/lib/voidStats.js +++ b/packages/rdf/lib/voidStats.js @@ -1,5 +1,5 @@ +import rdf from 'barnard59-env' import { Transform } from 'readable-stream' -import _toNamedNode from './toNamedNode.js' /** * @typedef {(datasetUri: import('@rdfjs/types').NamedNode, index: number) => import('@rdfjs/types').NamedNode} CreatePartitionUri @@ -137,6 +137,27 @@ class VoidStats extends Transform { } } +/** + * @overload + * @param {string | import('@rdfjs/types').NamedNode} item + * @returns {import('@rdfjs/types').NamedNode} + */ +/** + * @overload + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @returns {import('@rdfjs/types').NamedNode | undefined} + */ +/** + * @param {string | import('@rdfjs/types').NamedNode | undefined} item + * @returns {import('@rdfjs/types').NamedNode | undefined} + */ +function toNamedNode(item) { + if (item === undefined) { + return undefined + } + return typeof item === 'string' ? 
rdf.namedNode(item) : item +} + /** * @this {import('barnard59-core').Context} * @param {object} options @@ -162,8 +183,6 @@ function graphStats({ throw new Error('Needs voidDatasetUri as parameter') } - const toNamedNode = _toNamedNode.bind(null, this.env) - return new VoidStats(this, { voidDatasetUri: toNamedNode(voidDatasetUri), classPartitions: classPartitions.map(toNamedNode), diff --git a/packages/rdf/package.json b/packages/rdf/package.json index d88e451d..29c06570 100644 --- a/packages/rdf/package.json +++ b/packages/rdf/package.json @@ -6,7 +6,7 @@ "type": "module", "scripts": { "test": "mocha", - "prebuild": "rimraf *.d.ts lib/*.d.ts", + "prebuild": "rimraf *.d.ts lib/**/*.d.ts lib/*.d.ts", "build": "tsc", "prepack": "npm run build" }, @@ -33,7 +33,6 @@ "lodash": "^4.17.21", "mime-types": "^2.1.35", "proto-fetch": "^1.0.0", - "rdf-dataset-ext": "^1.0.1", "rdf-literal": "^1.3.0", "rdf-transform-graph-imports": "^0.2.2", "rdf-transform-triple-to-quad": "^2.0.0", From b279ba2dbca5bf117dc07f98ebecf94f8603209c Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 15 Jan 2024 14:12:19 +0100 Subject: [PATCH 04/28] add fetch-constraint and fetch-cube --- packages/cube/manifest.ttl | 14 +++ packages/cube/pipeline/fetch-constraint.ttl | 58 +++++++++++ packages/cube/pipeline/fetch-cube.ttl | 107 ++++++++++++++++++++ 3 files changed, 179 insertions(+) create mode 100644 packages/cube/pipeline/fetch-constraint.ttl create mode 100644 packages/cube/pipeline/fetch-cube.ttl diff --git a/packages/cube/manifest.ttl b/packages/cube/manifest.ttl index f45c4011..34b544ed 100644 --- a/packages/cube/manifest.ttl +++ b/packages/cube/manifest.ttl @@ -48,3 +48,17 @@ rdfs:label "Validate input observations against cube constraint" ; b59:source "barnard59-cube/pipeline/check-observations.ttl" ; . + + + a b59:CliCommand ; + b59:command "fetch-constraint" ; + rdfs:label "Retrieves cube constraint from SPARQL endpoint" ; + b59:source "barnard59-cube/pipeline/fetch-constraint.ttl" ; +. 
+ + + a b59:CliCommand ; + b59:command "fetch-cube" ; + rdfs:label "Retrieves cube with observations from SPARQL endpoint" ; + b59:source "barnard59-cube/pipeline/fetch-cube.ttl" ; +. diff --git a/packages/cube/pipeline/fetch-constraint.ttl b/packages/cube/pipeline/fetch-constraint.ttl new file mode 100644 index 00000000..42c5750c --- /dev/null +++ b/packages/cube/pipeline/fetch-constraint.ttl @@ -0,0 +1,58 @@ +@prefix code: . +@prefix p: . +@prefix sparql: . +@prefix ntriples: . +@prefix base: . +@prefix rdfs: . + +@base . + +_:endpoint a p:Variable ; + p:name "endpoint" ; + rdfs:label "SPARQL endpoint" ; +. + +_:cube a p:Variable ; + p:name "cube" ; + rdfs:label "cube URI" ; +. + + a p:Pipeline , p:Readable ; + p:variables [ p:variable _:endpoint, _:cube ] ; + p:steps + [ + p:stepList + ( + _:queryConstraint + [ ntriples:serialize () ] + ) + ] +. + + +_:queryConstraint a p:Pipeline, p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ # same query as in fetch-metadata (TODO: load both from file once https://github.com/zazuko/barnard59/issues/257 is available) + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + #pragma describe.strategy cbd + + PREFIX cube: + + DESCRIBE ?s + WHERE { + <${cube}> cube:observationConstraint ?s . + } + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . + \ No newline at end of file diff --git a/packages/cube/pipeline/fetch-cube.ttl b/packages/cube/pipeline/fetch-cube.ttl new file mode 100644 index 00000000..24a2fbb8 --- /dev/null +++ b/packages/cube/pipeline/fetch-cube.ttl @@ -0,0 +1,107 @@ +@prefix code: . +@prefix p: . +@prefix sparql: . +@prefix ntriples: . +@prefix base: . +@prefix rdfs: . + +@base . + +_:endpoint a p:Variable ; + p:name "endpoint" ; + rdfs:label "SPARQL endpoint" ; +. + +_:cube a p:Variable ; + p:name "cube" ; + rdfs:label "cube URI" ; +. 
+ + a p:Pipeline , p:Readable ; + p:variables [ p:variable _:endpoint, _:cube ] ; + p:steps + [ + p:stepList + ( + [ base:concat\/object ( + _:queryCube + _:queryObservationSet + _:queryObservations + ) ] + [ ntriples:serialize () ] + ) + ] +. + + +_:queryCube a p:Pipeline , p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ # same query as in fetch-observations (TODO: load both from file once https://github.com/zazuko/barnard59/issues/257 is available) + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + #pragma describe.strategy cbd + + DESCRIBE <${cube}> + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . + +_:queryObservationSet a p:Pipeline, p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + PREFIX cube: + + CONSTRUCT { ?s cube:observation ?o } + WHERE { + <${cube}> cube:observationSet ?s . + ?s cube:observation ?o . + } + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . + + _:queryObservations a p:Pipeline, p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ # same query as in fetch-observations (TODO: load both from file once https://github.com/zazuko/barnard59/issues/257 is available) + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + PREFIX cube: + + CONSTRUCT { ?s ?p ?o } + WHERE { + <${cube}> cube:observationSet/cube:observation ?s . + ?s ?p ?o + } + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . 
+ From c53483fa04e9607f626f6db8b7cdc3062eb0e8b9 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 15 Jan 2024 14:26:15 +0100 Subject: [PATCH 05/28] add report-summary --- package-lock.json | 47 ++++++++++++----------- packages/cube/lib/report.js | 29 ++++++++++++++ packages/cube/manifest.ttl | 7 ++++ packages/cube/package.json | 9 +++-- packages/cube/pipeline/report-summary.ttl | 29 ++++++++++++++ 5 files changed, 94 insertions(+), 27 deletions(-) create mode 100644 packages/cube/lib/report.js create mode 100644 packages/cube/pipeline/report-summary.ttl diff --git a/package-lock.json b/package-lock.json index e91ff40b..bcf73a12 100644 --- a/package-lock.json +++ b/package-lock.json @@ -27023,7 +27023,7 @@ }, "packages/base": { "name": "barnard59-base", - "version": "2.2.0", + "version": "2.3.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ -27042,11 +27042,11 @@ "@types/readable-stream": "^4.0.10", "@types/readable-to-readable": "^0.1.0", "@types/through2": "^2.0.41", - "barnard59-core": "^5.0.0", + "barnard59-core": "^5.3.0", "chai": "^4.3.10", "get-stream": "^6.0.1", "into-stream": "^7.0.0", - "isstream": "^0.1.2", + "is-stream": "^3.0.0", "rimraf": "^3.0.2", "sinon": "^17.0.0" }, @@ -27122,7 +27122,7 @@ }, "packages/cli": { "name": "barnard59", - "version": "4.3.2", + "version": "4.4.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.0", @@ -27134,8 +27134,8 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "barnard59-core": "5.2.0", - "barnard59-env": "1.2.0", + "barnard59-core": "5.3.0", + "barnard59-env": "1.2.1", "commander": "^11.0.0", "find-up": "^7.0.0", "is-graph-pointer": "^2.1.0", @@ -27154,9 +27154,9 @@ "@types/lodash": "^4.14.202", "@types/readable-stream": "^4.0.10", "approvals": "^6.2.2", - "barnard59-base": "^2.1.0", + "barnard59-base": "^2.3.0", "barnard59-formats": "^2.1.0", - "barnard59-graph-store": "^5.0.0", + 
"barnard59-graph-store": "^5.1.0", "barnard59-http": "^2.0.0", "barnard59-shell": "^0.1.0", "barnard59-test-support": "^0.0.3", @@ -27293,7 +27293,7 @@ }, "packages/core": { "name": "barnard59-core", - "version": "5.2.0", + "version": "5.3.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ -27312,7 +27312,7 @@ "devDependencies": { "@rdfjs/namespace": "^2.0.0", "@types/readable-stream": "^4.0.9", - "barnard59-env": "^1.2.0", + "barnard59-env": "^1.2.1", "barnard59-http": "^2.0.0", "barnard59-test-support": "^0.0.3", "chai": "^4.3.7", @@ -27394,7 +27394,9 @@ "version": "1.2.0", "license": "MIT", "dependencies": { + "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.2.0", + "barnard59-env": "^1.2.0", "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", "barnard59-rdf": "^3.3.0", @@ -27403,12 +27405,11 @@ "external-merge-sort": "^0.1.3", "lodash": "^4.17.21", "rdf-literal": "^1.3.0", + "rdf-validate-shacl": "^0.5.1", "readable-stream": "3 - 4", "through2": "^4.0.2" }, "devDependencies": { - "@rdfjs/to-ntriples": "^2.0.0", - "barnard59-env": "^1.2.0", "chai": "^4.3.7", "get-stream": "^6.0.1", "is-stream": "^3.0.0", @@ -27417,7 +27418,7 @@ }, "packages/env": { "name": "barnard59-env", - "version": "1.2.0", + "version": "1.2.1", "license": "MIT", "dependencies": { "@zazuko/env-node": "^1.0.3", @@ -27603,10 +27604,10 @@ }, "packages/graph-store": { "name": "barnard59-graph-store", - "version": "5.0.0", + "version": "5.1.0", "license": "MIT", "dependencies": { - "barnard59-base": "^2.1.0", + "barnard59-base": "^2.3.0", "barnard59-rdf": "^3.0.0", "duplex-to": "^1.0.0", "onetime": "^6.0.0", @@ -27873,11 +27874,11 @@ }, "packages/shacl": { "name": "barnard59-shacl", - "version": "1.2.0", + "version": "1.3.0", "license": "MIT", "dependencies": { "@rdfjs/fetch": "^3.0.0", - "barnard59-base": "^2.2.0", + "barnard59-base": "^2.3.0", "barnard59-formats": "^2.1.0", "barnard59-rdf": "^3.3.0", "is-stream": "^3.0.0", @@ -27889,8 +27890,8 @@ 
"@tpluscode/rdf-string": "^1.1.0", "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", - "barnard59-core": "^5.2.0", - "barnard59-env": "^1.2.0", + "barnard59-core": "^5.3.0", + "barnard59-env": "^1.2.1", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", @@ -28019,11 +28020,11 @@ }, "test/e2e": { "name": "barnard59-test-e2e", - "version": "0.0.5", + "version": "0.1.0", "dependencies": { - "barnard59-base": "^2.0.1", - "barnard59-core": "^5.2.0", - "barnard59-env": "^1.2.0", + "barnard59-base": "^2.3.0", + "barnard59-core": "^5.3.0", + "barnard59-env": "^1.2.1", "barnard59-formats": "^2.1.0", "barnard59-http": "^2.0.0", "barnard59-test-support": "^0.0.3", diff --git a/packages/cube/lib/report.js b/packages/cube/lib/report.js new file mode 100644 index 00000000..f4a3ba69 --- /dev/null +++ b/packages/cube/lib/report.js @@ -0,0 +1,29 @@ +import termToNt from '@rdfjs/to-ntriples' +import rdf from 'barnard59-env' +import ValidationReport from 'rdf-validate-shacl/src/validation-report.js' + +function validationResultToString(result) { + const severity = result.severity.value.split('#')[1] + const message = result.message.map(m => m.value).join(' ') + const path = termToNt(result.path) + const focusNode = termToNt(result.focusNode) + const sourceConstraintComponent = result.sourceConstraintComponent.value.split('#')[1] + const sourceShape = termToNt(result.sourceShape) + + return `${severity} of ${sourceConstraintComponent}: "${message}" with path ${path} at focus node ${focusNode} (source: ${sourceShape})` +} + +function includeNestedResult(result) { + const nestedResult = Object.keys(result.detail).length ? 
result.detail.map(includeNestedResult).flat() : [] + return [result].concat(nestedResult).flat() +} + +function getMessages(report) { + return report.results.flatMap(includeNestedResult) + .map(validationResultToString) + .map(message => message + '\n') +} + +export function getSummary(dataset) { + return getMessages(new ValidationReport(rdf.clownface({ dataset }))) +} diff --git a/packages/cube/manifest.ttl b/packages/cube/manifest.ttl index 34b544ed..b38b5d22 100644 --- a/packages/cube/manifest.ttl +++ b/packages/cube/manifest.ttl @@ -62,3 +62,10 @@ rdfs:label "Retrieves cube with observations from SPARQL endpoint" ; b59:source "barnard59-cube/pipeline/fetch-cube.ttl" ; . + + + a b59:CliCommand ; + b59:command "report-summary" ; + rdfs:label "Human-readable summary of SHACL validation report" ; + b59:source "barnard59-cube/pipeline/report-summary.ttl" ; +. diff --git a/packages/cube/package.json b/packages/cube/package.json index f55853fa..56b713c4 100644 --- a/packages/cube/package.json +++ b/packages/cube/package.json @@ -20,24 +20,25 @@ }, "homepage": "https://github.com/zazuko/barnard59", "dependencies": { + "barnard59-env": "^1.2.0", + "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.2.0", "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", "barnard59-rdf": "^3.3.0", - "barnard59-sparql": "^2.1.1", "barnard59-shacl": "^1.2.0", + "barnard59-sparql": "^2.1.1", "external-merge-sort": "^0.1.3", "lodash": "^4.17.21", "rdf-literal": "^1.3.0", + "rdf-validate-shacl": "^0.5.1", "readable-stream": "3 - 4", "through2": "^4.0.2" }, "devDependencies": { - "@rdfjs/to-ntriples": "^2.0.0", - "barnard59-env": "^1.2.0", "chai": "^4.3.7", - "is-stream": "^3.0.0", "get-stream": "^6.0.1", + "is-stream": "^3.0.0", "shelljs": "^0.8.5" }, "mocha": { diff --git a/packages/cube/pipeline/report-summary.ttl b/packages/cube/pipeline/report-summary.ttl new file mode 100644 index 00000000..9f636d5e --- /dev/null +++ b/packages/cube/pipeline/report-summary.ttl @@ -0,0 +1,29 @@ 
+@prefix code: . +@prefix p: . +@prefix shacl: . +@prefix base: . +@prefix n3: . +@prefix ntriples: . +@prefix rdf: . +@prefix rdfs: . + +@base . + + + a p:Pipeline , p:Readable ; + p:steps + [ + p:stepList ( + [ base:stdin () ] + [ n3:parse () ] + [ rdf:getDataset () ] + [ base:map ([ + a code:EcmaScriptModule ; + code:link + ]) + ] + [ base:flatten () ] + ) + ] +. + From e2705c85502262f381756682e4d9cc5577a13087 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 15 Jan 2024 19:42:26 +0100 Subject: [PATCH 06/28] add in-memory SPARQL --- .changeset/six-flowers-look.md | 5 ++ package-lock.json | 69 ++++++++++++++++--------- packages/sparql/inMemory.js | 22 ++++++++ packages/sparql/manifest.ttl | 16 +++++- packages/sparql/package.json | 4 +- packages/sparql/test/construct.test.js | 6 +-- packages/sparql/test/inMemory.test.js | 70 ++++++++++++++++++++++++++ packages/sparql/test/select.test.js | 6 +-- 8 files changed, 167 insertions(+), 31 deletions(-) create mode 100644 .changeset/six-flowers-look.md create mode 100644 packages/sparql/inMemory.js create mode 100644 packages/sparql/test/inMemory.test.js diff --git a/.changeset/six-flowers-look.md b/.changeset/six-flowers-look.md new file mode 100644 index 00000000..16055a3b --- /dev/null +++ b/.changeset/six-flowers-look.md @@ -0,0 +1,5 @@ +--- +"barnard59-sparql": minor +--- + +Add In-Memory SPARQL operations diff --git a/package-lock.json b/package-lock.json index e91ff40b..45cd8bbc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14972,7 +14972,8 @@ }, "node_modules/is-stream": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, @@ -22119,6 +22120,11 @@ "dev": true, "license": "MIT" }, + "node_modules/oxigraph": { + "version": "0.4.0-alpha.2", + "resolved": 
"https://registry.npmjs.org/oxigraph/-/oxigraph-0.4.0-alpha.2.tgz", + "integrity": "sha512-7f3RhLkMV6ERIZP5XyNqonbHSN5Obs1JZts63JqxrGYnfysxHtQxnC7c7/NJw2xNXHLFt4ErB35sVu8rdY+79A==" + }, "node_modules/p-defer": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", @@ -27023,7 +27029,7 @@ }, "packages/base": { "name": "barnard59-base", - "version": "2.2.0", + "version": "2.3.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ -27042,11 +27048,11 @@ "@types/readable-stream": "^4.0.10", "@types/readable-to-readable": "^0.1.0", "@types/through2": "^2.0.41", - "barnard59-core": "^5.0.0", + "barnard59-core": "^5.3.0", "chai": "^4.3.10", "get-stream": "^6.0.1", "into-stream": "^7.0.0", - "isstream": "^0.1.2", + "is-stream": "^3.0.0", "rimraf": "^3.0.2", "sinon": "^17.0.0" }, @@ -27122,7 +27128,7 @@ }, "packages/cli": { "name": "barnard59", - "version": "4.3.2", + "version": "4.4.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.0", @@ -27134,8 +27140,8 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "barnard59-core": "5.2.0", - "barnard59-env": "1.2.0", + "barnard59-core": "5.3.0", + "barnard59-env": "1.2.1", "commander": "^11.0.0", "find-up": "^7.0.0", "is-graph-pointer": "^2.1.0", @@ -27154,9 +27160,9 @@ "@types/lodash": "^4.14.202", "@types/readable-stream": "^4.0.10", "approvals": "^6.2.2", - "barnard59-base": "^2.1.0", + "barnard59-base": "^2.3.0", "barnard59-formats": "^2.1.0", - "barnard59-graph-store": "^5.0.0", + "barnard59-graph-store": "^5.1.0", "barnard59-http": "^2.0.0", "barnard59-shell": "^0.1.0", "barnard59-test-support": "^0.0.3", @@ -27293,7 +27299,7 @@ }, "packages/core": { "name": "barnard59-core", - "version": "5.2.0", + "version": "5.3.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ -27312,7 +27318,7 @@ "devDependencies": { "@rdfjs/namespace": "^2.0.0", 
"@types/readable-stream": "^4.0.9", - "barnard59-env": "^1.2.0", + "barnard59-env": "^1.2.1", "barnard59-http": "^2.0.0", "barnard59-test-support": "^0.0.3", "chai": "^4.3.7", @@ -27417,7 +27423,7 @@ }, "packages/env": { "name": "barnard59-env", - "version": "1.2.0", + "version": "1.2.1", "license": "MIT", "dependencies": { "@zazuko/env-node": "^1.0.3", @@ -27603,10 +27609,10 @@ }, "packages/graph-store": { "name": "barnard59-graph-store", - "version": "5.0.0", + "version": "5.1.0", "license": "MIT", "dependencies": { - "barnard59-base": "^2.1.0", + "barnard59-base": "^2.3.0", "barnard59-rdf": "^3.0.0", "duplex-to": "^1.0.0", "onetime": "^6.0.0", @@ -27873,11 +27879,11 @@ }, "packages/shacl": { "name": "barnard59-shacl", - "version": "1.2.0", + "version": "1.3.0", "license": "MIT", "dependencies": { "@rdfjs/fetch": "^3.0.0", - "barnard59-base": "^2.2.0", + "barnard59-base": "^2.3.0", "barnard59-formats": "^2.1.0", "barnard59-rdf": "^3.3.0", "is-stream": "^3.0.0", @@ -27889,8 +27895,8 @@ "@tpluscode/rdf-string": "^1.1.0", "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", - "barnard59-core": "^5.2.0", - "barnard59-env": "^1.2.0", + "barnard59-core": "^5.3.0", + "barnard59-env": "^1.2.1", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", @@ -27919,19 +27925,36 @@ "license": "MIT", "dependencies": { "duplex-to": "^1.0.1", + "oxigraph": "^0.4.0-alpha.2", + "readable-stream": "^4.5.2", "sparql-http-client": "^2.4.0" }, "devDependencies": { "@tpluscode/rdf-string": "^1.0.3", "@zazuko/env": "^1.0.0", "get-stream": "^6.0.0", - "isstream": "^0.1.2", + "is-stream": "^3.0.0", "nock": "^13.1.1" }, "engines": { "node": ">= 14.0.0" } }, + "packages/sparql/node_modules/readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + 
"abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "packages/validation": { "name": "barnard59-validation", "version": "0.4.1", @@ -28019,11 +28042,11 @@ }, "test/e2e": { "name": "barnard59-test-e2e", - "version": "0.0.5", + "version": "0.1.0", "dependencies": { - "barnard59-base": "^2.0.1", - "barnard59-core": "^5.2.0", - "barnard59-env": "^1.2.0", + "barnard59-base": "^2.3.0", + "barnard59-core": "^5.3.0", + "barnard59-env": "^1.2.1", "barnard59-formats": "^2.1.0", "barnard59-http": "^2.0.0", "barnard59-test-support": "^0.0.3", diff --git a/packages/sparql/inMemory.js b/packages/sparql/inMemory.js new file mode 100644 index 00000000..f21a22ab --- /dev/null +++ b/packages/sparql/inMemory.js @@ -0,0 +1,22 @@ +import { Transform } from 'readable-stream' +import oxigraph from 'oxigraph' + +export const update = sparql => + new Transform({ + objectMode: true, + transform: (chunk, encoding, callback) => { + const store = new oxigraph.Store([...chunk]) + store.update(sparql) + callback(null, store.match()) + }, + }) + +export const query = sparql => + new Transform({ + objectMode: true, + transform: (chunk, encoding, callback) => { + const store = new oxigraph.Store([...chunk]) + const result = store.query(sparql) + callback(null, result) + }, + }) diff --git a/packages/sparql/manifest.ttl b/packages/sparql/manifest.ttl index 2e927d1a..23870dde 100644 --- a/packages/sparql/manifest.ttl +++ b/packages/sparql/manifest.ttl @@ -4,7 +4,7 @@ @prefix rdfs: . a p:Operation, p:ReadableObjectMode; - rdfs:label "SPARQL Select"; + rdfs:label "SPARQL Construct"; rdfs:comment "Runs the given CONSTRUCT query against the given endpoint parses the result."; code:implementedBy [ a code:EcmaScriptModule; code:link @@ -16,3 +16,17 @@ code:implementedBy [ a code:EcmaScriptModule; code:link ]. 
+ + a p:Operation, p:ReadableObjectMode; + rdfs:label "SPARQL in-memory Update"; + rdfs:comment "Runs the given DELETE/INSERT command against each input chunk."; + code:implementedBy [ a code:EcmaScriptModule; + code:link + ]. + + a p:Operation, p:ReadableObjectMode; + rdfs:label "SPARQL in-memory Query"; + rdfs:comment "Runs the given query against each input chunk."; + code:implementedBy [ a code:EcmaScriptModule; + code:link + ]. diff --git a/packages/sparql/package.json b/packages/sparql/package.json index 742da384..746be0f6 100644 --- a/packages/sparql/package.json +++ b/packages/sparql/package.json @@ -19,13 +19,15 @@ "homepage": "https://github.com/zazuko/barnard59-sparql", "dependencies": { "duplex-to": "^1.0.1", + "oxigraph": "^0.4.0-alpha.2", + "readable-stream": "^4.5.2", "sparql-http-client": "^2.4.0" }, "devDependencies": { "@tpluscode/rdf-string": "^1.0.3", "@zazuko/env": "^1.0.0", "get-stream": "^6.0.0", - "isstream": "^0.1.2", + "is-stream": "^3.0.0", "nock": "^13.1.1" }, "engines": { diff --git a/packages/sparql/test/construct.test.js b/packages/sparql/test/construct.test.js index 164c721b..330529da 100644 --- a/packages/sparql/test/construct.test.js +++ b/packages/sparql/test/construct.test.js @@ -1,6 +1,6 @@ import { strictEqual } from 'assert' import getStream from 'get-stream' -import { isReadable, isWritable } from 'isstream' +import { isReadableStream, isWritableStream } from 'is-stream' import nock from 'nock' import rdf from '@zazuko/env' import { turtle } from '@tpluscode/rdf-string' @@ -22,8 +22,8 @@ describe('construct', () => { const result = await construct({ endpoint, query }) - strictEqual(isReadable(result), true) - strictEqual(isWritable(result), false) + strictEqual(isReadableStream(result), true) + strictEqual(isWritableStream(result), false) }) it('should send a GET request', async () => { diff --git a/packages/sparql/test/inMemory.test.js b/packages/sparql/test/inMemory.test.js new file mode 100644 index 00000000..d91362d4 --- 
/dev/null +++ b/packages/sparql/test/inMemory.test.js @@ -0,0 +1,70 @@ +import { strictEqual } from 'assert' +import getStream from 'get-stream' +import { isReadableStream, isWritableStream } from 'is-stream' +import { Readable } from 'readable-stream' +import rdf from '@zazuko/env' +import { query, update } from '../inMemory.js' +import * as ns from './support/namespaces.js' + +describe('query', () => { + it('should be a function', () => { + strictEqual(typeof query, 'function') + }) + + it('should return a readable and writable stream', () => { + const construct = 'CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }' + + const result = query(construct) + + strictEqual(isReadableStream(result), true) + strictEqual(isWritableStream(result), true) + }) + + it('should CONSTRUCT quads', async () => { + const chunk1 = [ + rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('0')), + rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('1')), + ] + const chunk2 = [ + rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('2')), + ] + + const construct = query('CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }') + const pipeline = Readable.from([chunk1, chunk2]).pipe(construct) + const result = await getStream.array(pipeline) + + strictEqual(result.length, 2) + strictEqual(result.flat().length, 3) + }) +}) + +describe('update', () => { + it('should be a function', () => { + strictEqual(typeof update, 'function') + }) + + it('should return a readable and writable stream', () => { + const construct = 'DELETE { ?s ?p ?o } WHERE { ?s ?p ?o }' + + const result = update(construct) + + strictEqual(isReadableStream(result), true) + strictEqual(isWritableStream(result), true) + }) + it('should UPDATE quads', async () => { + const chunk1 = [ + rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('0')), + rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('1')), + ] + const chunk2 = [ + rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('2')), + ] + + const command = update('DELETE { ?s ?p ?o } WHERE { ?s ?p ?o }') + const pipeline = Readable.from([chunk1, chunk2]).pipe(command) + 
const result = await getStream.array(pipeline) + + strictEqual(result.length, 2) + strictEqual(result.flat().length, 0) + }) +}) diff --git a/packages/sparql/test/select.test.js b/packages/sparql/test/select.test.js index 3b2e3eff..a3e649e0 100644 --- a/packages/sparql/test/select.test.js +++ b/packages/sparql/test/select.test.js @@ -1,6 +1,6 @@ import { strictEqual } from 'assert' import getStream from 'get-stream' -import { isReadable, isWritable } from 'isstream' +import { isReadableStream, isWritableStream } from 'is-stream' import nock from 'nock' import select from '../select.js' @@ -19,8 +19,8 @@ describe('select', () => { const result = await select({ endpoint, query }) - strictEqual(isReadable(result), true) - strictEqual(isWritable(result), false) + strictEqual(isReadableStream(result), true) + strictEqual(isWritableStream(result), false) }) it('should send a GET request', async () => { From 9baaf354acdbdd812e1049e2cc50a4451d9c0104 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 15 Jan 2024 23:06:10 +0100 Subject: [PATCH 07/28] import query pipelines --- packages/cube/pipeline/fetch-constraint.ttl | 40 ++------ packages/cube/pipeline/fetch-cube.ttl | 98 ++++--------------- packages/cube/pipeline/fetch-metadata.ttl | 66 +++---------- packages/cube/pipeline/fetch-observations.ttl | 31 ++---- packages/cube/pipeline/query-constraint.ttl | 32 ++++++ packages/cube/pipeline/query-cube.ttl | 26 +++++ .../cube/pipeline/query-observation-sets.ttl | 29 ++++++ packages/cube/pipeline/query-observations.ttl | 30 ++++++ 8 files changed, 160 insertions(+), 192 deletions(-) create mode 100644 packages/cube/pipeline/query-constraint.ttl create mode 100644 packages/cube/pipeline/query-cube.ttl create mode 100644 packages/cube/pipeline/query-observation-sets.ttl create mode 100644 packages/cube/pipeline/query-observations.ttl diff --git a/packages/cube/pipeline/fetch-constraint.ttl b/packages/cube/pipeline/fetch-constraint.ttl index 42c5750c..5cda6ede 100644 --- 
a/packages/cube/pipeline/fetch-constraint.ttl +++ b/packages/cube/pipeline/fetch-constraint.ttl @@ -1,11 +1,13 @@ @prefix code: . @prefix p: . -@prefix sparql: . @prefix ntriples: . -@prefix base: . @prefix rdfs: . +@prefix : . -@base . +[ + code:imports <./query-constraint> ; + code:extension "ttl" ; +] . _:endpoint a p:Variable ; p:name "endpoint" ; @@ -17,42 +19,14 @@ _:cube a p:Variable ; rdfs:label "cube URI" ; . - a p:Pipeline , p:Readable ; +:fetch-constraint a p:Pipeline , p:Readable ; p:variables [ p:variable _:endpoint, _:cube ] ; p:steps [ p:stepList ( - _:queryConstraint + :queryConstraint [ ntriples:serialize () ] ) ] . - - -_:queryConstraint a p:Pipeline, p:ReadableObjectMode ; - p:steps - [ - p:stepList - ( - [ # same query as in fetch-metadata (TODO: load both from file once https://github.com/zazuko/barnard59/issues/257 is available) - sparql:construct - [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , - [ - code:name "query"; - code:value """ - #pragma describe.strategy cbd - - PREFIX cube: - - DESCRIBE ?s - WHERE { - <${cube}> cube:observationConstraint ?s . - } - """^^code:EcmaScriptTemplateLiteral - ] - ] - ) - ] - . - \ No newline at end of file diff --git a/packages/cube/pipeline/fetch-cube.ttl b/packages/cube/pipeline/fetch-cube.ttl index 24a2fbb8..cced5a95 100644 --- a/packages/cube/pipeline/fetch-cube.ttl +++ b/packages/cube/pipeline/fetch-cube.ttl @@ -1,11 +1,22 @@ @prefix code: . @prefix p: . -@prefix sparql: . @prefix ntriples: . @prefix base: . @prefix rdfs: . - -@base . +@prefix : . + +[ + code:imports <./query-cube> ; + code:extension "ttl" ; +] . +[ + code:imports <./query-observation-sets> ; + code:extension "ttl" ; +] . +[ + code:imports <./query-observations> ; + code:extension "ttl" ; +] . _:endpoint a p:Variable ; p:name "endpoint" ; @@ -17,91 +28,18 @@ _:cube a p:Variable ; rdfs:label "cube URI" ; . 
- a p:Pipeline , p:Readable ; +:fetch-cube a p:Pipeline , p:Readable ; p:variables [ p:variable _:endpoint, _:cube ] ; p:steps [ p:stepList ( [ base:concat\/object ( - _:queryCube - _:queryObservationSet - _:queryObservations + :queryCube + :queryObservationSet + :queryObservations ) ] [ ntriples:serialize () ] ) ] . - - -_:queryCube a p:Pipeline , p:ReadableObjectMode ; - p:steps - [ - p:stepList - ( - [ # same query as in fetch-observations (TODO: load both from file once https://github.com/zazuko/barnard59/issues/257 is available) - sparql:construct - [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , - [ - code:name "query"; - code:value """ - #pragma describe.strategy cbd - - DESCRIBE <${cube}> - """^^code:EcmaScriptTemplateLiteral - ] - ] - ) - ] - . - -_:queryObservationSet a p:Pipeline, p:ReadableObjectMode ; - p:steps - [ - p:stepList - ( - [ - sparql:construct - [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , - [ - code:name "query"; - code:value """ - PREFIX cube: - - CONSTRUCT { ?s cube:observation ?o } - WHERE { - <${cube}> cube:observationSet ?s . - ?s cube:observation ?o . - } - """^^code:EcmaScriptTemplateLiteral - ] - ] - ) - ] - . - - _:queryObservations a p:Pipeline, p:ReadableObjectMode ; - p:steps - [ - p:stepList - ( - [ # same query as in fetch-observations (TODO: load both from file once https://github.com/zazuko/barnard59/issues/257 is available) - sparql:construct - [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , - [ - code:name "query"; - code:value """ - PREFIX cube: - - CONSTRUCT { ?s ?p ?o } - WHERE { - <${cube}> cube:observationSet/cube:observation ?s . - ?s ?p ?o - } - """^^code:EcmaScriptTemplateLiteral - ] - ] - ) - ] - . - diff --git a/packages/cube/pipeline/fetch-metadata.ttl b/packages/cube/pipeline/fetch-metadata.ttl index d29ddbed..20125a3b 100644 --- a/packages/cube/pipeline/fetch-metadata.ttl +++ b/packages/cube/pipeline/fetch-metadata.ttl @@ -1,11 +1,18 @@ @prefix code: . 
@prefix p: . -@prefix sparql: . @prefix ntriples: . @prefix base: . @prefix rdfs: . +@prefix : . -@base . +[ + code:imports <./query-cube> ; + code:extension "ttl" ; +] . +[ + code:imports <./query-constraint> ; + code:extension "ttl" ; +] . _:endpoint a p:Variable ; p:name "endpoint" ; @@ -17,65 +24,14 @@ _:cube a p:Variable ; rdfs:label "cube URI" ; . - a p:Pipeline , p:Readable ; +:fetch-metadata a p:Pipeline , p:Readable ; p:variables [ p:variable _:endpoint, _:cube ] ; p:steps [ p:stepList ( - [ base:concat\/object (_:queryCube _:queryConstraint) ] + [ base:concat\/object (:queryCube :queryConstraint) ] [ ntriples:serialize () ] ) ] . - - -# relying on cbd is not ideal (vendor specific) - -_:queryCube a p:Pipeline , p:ReadableObjectMode ; - p:steps - [ - p:stepList - ( - [ - sparql:construct - [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , - [ - code:name "query"; - code:value """ - #pragma describe.strategy cbd - - DESCRIBE <${cube}> - """^^code:EcmaScriptTemplateLiteral - ] - ] - ) - ] - . - -_:queryConstraint a p:Pipeline, p:ReadableObjectMode ; - p:steps - [ - p:stepList - ( - [ - sparql:construct - [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , - [ - code:name "query"; - code:value """ - #pragma describe.strategy cbd - - PREFIX cube: - - DESCRIBE ?s - WHERE { - <${cube}> cube:observationConstraint ?s . - } - """^^code:EcmaScriptTemplateLiteral - ] - ] - ) - ] - . - \ No newline at end of file diff --git a/packages/cube/pipeline/fetch-observations.ttl b/packages/cube/pipeline/fetch-observations.ttl index d5680060..6a8b330e 100644 --- a/packages/cube/pipeline/fetch-observations.ttl +++ b/packages/cube/pipeline/fetch-observations.ttl @@ -1,10 +1,13 @@ @prefix code: . @prefix p: . -@prefix sparql: . @prefix ntriples: . @prefix rdfs: . +@prefix : . -@base . +[ + code:imports <./query-observations> ; + code:extension "ttl" ; +] . 
_:endpoint a p:Variable ; p:name "endpoint" ; @@ -16,34 +19,14 @@ _:cube a p:Variable ; rdfs:label "cube URI" ; . - a p:Pipeline , p:Readable ; +:fetch-observations a p:Pipeline , p:Readable ; p:variables [ p:variable _:endpoint, _:cube ] ; p:steps [ p:stepList ( - _:queryObservations + :queryObservations [ ntriples:serialize () ] ) ] . - - -_:queryObservations sparql:construct - [ - code:name "endpoint"; - code:value "endpoint"^^p:VariableName - ] , - [ - code:name "query"; - code:value """ - PREFIX cube: - - CONSTRUCT { ?s ?p ?o } - WHERE { - <${cube}> cube:observationSet/cube:observation ?s . - ?s ?p ?o - } - """^^code:EcmaScriptTemplateLiteral - ] -. diff --git a/packages/cube/pipeline/query-constraint.ttl b/packages/cube/pipeline/query-constraint.ttl new file mode 100644 index 00000000..a224b02e --- /dev/null +++ b/packages/cube/pipeline/query-constraint.ttl @@ -0,0 +1,32 @@ +@prefix code: . +@prefix p: . +@prefix sparql: . +@prefix : . + +# relying on cbd is not ideal (vendor specific) +:queryConstraint a p:Pipeline, p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + #pragma describe.strategy cbd + + PREFIX cube: + + DESCRIBE ?s + WHERE { + <${cube}> cube:observationConstraint ?s . + } + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . + \ No newline at end of file diff --git a/packages/cube/pipeline/query-cube.ttl b/packages/cube/pipeline/query-cube.ttl new file mode 100644 index 00000000..785eb15c --- /dev/null +++ b/packages/cube/pipeline/query-cube.ttl @@ -0,0 +1,26 @@ +@prefix code: . +@prefix p: . +@prefix sparql: . +@prefix : . 
+ +# relying on cbd is not ideal (vendor specific) +:queryCube a p:Pipeline , p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + #pragma describe.strategy cbd + + DESCRIBE <${cube}> + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . diff --git a/packages/cube/pipeline/query-observation-sets.ttl b/packages/cube/pipeline/query-observation-sets.ttl new file mode 100644 index 00000000..75f487ae --- /dev/null +++ b/packages/cube/pipeline/query-observation-sets.ttl @@ -0,0 +1,29 @@ +@prefix code: . +@prefix p: . +@prefix sparql: . +@prefix : . + +:queryObservationSet a p:Pipeline, p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + PREFIX cube: + + CONSTRUCT { ?s cube:observation ?o } + WHERE { + <${cube}> cube:observationSet ?s . + ?s cube:observation ?o . + } + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . diff --git a/packages/cube/pipeline/query-observations.ttl b/packages/cube/pipeline/query-observations.ttl new file mode 100644 index 00000000..f7fbc354 --- /dev/null +++ b/packages/cube/pipeline/query-observations.ttl @@ -0,0 +1,30 @@ +@prefix code: . +@prefix p: . +@prefix sparql: . +@prefix : . + +:queryObservations a p:Pipeline, p:ReadableObjectMode ; + p:steps + [ + p:stepList + ( + [ + sparql:construct + [ code:name "endpoint"; code:value "endpoint"^^p:VariableName ] , + [ + code:name "query"; + code:value """ + PREFIX cube: + + CONSTRUCT { ?s ?p ?o } + WHERE { + <${cube}> cube:observationSet/cube:observation ?s . + ?s ?p ?o + } + """^^code:EcmaScriptTemplateLiteral + ] + ] + ) + ] + . 
+ \ No newline at end of file From 5ff9068c425987199f9c69c754ca42107931099e Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Thu, 18 Jan 2024 12:25:36 +0100 Subject: [PATCH 08/28] docs: mention report-summary --- packages/cube/README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/cube/README.md b/packages/cube/README.md index 1d232608..2ee5aac5 100644 --- a/packages/cube/README.md +++ b/packages/cube/README.md @@ -62,7 +62,7 @@ SHACL reports for violations are written to `stdout`. In cases when a remote address give to `--profile` option does not include a correct `content-type` header (or does not provide a `content-type` header at all), the pipeline will fail. In such cases, it is possible to use the `--profileFormat` option to select the correct RDF parser. Its value must be a media type, such as `text/turtle`. -```bash + ### fetch observations @@ -95,6 +95,18 @@ SHACL reports for violations are written to `stdout`. To limit the output size, there is also a `maxViolations` option to stop validation when the given number of violations is reached. + +### Report Summary +The validation pipelines write a machine-readable [standard](https://www.w3.org/TR/shacl/#validation-report) report to `stdout`. +An additional `report-summary` pipeline produces a human-readable summary of this report: + +```bash +cat observations.ttl \ +| npx barnard59 cube check-observations --constraint metadata.ttl \ +| npx barnard59 cube report-summary +``` + + ### Known issues Command `check-metadata` may fail if there are `sh:in` constraints with too many values. 
From 83041d407d16c3a6f6c9fa8c44c91793981a7091 Mon Sep 17 00:00:00 2001 From: tpluscode Date: Sun, 21 Jan 2024 12:00:04 +0100 Subject: [PATCH 09/28] build(deps-dev): update @rdfjs/fetch types --- package-lock.json | 7 ++++--- packages/rdf/package.json | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index cd515c10..59ca990a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6468,9 +6468,10 @@ } }, "node_modules/@types/rdfjs__fetch": { - "version": "3.0.5", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/rdfjs__fetch/-/rdfjs__fetch-3.0.6.tgz", + "integrity": "sha512-NIPzDg7dk53qVVtWus4pQt7KCpH5SgYLtOcsmdB1h4MZCi1ZpCNCD6kiT56H/CKhGByiRxPcCx+spr2lNO3fgA==", "dev": true, - "license": "MIT", "dependencies": { "@rdfjs/types": ">=1.0.0", "@types/node": "*", @@ -27559,7 +27560,7 @@ "@types/mime-types": "^2.1.4", "@types/proto-fetch": "^1.0.5", "@types/rdf-transform-triple-to-quad": "^2.0.5", - "@types/rdfjs__fetch": "^3.0.5", + "@types/rdfjs__fetch": "^3.0.6", "@types/rdfjs__fetch-lite": "^3.0.8", "assert-throws-async": "^3.0.0", "chai": "^4.3.7", diff --git a/packages/rdf/package.json b/packages/rdf/package.json index 29c06570..a72ea2ef 100644 --- a/packages/rdf/package.json +++ b/packages/rdf/package.json @@ -46,7 +46,7 @@ "@types/proto-fetch": "^1.0.5", "@types/rdf-transform-triple-to-quad": "^2.0.5", "@types/rdfjs__fetch-lite": "^3.0.8", - "@types/rdfjs__fetch": "^3.0.5", + "@types/rdfjs__fetch": "^3.0.6", "assert-throws-async": "^3.0.0", "chai": "^4.3.7", "express-as-promise": "^1.2.0", From afb6e3c724faa7f1b9dc170437f35275711dfdf2 Mon Sep 17 00:00:00 2001 From: tpluscode Date: Sun, 21 Jan 2024 20:16:38 +0100 Subject: [PATCH 10/28] lint: remove usages of removed modules --- packages/rdf/test/append.test.js | 136 ++++++++--------- packages/rdf/test/fs.test.js | 12 +- .../rdf/test/localFetch/localFetch.test.js | 32 ++-- packages/rdf/test/mapMatch.test.js | 2 +- 
packages/rdf/test/membership.test.js | 60 ++++---- .../rdf/test/metadata/applyOptions.test.js | 92 ++++++------ packages/rdf/test/voidStats.test.js | 142 +++++++++--------- 7 files changed, 225 insertions(+), 251 deletions(-) diff --git a/packages/rdf/test/append.test.js b/packages/rdf/test/append.test.js index b56a4fc1..d9e2d517 100644 --- a/packages/rdf/test/append.test.js +++ b/packages/rdf/test/append.test.js @@ -1,41 +1,31 @@ -import { equal, strictEqual } from 'assert' -import fs from 'fs' +import { equal, strictEqual } from 'node:assert' +import fs from 'node:fs' import fsp from 'fs/promises' -import { fileURLToPath } from 'url' -import { resolve } from 'path' +import { fileURLToPath } from 'node:url' +import { resolve } from 'node:path' import assertThrows from 'assert-throws-async' import getStream from 'get-stream' import { isDuplexStream as isDuplex } from 'is-stream' import nock from 'nock' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import fromStream from 'rdf-dataset-ext/fromStream.js' -import addAll from 'rdf-dataset-ext/addAll.js' -import toCanonical from 'rdf-dataset-ext/toCanonical.js' import appendUnbound from '../lib/append.js' -import { schema, xsd, dcterms } from '../lib/namespaces.js' -const dataPath = './support/dataset.ttl' +const __dirname = fileURLToPath(new URL('.', import.meta.url)) -const metadataPath = './support/dataset_description.ttl' +const dataPath = resolve(__dirname, './support/dataset.ttl') -const __dirname = fileURLToPath(new URL('.', import.meta.url)) +const metadataPath = resolve(__dirname, './support/dataset_description.ttl') const ex = rdf.namespace('http://example.org/') async function getRDFDataset(filePath) { - return fromStream(rdf.dataset(), getRDFStream(filePath)) -} - -function getRDFStream(filePath) { - const stream = fs.createReadStream(resolve(__dirname, filePath)) - const parser = rdf.formats.parsers.get('text/turtle') - return parser.import(stream) + return 
rdf.dataset().import(rdf.fromFile(filePath)) } async function applyStep(transform) { const initial = await getRDFDataset(dataPath) - const stream = getRDFStream(dataPath).pipe(transform) + const stream = rdf.fromFile(dataPath).pipe(transform) const final = rdf.dataset(await getStream.array(stream)) return { initial, final } } @@ -51,7 +41,7 @@ describe('metadata.append', () => { it('should return a duplex stream with a stream metadata parameter', async () => { const step = await append({ - input: getRDFStream(metadataPath), + input: rdf.fromFile(metadataPath), }) strictEqual(isDuplex(step), true) }) @@ -78,24 +68,24 @@ describe('metadata.append', () => { it('should append data and metadata with default values', async () => { const all = rdf.dataset() - addAll(all, await getRDFDataset(dataPath)) - addAll(all, await getRDFDataset(metadataPath)) + .addAll(await getRDFDataset(dataPath)) + .addAll(await getRDFDataset(metadataPath)) const step = await append({ - input: getRDFStream(metadataPath), + input: rdf.fromFile(metadataPath), }) const { final } = await applyStep(step) equal( - toCanonical(final), - toCanonical(all), 'appended quads not as expected', + final.toCanonical(), + all.toCanonical(), 'appended quads not as expected', ) }) it('should append data and metadata with default values, and path as string', async () => { const all = rdf.dataset() - addAll(all, await getRDFDataset(dataPath)) - addAll(all, await getRDFDataset(metadataPath)) + .addAll(await getRDFDataset(dataPath)) + .addAll(await getRDFDataset(metadataPath)) const step = await append({ input: metadataPath, @@ -104,8 +94,8 @@ describe('metadata.append', () => { const { final } = await applyStep(step) equal( - toCanonical(final), - toCanonical(all), 'appended quads not as expected', + final.toCanonical(), + all.toCanonical(), 'appended quads not as expected', ) }) @@ -173,14 +163,14 @@ describe('File System: metadata.append', () => { const result = await getStream.array(Readable.from(data).pipe(step)) 
strictEqual(result.length, 7) - strictEqual(result[4].predicate.value, schema.dateModified.value) + strictEqual(result[4].predicate.value, rdf.ns.schema.dateModified.value) strictEqual(result[4].object.value, rdf.literal('2020-05-30').value) - strictEqual(result[5].predicate.value, dcterms.created.value) - strictEqual(result[5].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[5].predicate.value, rdf.ns.dcterms.created.value) + strictEqual(result[5].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) - strictEqual(result[6].predicate.value, schema.dateCreated.value) - strictEqual(result[6].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[6].predicate.value, rdf.ns.schema.dateCreated.value) + strictEqual(result[6].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) }) it('should use resolved literal TIME_FILE_CREATION with dateModified', async () => { @@ -199,14 +189,14 @@ describe('File System: metadata.append', () => { strictEqual(result.length, 7) - strictEqual(result[4].predicate.value, schema.dateCreated.value) + strictEqual(result[4].predicate.value, rdf.ns.schema.dateCreated.value) strictEqual(result[4].object.value, rdf.literal('2020-05-30').value) - strictEqual(result[5].predicate.value, dcterms.modified.value) - strictEqual(result[5].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[5].predicate.value, rdf.ns.dcterms.modified.value) + strictEqual(result[5].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) - strictEqual(result[6].predicate.value, schema.dateModified.value) - strictEqual(result[6].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[6].predicate.value, 
rdf.ns.schema.dateModified.value) + strictEqual(result[6].object.value, rdf.literal((new Date(stats.birthtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) }) it('should use resolved literal TIME_FILE_MODIFICATION with dateCreated', async () => { @@ -224,14 +214,14 @@ describe('File System: metadata.append', () => { const result = await getStream.array(Readable.from(data).pipe(step)) strictEqual(result.length, 7) - strictEqual(result[4].predicate.value, schema.dateModified.value) + strictEqual(result[4].predicate.value, rdf.ns.schema.dateModified.value) strictEqual(result[4].object.value, rdf.literal('2020-05-30').value) - strictEqual(result[5].predicate.value, dcterms.created.value) - strictEqual(result[5].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[5].predicate.value, rdf.ns.dcterms.created.value) + strictEqual(result[5].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) - strictEqual(result[6].predicate.value, schema.dateCreated.value) - strictEqual(result[6].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[6].predicate.value, rdf.ns.schema.dateCreated.value) + strictEqual(result[6].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) }) it('should use resolved literal TIME_FILE_MODIFICATION with dateModified', async () => { @@ -250,14 +240,14 @@ describe('File System: metadata.append', () => { strictEqual(result.length, 7) - strictEqual(result[4].predicate.value, schema.dateCreated.value) + strictEqual(result[4].predicate.value, rdf.ns.schema.dateCreated.value) strictEqual(result[4].object.value, rdf.literal('2020-05-30').value) - strictEqual(result[5].predicate.value, dcterms.modified.value) - strictEqual(result[5].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[5].predicate.value, 
rdf.ns.dcterms.modified.value) + strictEqual(result[5].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) - strictEqual(result[6].predicate.value, schema.dateModified.value) - strictEqual(result[6].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), xsd.dateTime).value) + strictEqual(result[6].predicate.value, rdf.ns.schema.dateModified.value) + strictEqual(result[6].object.value, rdf.literal((new Date(stats.mtimeMs)).toISOString(), rdf.ns.xsd.dateTime).value) }) it('should use resolved literal TIME_NOW with dateModified', async () => { @@ -265,9 +255,9 @@ describe('File System: metadata.append', () => { rdf.quad(ex.subject0, ex.predicate0, ex.object0, ex.graph0), ] const metadata = [ - rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), schema.Dataset), - rdf.quad(ex.subject1, schema.dateCreated, rdf.literal('2020-05-30')), - rdf.quad(ex.subject1, schema.dateModified, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), rdf.ns.schema.Dataset), + rdf.quad(ex.subject1, rdf.ns.schema.dateCreated, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.ns.schema.dateModified, rdf.literal('2020-05-30')), ] const step = await append({ input: Readable.from(metadata), @@ -278,10 +268,10 @@ describe('File System: metadata.append', () => { strictEqual(result.length, 4) - strictEqual(result[2].predicate.value, schema.dateCreated.value) + strictEqual(result[2].predicate.value, rdf.ns.schema.dateCreated.value) strictEqual(result[2].object.value, rdf.literal('2020-05-30').value) - strictEqual(result[3].predicate.value, schema.dateModified.value) + strictEqual(result[3].predicate.value, rdf.ns.schema.dateModified.value) strictEqual(result[3].object.value === rdf.literal('2020-05-30').value, false) }) @@ -290,9 +280,9 @@ describe('File System: metadata.append', () => { rdf.quad(ex.subject0, ex.predicate0, ex.object0, 
ex.graph0), ] const metadata = [ - rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), schema.Dataset), - rdf.quad(ex.subject1, schema.dateCreated, rdf.literal('2020-05-30')), - rdf.quad(ex.subject1, schema.dateModified, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), rdf.ns.schema.Dataset), + rdf.quad(ex.subject1, rdf.ns.schema.dateCreated, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.ns.schema.dateModified, rdf.literal('2020-05-30')), ] const step = await append({ input: Readable.from(metadata), @@ -303,10 +293,10 @@ describe('File System: metadata.append', () => { strictEqual(result.length, 4) - strictEqual(result[2].predicate.value, schema.dateModified.value) + strictEqual(result[2].predicate.value, rdf.ns.schema.dateModified.value) strictEqual(result[2].object.value, rdf.literal('2020-05-30').value) - strictEqual(result[3].predicate.value, schema.dateCreated.value) + strictEqual(result[3].predicate.value, rdf.ns.schema.dateCreated.value) strictEqual(result[3].object.value === rdf.literal('2020-05-30').value, false) }) @@ -315,8 +305,8 @@ describe('File System: metadata.append', () => { rdf.quad(ex.subject0, ex.predicate0, ex.object0, ex.graph0), ] const metadata = [ - rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), schema.Dataset), - rdf.quad(ex.subject1, schema.dateModified, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), rdf.ns.schema.Dataset), + rdf.quad(ex.subject1, rdf.ns.schema.dateModified, rdf.literal('2020-05-30')), ] const step = await append({ input: Readable.from(metadata), @@ -327,7 +317,7 @@ describe('File System: metadata.append', () => { strictEqual(result.length, 3) - strictEqual(result[2].predicate.value, schema.dateModified.value) + strictEqual(result[2].predicate.value, rdf.ns.schema.dateModified.value) 
strictEqual(result[2].object.value, rdf.literal('1999-12-31').value) }) @@ -336,8 +326,8 @@ describe('File System: metadata.append', () => { rdf.quad(ex.subject0, ex.predicate0, ex.object0, ex.graph0), ] const metadata = [ - rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), schema.Dataset), - rdf.quad(ex.subject1, schema.dateCreated, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), rdf.ns.schema.Dataset), + rdf.quad(ex.subject1, rdf.ns.schema.dateCreated, rdf.literal('2020-05-30')), ] const step = await append({ input: Readable.from(metadata), @@ -348,7 +338,7 @@ describe('File System: metadata.append', () => { strictEqual(result.length, 3) - strictEqual(result[2].predicate.value, schema.dateCreated.value) + strictEqual(result[2].predicate.value, rdf.ns.schema.dateCreated.value) strictEqual(result[2].object.value, rdf.literal('1999-12-31').value) }) @@ -357,19 +347,19 @@ describe('File System: metadata.append', () => { rdf.quad(ex.subject0, ex.predicate0, ex.object0, ex.graph0), ] const metadata = [ - rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), schema.Dataset), - rdf.quad(ex.subject1, schema.dateModified, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), rdf.ns.schema.Dataset), + rdf.quad(ex.subject1, rdf.ns.schema.dateModified, rdf.literal('2020-05-30')), ] const step = await append({ input: Readable.from(metadata), - dateModified: rdf.literal('1999-12-31', xsd.dateTime), + dateModified: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), }) const result = await getStream.array(Readable.from(data).pipe(step)) strictEqual(result.length, 3) - strictEqual(result[2].predicate.value, schema.dateModified.value) + strictEqual(result[2].predicate.value, rdf.ns.schema.dateModified.value) strictEqual(result[2].object.value, rdf.literal('1999-12-31').value) }) @@ 
-378,19 +368,19 @@ describe('File System: metadata.append', () => { rdf.quad(ex.subject0, ex.predicate0, ex.object0, ex.graph0), ] const metadata = [ - rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), schema.Dataset), - rdf.quad(ex.subject1, schema.dateCreated, rdf.literal('2020-05-30')), + rdf.quad(ex.subject1, rdf.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), rdf.ns.schema.Dataset), + rdf.quad(ex.subject1, rdf.ns.schema.dateCreated, rdf.literal('2020-05-30')), ] const step = await append({ input: Readable.from(metadata), - dateCreated: rdf.literal('1999-12-31', xsd.dateTime), + dateCreated: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), }) const result = await getStream.array(Readable.from(data).pipe(step)) strictEqual(result.length, 3) - strictEqual(result[2].predicate.value, schema.dateCreated.value) + strictEqual(result[2].predicate.value, rdf.ns.schema.dateCreated.value) strictEqual(result[2].object.value, rdf.literal('1999-12-31').value) }) }) diff --git a/packages/rdf/test/fs.test.js b/packages/rdf/test/fs.test.js index 2cd5c20f..f098892d 100644 --- a/packages/rdf/test/fs.test.js +++ b/packages/rdf/test/fs.test.js @@ -1,11 +1,9 @@ -import { Readable } from 'stream' -import { resolve } from 'path' -import * as url from 'url' +import { Readable } from 'node:stream' +import { resolve } from 'node:path' +import * as url from 'node:url' import $rdf from 'barnard59-env' import chai, { expect } from 'chai' import { jestSnapshotPlugin } from 'mocha-chai-jest-snapshot' -import fromStream from 'rdf-dataset-ext/fromStream.js' -import toCanonical from 'rdf-dataset-ext/toCanonical.js' import { parse as unbound } from '../fs.js' const __dirname = url.fileURLToPath(new URL('.', import.meta.url)) @@ -25,10 +23,10 @@ describe('fs', () => { ]) // when - const dataset = await fromStream($rdf.dataset(), files.pipe(parse())) + const dataset = await $rdf.dataset().import(files.pipe(parse())) // then - 
expect(toCanonical(dataset)).toMatchSnapshot() + expect(dataset.toCanonical()).toMatchSnapshot() }) it('fails when file does not exist', (done) => { diff --git a/packages/rdf/test/localFetch/localFetch.test.js b/packages/rdf/test/localFetch/localFetch.test.js index 5e199e1a..7285d8b3 100644 --- a/packages/rdf/test/localFetch/localFetch.test.js +++ b/packages/rdf/test/localFetch/localFetch.test.js @@ -1,13 +1,11 @@ -import { equal, strictEqual } from 'assert' -import fs from 'fs' -import { resolve } from 'path' -import { fileURLToPath } from 'url' +import { equal, strictEqual } from 'node:assert' +import fs from 'node:fs' +import { resolve } from 'node:path' +import { fileURLToPath } from 'node:url' import { expect } from 'chai' import assertThrows from 'assert-throws-async' import nock from 'nock' import rdf from 'barnard59-env' -import fromStream from 'rdf-dataset-ext/fromStream.js' -import toCanonical from 'rdf-dataset-ext/toCanonical.js' import { localFetch as unbound } from '../../lib/localFetch/localFetch.js' const __dirname = fileURLToPath(new URL('.', import.meta.url)) @@ -15,13 +13,7 @@ const datasetPath = '../support/dataset.ttl' const datasetAbsolutePath = resolve(__dirname, datasetPath) async function getRDFDataset(filePath) { - return fromStream(rdf.dataset(), getRDFStream(filePath)) -} - -function getRDFStream(filePath) { - const stream = fs.createReadStream(resolve(__dirname, filePath)) - const parser = rdf.formats.parsers.get('text/turtle') - return parser.import(stream) + return rdf.dataset().import(rdf.fromFile(filePath)) } const localFetch = unbound.bind({ env: rdf }) @@ -49,15 +41,15 @@ describe('metadata.lfetch', () => { it('with defaults, should get the same dataset', async () => { const expected = await getRDFDataset(datasetPath) - const { quadStream } = await localFetch(getRDFStream(datasetPath)) - const actual = await fromStream(rdf.dataset(), quadStream) + const { quadStream } = await localFetch(rdf.fromFile(datasetPath)) + const actual = 
await rdf.dataset().import(quadStream) equal(expected.equals(actual), true) }) it('with filename and base, should get the same dataset', async () => { const expected = await getRDFDataset(datasetPath) const { quadStream } = await localFetch(datasetPath, __dirname) - const actual = await fromStream(rdf.dataset(), quadStream) + const actual = await rdf.dataset().import(quadStream) equal(expected.equals(actual), true) }) @@ -65,7 +57,7 @@ describe('metadata.lfetch', () => { it('with absolute filename, should get the same dataset', async () => { const expected = await getRDFDataset(datasetPath) const { quadStream } = await localFetch(datasetAbsolutePath) - const actual = await fromStream(rdf.dataset(), quadStream) + const actual = await rdf.dataset().import(quadStream) equal(expected.equals(actual), true) }) @@ -73,7 +65,7 @@ describe('metadata.lfetch', () => { it('with absolute filename, should ignore basePath and get the same dataset', async () => { const expected = await getRDFDataset(datasetPath) const { quadStream } = await localFetch(datasetAbsolutePath, '/unknown/') - const actual = await fromStream(rdf.dataset(), quadStream) + const actual = await rdf.dataset().import(quadStream) equal(expected.equals(actual), true) }) @@ -99,9 +91,9 @@ describe('metadata.lfetch', () => { const expected = await getRDFDataset(datasetPath) const { quadStream } = await localFetch('https://example.com/metadata.ttl') - const actual = await fromStream(rdf.dataset(), quadStream) + const actual = await rdf.dataset().import(quadStream) - expect(toCanonical(actual)).to.eq(toCanonical(expected)) + expect(actual.toCanonical()).to.eq(expected.toCanonical()) }) it('fails at unknown file extension', async () => { diff --git a/packages/rdf/test/mapMatch.test.js b/packages/rdf/test/mapMatch.test.js index 978e780d..a4e1d2ed 100644 --- a/packages/rdf/test/mapMatch.test.js +++ b/packages/rdf/test/mapMatch.test.js @@ -1,4 +1,4 @@ -import { strictEqual } from 'assert' +import { strictEqual } from 
'node:assert' import getStream from 'get-stream' import { isDuplexStream } from 'is-stream' import rdf from 'barnard59-env' diff --git a/packages/rdf/test/membership.test.js b/packages/rdf/test/membership.test.js index 51f00259..876eb95a 100644 --- a/packages/rdf/test/membership.test.js +++ b/packages/rdf/test/membership.test.js @@ -1,12 +1,10 @@ -import { equal, strictEqual, throws } from 'assert' +import { equal, strictEqual, throws } from 'node:assert' import getStream from 'get-stream' import { isDuplexStream as isDuplex } from 'is-stream' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import toCanonical from 'rdf-dataset-ext/toCanonical.js' import append from '../lib/append.js' import { toTarget, fromSource } from '../lib/membership.js' -import * as ns from '../lib/namespaces.js' const ex = rdf.namespace('http://example.org/') @@ -42,17 +40,17 @@ describe('membership.toTarget', () => { it('should append meta-data to the data', async () => { const data = [ - rdf.quad(ex.bob, ns.rdf.type, ex.Person), - rdf.quad(ex.alice, ns.rdf.type, ex.Person), - rdf.quad(ex.fido, ns.rdf.type, ex.Dog), - rdf.quad(ex.tom, ns.rdf.type, ex.Cat), + rdf.quad(ex.bob, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.alice, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.fido, rdf.ns.rdf.type, ex.Dog), + rdf.quad(ex.tom, rdf.ns.rdf.type, ex.Cat), ] const expectedMetadata = [ rdf.quad(ex.bob, ex.in, ex.house), rdf.quad(ex.alice, ex.in, ex.house), rdf.quad(ex.tom, ex.in, ex.house), - rdf.quad(ex.house, ns.rdf.type, ex.Container), + rdf.quad(ex.house, rdf.ns.rdf.type, ex.Container), ] const step = toTarget({ @@ -65,24 +63,24 @@ describe('membership.toTarget', () => { const result = await getStream.array(Readable.from(data).pipe(step)) equal( - toCanonical(result), - toCanonical(rdf.dataset([...data, ...expectedMetadata])), + result.toCanonical(), + rdf.dataset([...data, ...expectedMetadata]).toCanonical(), ) }) it('should append meta-data to the data with string 
parameters', async () => { const data = [ - rdf.quad(ex.bob, ns.rdf.type, ex.Person), - rdf.quad(ex.alice, ns.rdf.type, ex.Person), - rdf.quad(ex.fido, ns.rdf.type, ex.Dog), - rdf.quad(ex.tom, ns.rdf.type, ex.Cat), + rdf.quad(ex.bob, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.alice, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.fido, rdf.ns.rdf.type, ex.Dog), + rdf.quad(ex.tom, rdf.ns.rdf.type, ex.Cat), ] const expectedMetadata = [ rdf.quad(ex.bob, ex.in, ex.house), rdf.quad(ex.alice, ex.in, ex.house), rdf.quad(ex.tom, ex.in, ex.house), - rdf.quad(ex.house, ns.rdf.type, ex.Container), + rdf.quad(ex.house, rdf.ns.rdf.type, ex.Container), ] const step = toTarget({ @@ -95,8 +93,8 @@ describe('membership.toTarget', () => { const result = await getStream.array(Readable.from(data).pipe(step)) equal( - toCanonical(result), - toCanonical(rdf.dataset([...data, ...expectedMetadata])), + result.toCanonical(), + rdf.dataset([...data, ...expectedMetadata]).toCanonical(), ) }) }) @@ -133,17 +131,17 @@ describe('membership.fromSource', () => { it('should append meta-data to the data', async () => { const data = [ - rdf.quad(ex.bob, ns.rdf.type, ex.Person), - rdf.quad(ex.alice, ns.rdf.type, ex.Person), - rdf.quad(ex.fido, ns.rdf.type, ex.Dog), - rdf.quad(ex.tom, ns.rdf.type, ex.Cat), + rdf.quad(ex.bob, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.alice, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.fido, rdf.ns.rdf.type, ex.Dog), + rdf.quad(ex.tom, rdf.ns.rdf.type, ex.Cat), ] const expectedMetadata = [ rdf.quad(ex.house, ex.contains, ex.bob), rdf.quad(ex.house, ex.contains, ex.alice), rdf.quad(ex.house, ex.contains, ex.tom), - rdf.quad(ex.house, ns.rdf.type, ex.Container), + rdf.quad(ex.house, rdf.ns.rdf.type, ex.Container), ] const step = fromSource({ @@ -156,24 +154,24 @@ describe('membership.fromSource', () => { const result = await getStream.array(Readable.from(data).pipe(step)) equal( - toCanonical(result), - toCanonical(rdf.dataset([...data, ...expectedMetadata])), + 
result.toCanonical(), + rdf.dataset([...data, ...expectedMetadata]).toCanonical(), ) }) it('should append meta-data to the data with string parameters', async () => { const data = [ - rdf.quad(ex.bob, ns.rdf.type, ex.Person), - rdf.quad(ex.alice, ns.rdf.type, ex.Person), - rdf.quad(ex.fido, ns.rdf.type, ex.Dog), - rdf.quad(ex.tom, ns.rdf.type, ex.Cat), + rdf.quad(ex.bob, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.alice, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.fido, rdf.ns.rdf.type, ex.Dog), + rdf.quad(ex.tom, rdf.ns.rdf.type, ex.Cat), ] const expectedMetadata = [ rdf.quad(ex.house, ex.contains, ex.bob), rdf.quad(ex.house, ex.contains, ex.alice), rdf.quad(ex.house, ex.contains, ex.tom), - rdf.quad(ex.house, ns.rdf.type, ex.Container), + rdf.quad(ex.house, rdf.ns.rdf.type, ex.Container), ] const step = fromSource({ @@ -186,8 +184,8 @@ describe('membership.fromSource', () => { const result = await getStream.array(Readable.from(data).pipe(step)) equal( - toCanonical(result), - toCanonical(rdf.dataset([...data, ...expectedMetadata])), + result.toCanonical(), + rdf.dataset([...data, ...expectedMetadata]).toCanonical(), ) }) }) diff --git a/packages/rdf/test/metadata/applyOptions.test.js b/packages/rdf/test/metadata/applyOptions.test.js index d0d71221..0b7390f6 100644 --- a/packages/rdf/test/metadata/applyOptions.test.js +++ b/packages/rdf/test/metadata/applyOptions.test.js @@ -1,10 +1,8 @@ -import { strictEqual } from 'assert' +import { strictEqual } from 'node:assert' import { expect } from 'chai' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' import { applyOptions } from '../../lib/metadata/applyOptions.js' -import * as ns from '../../lib/namespaces.js' -import { xsd } from '../../lib/namespaces.js' const ex = rdf.namespace('http://example.org/') @@ -15,7 +13,7 @@ describe('applyOptions', () => { it('should return the same data if no options given', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ex.type0, ex.graph1), + 
rdf.quad(ex.subject0, rdf.ns.rdf.type, ex.type0, ex.graph1), ] const options = {} @@ -28,14 +26,14 @@ describe('applyOptions', () => { it('should update or append schema:dateCreated for known classes', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), - rdf.quad(ex.subject0, ns.schema.dateCreated, rdf.literal('Not me'), ex.graph0), - rdf.quad(ex.subject1, ns.rdf.type, ex.type1, ex.graph0), - rdf.quad(ex.subject3, ns.rdf.type, ns.schema.Dataset, ex.graph0), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.schema.dateCreated, rdf.literal('Not me'), ex.graph0), + rdf.quad(ex.subject1, rdf.ns.rdf.type, ex.type1, ex.graph0), + rdf.quad(ex.subject3, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph0), ] const options = { - dateCreated: rdf.literal('1999-12-31', xsd.dateTime), + dateCreated: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), } const quadStream = Readable.from(data) const result = [...await applyOptions(quadStream, {}, options)] @@ -44,13 +42,13 @@ describe('applyOptions', () => { strictEqual(result[0].equals(data[0]), true) strictEqual(result[1].equals(data[2]), true) strictEqual(result[2].equals(data[3]), true) - strictEqual(result[3].equals(rdf.quad(ex.subject0, ns.schema.dateCreated, rdf.literal('1999-12-31', xsd.dateTime))), true) - strictEqual(result[4].equals(rdf.quad(ex.subject3, ns.schema.dateCreated, rdf.literal('1999-12-31', xsd.dateTime))), true) + strictEqual(result[3].equals(rdf.quad(ex.subject0, rdf.ns.schema.dateCreated, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) + strictEqual(result[4].equals(rdf.quad(ex.subject3, rdf.ns.schema.dateCreated, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) }) it('should update or append schema:dateCreated for known classes (string)', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.rdf.type, 
rdf.ns.schema.Dataset, ex.graph1), ] const options = { @@ -61,19 +59,19 @@ describe('applyOptions', () => { strictEqual(result.length, 2) strictEqual(result[0].equals(data[0]), true) - expect(result[1]).to.deep.equal(rdf.quad(ex.subject0, ns.schema.dateCreated, rdf.literal('1999-12-31', xsd.dateTime))) + expect(result[1]).to.deep.equal(rdf.quad(ex.subject0, rdf.ns.schema.dateCreated, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))) }) it('should update or append dcterms:created for known classes', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.dcat.Dataset, ex.graph1), - rdf.quad(ex.subject0, ns.dcterms.created, rdf.literal('Not me'), ex.graph0), - rdf.quad(ex.subject1, ns.rdf.type, ex.type1, ex.graph0), - rdf.quad(ex.subject3, ns.rdf.type, ns.dcat.Dataset, ex.graph0), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.dcat.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.dcterms.created, rdf.literal('Not me'), ex.graph0), + rdf.quad(ex.subject1, rdf.ns.rdf.type, ex.type1, ex.graph0), + rdf.quad(ex.subject3, rdf.ns.rdf.type, rdf.ns.dcat.Dataset, ex.graph0), ] const options = { - dateCreated: rdf.literal('1999-12-31', xsd.dateTime), + dateCreated: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), } const quadStream = Readable.from(data) const result = [...await applyOptions(quadStream, {}, options)] @@ -82,20 +80,20 @@ describe('applyOptions', () => { strictEqual(result[0].equals(data[0]), true) strictEqual(result[1].equals(data[2]), true) strictEqual(result[2].equals(data[3]), true) - strictEqual(result[3].equals(rdf.quad(ex.subject0, ns.dcterms.created, rdf.literal('1999-12-31', xsd.dateTime))), true) - strictEqual(result[4].equals(rdf.quad(ex.subject3, ns.dcterms.created, rdf.literal('1999-12-31', xsd.dateTime))), true) + strictEqual(result[3].equals(rdf.quad(ex.subject0, rdf.ns.dcterms.created, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) + strictEqual(result[4].equals(rdf.quad(ex.subject3, rdf.ns.dcterms.created, 
rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) }) it('should update or append schema:dateModified for known classes', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), - rdf.quad(ex.subject0, ns.schema.dateModified, rdf.literal('Not me'), ex.graph0), - rdf.quad(ex.subject1, ns.rdf.type, ex.type1, ex.graph0), - rdf.quad(ex.subject3, ns.rdf.type, ns.schema.Dataset, ex.graph0), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.schema.dateModified, rdf.literal('Not me'), ex.graph0), + rdf.quad(ex.subject1, rdf.ns.rdf.type, ex.type1, ex.graph0), + rdf.quad(ex.subject3, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph0), ] const options = { - dateModified: rdf.literal('1999-12-31', xsd.dateTime), + dateModified: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), } const quadStream = Readable.from(data) const result = [...await applyOptions(quadStream, {}, options)] @@ -104,20 +102,20 @@ describe('applyOptions', () => { strictEqual(result[0].equals(data[0]), true) strictEqual(result[1].equals(data[2]), true) strictEqual(result[2].equals(data[3]), true) - strictEqual(result[3].equals(rdf.quad(ex.subject0, ns.schema.dateModified, rdf.literal('1999-12-31', xsd.dateTime))), true) - strictEqual(result[4].equals(rdf.quad(ex.subject3, ns.schema.dateModified, rdf.literal('1999-12-31', xsd.dateTime))), true) + strictEqual(result[3].equals(rdf.quad(ex.subject0, rdf.ns.schema.dateModified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) + strictEqual(result[4].equals(rdf.quad(ex.subject3, rdf.ns.schema.dateModified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) }) it('should update or append dcterms:modified for known classes', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.dcat.Dataset, ex.graph1), - rdf.quad(ex.subject0, ns.dcterms.modified, rdf.literal('Not me'), ex.graph0), - rdf.quad(ex.subject1, ns.rdf.type, ex.type1, ex.graph0), 
- rdf.quad(ex.subject3, ns.rdf.type, ns.dcat.Dataset, ex.graph0), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.dcat.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.dcterms.modified, rdf.literal('Not me'), ex.graph0), + rdf.quad(ex.subject1, rdf.ns.rdf.type, ex.type1, ex.graph0), + rdf.quad(ex.subject3, rdf.ns.rdf.type, rdf.ns.dcat.Dataset, ex.graph0), ] const options = { - dateModified: rdf.literal('1999-12-31', xsd.dateTime), + dateModified: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), } const quadStream = Readable.from(data) const result = [...await applyOptions(quadStream, {}, options)] @@ -126,18 +124,18 @@ describe('applyOptions', () => { strictEqual(result[0].equals(data[0]), true) strictEqual(result[1].equals(data[2]), true) strictEqual(result[2].equals(data[3]), true) - strictEqual(result[3].equals(rdf.quad(ex.subject0, ns.dcterms.modified, rdf.literal('1999-12-31', xsd.dateTime))), true) - strictEqual(result[4].equals(rdf.quad(ex.subject3, ns.dcterms.modified, rdf.literal('1999-12-31', xsd.dateTime))), true) + strictEqual(result[3].equals(rdf.quad(ex.subject0, rdf.ns.dcterms.modified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) + strictEqual(result[4].equals(rdf.quad(ex.subject3, rdf.ns.dcterms.modified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) }) it('should update or append both dcterms:modified and schema:modified for known classes', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.dcat.Dataset, ex.graph1), - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.dcat.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph1), ] const options = { - dateModified: rdf.literal('1999-12-31', xsd.dateTime), + dateModified: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), } const quadStream = Readable.from(data) const result = [...await applyOptions(quadStream, {}, options)] @@ -145,13 +143,13 @@ 
describe('applyOptions', () => { strictEqual(result.length, 4) strictEqual(result[0].equals(data[0]), true) strictEqual(result[1].equals(data[1]), true) - strictEqual(result[2].equals(rdf.quad(ex.subject0, ns.dcterms.modified, rdf.literal('1999-12-31', xsd.dateTime))), true) - strictEqual(result[3].equals(rdf.quad(ex.subject0, ns.schema.dateModified, rdf.literal('1999-12-31', xsd.dateTime))), true) + strictEqual(result[2].equals(rdf.quad(ex.subject0, rdf.ns.dcterms.modified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) + strictEqual(result[3].equals(rdf.quad(ex.subject0, rdf.ns.schema.dateModified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))), true) }) it('should update or append schema:dateModified for known (string)', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph1), ] const options = { @@ -162,20 +160,20 @@ describe('applyOptions', () => { strictEqual(result.length, 2) strictEqual(result[0].equals(data[0]), true) - expect(result[1]).to.deep.equal(rdf.quad(ex.subject0, ns.schema.dateModified, rdf.literal('1999-12-31', xsd.dateTime))) + expect(result[1]).to.deep.equal(rdf.quad(ex.subject0, rdf.ns.schema.dateModified, rdf.literal('1999-12-31', rdf.ns.xsd.dateTime))) }) it('should set the corresponding graph', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), - rdf.quad(ex.subject0, ns.schema.dateModified, rdf.literal('Not me'), ex.graph0), - rdf.quad(ex.subject1, ns.rdf.type, ex.type1, ex.graph0), - rdf.quad(ex.subject3, ns.rdf.type, ns.schema.Dataset, ex.graph0), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.schema.dateModified, rdf.literal('Not me'), ex.graph0), + rdf.quad(ex.subject1, rdf.ns.rdf.type, ex.type1, ex.graph0), + rdf.quad(ex.subject3, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph0), ] const options = { 
graph: ex.graph2, - dateModified: rdf.literal('1999-12-31', xsd.dateTime), + dateModified: rdf.literal('1999-12-31', rdf.ns.xsd.dateTime), } const quadStream = Readable.from(data) const result = [...await applyOptions(quadStream, {}, options)] @@ -190,7 +188,7 @@ describe('applyOptions', () => { it('should set the corresponding graph (string)', async () => { const data = [ - rdf.quad(ex.subject0, ns.rdf.type, ns.schema.Dataset, ex.graph1), + rdf.quad(ex.subject0, rdf.ns.rdf.type, rdf.ns.schema.Dataset, ex.graph1), ] const options = { diff --git a/packages/rdf/test/voidStats.test.js b/packages/rdf/test/voidStats.test.js index 125b2c2b..569d5ed1 100644 --- a/packages/rdf/test/voidStats.test.js +++ b/packages/rdf/test/voidStats.test.js @@ -1,11 +1,9 @@ -import { equal, strictEqual } from 'assert' +import { equal, strictEqual } from 'node:assert' import assertThrows from 'assert-throws-async' import getStream from 'get-stream' import { isDuplexStream as isDuplex } from 'is-stream' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import toCanonical from 'rdf-dataset-ext/toCanonical.js' -import * as ns from '../lib/namespaces.js' import voidStats from '../lib/voidStats.js' const ex = rdf.namespace('http://example.org/') @@ -33,15 +31,15 @@ describe('metadata.voidStats', () => { it('includes counts at the end of the stream', async () => { const data = [ - rdf.quad(ex.bob, ns.rdf.type, ex.Person), - rdf.quad(ex.alice, ns.rdf.type, ex.Person), + rdf.quad(ex.bob, rdf.ns.rdf.type, ex.Person), + rdf.quad(ex.alice, rdf.ns.rdf.type, ex.Person), rdf.quad(ex.bob, ex.knows, ex.alice), rdf.quad(ex.alice, ex.name, rdf.literal('Alice')), ] const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.triples, rdf.literal('4', ns.xsd.integer)), - rdf.quad(ex.dataset, ns._void.entities, rdf.literal('2', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, 
rdf.ns._void.triples, rdf.literal('4', rdf.ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns._void.entities, rdf.literal('2', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -52,17 +50,17 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result.slice(4)), - toCanonical(expectedMetadata), + result.slice(4).toCanonical(), + expectedMetadata.toCanonical(), ) }) it('returns zero counts for no data', async () => { const data = [] const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.triples, rdf.literal('0', ns.xsd.integer)), - rdf.quad(ex.dataset, ns._void.entities, rdf.literal('0', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns._void.triples, rdf.literal('0', rdf.ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -73,8 +71,8 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result), - toCanonical(expectedMetadata), + result.toCanonical(), + expectedMetadata.toCanonical(), ) }) @@ -84,9 +82,9 @@ describe('metadata.voidStats', () => { rdf.quad(ex.alice, ex.name, rdf.literal('Alice')), ] const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.triples, rdf.literal('2', ns.xsd.integer)), - rdf.quad(ex.dataset, ns._void.entities, rdf.literal('0', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns._void.triples, rdf.literal('2', rdf.ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -97,17 +95,17 @@ describe('metadata.voidStats', () => { const result = await 
getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result.slice(2)), - toCanonical(expectedMetadata), + result.slice(2).toCanonical(), + expectedMetadata.toCanonical(), ) }) it('uses the named-graph given as parameter', async () => { const data = [] const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset, ex.metadata), - rdf.quad(ex.dataset, ns._void.triples, rdf.literal('0', ns.xsd.integer), ex.metadata), - rdf.quad(ex.dataset, ns._void.entities, rdf.literal('0', ns.xsd.integer), ex.metadata), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset, ex.metadata), + rdf.quad(ex.dataset, rdf.ns._void.triples, rdf.literal('0', rdf.ns.xsd.integer), ex.metadata), + rdf.quad(ex.dataset, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer), ex.metadata), ] const inputStream = Readable.from(data) @@ -119,8 +117,8 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result), - toCanonical(expectedMetadata), + result.toCanonical(), + expectedMetadata.toCanonical(), ) }) @@ -130,7 +128,7 @@ describe('metadata.voidStats', () => { rdf.quad(ex.alice, ex.name, rdf.literal('Alice')), ] const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), ] const inputStream = Readable.from(data) @@ -142,28 +140,28 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result.slice(2)), - toCanonical(expectedMetadata), + result.slice(2).toCanonical(), + expectedMetadata.toCanonical(), ) }) it('describes counts for class partitions', async () => { const data = [ - rdf.quad(ex.a_1, ns.rdf.type, ex.A), - rdf.quad(ex.a_2, ns.rdf.type, ex.A), - rdf.quad(ex.b_1, ns.rdf.type, ex.B), - rdf.quad(ex.c_1, ns.rdf.type, ex.C), + rdf.quad(ex.a_1, rdf.ns.rdf.type, ex.A), + rdf.quad(ex.a_2, rdf.ns.rdf.type, ex.A), + rdf.quad(ex.b_1, 
rdf.ns.rdf.type, ex.B), + rdf.quad(ex.c_1, rdf.ns.rdf.type, ex.C), ] const partition1 = rdf.namedNode('http://example.org/dataset/classPartition/0') const partition2 = rdf.namedNode('http://example.org/dataset/classPartition/1') const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.classPartition, partition1), - rdf.quad(ex.dataset, ns._void.classPartition, partition2), - rdf.quad(partition1, ns._void.class, ex.A), - rdf.quad(partition1, ns._void.entities, rdf.literal('2', ns.xsd.integer)), - rdf.quad(partition2, ns._void.class, ex.C), - rdf.quad(partition2, ns._void.entities, rdf.literal('1', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns._void.classPartition, partition1), + rdf.quad(ex.dataset, rdf.ns._void.classPartition, partition2), + rdf.quad(partition1, rdf.ns._void.class, ex.A), + rdf.quad(partition1, rdf.ns._void.entities, rdf.literal('2', rdf.ns.xsd.integer)), + rdf.quad(partition2, rdf.ns._void.class, ex.C), + rdf.quad(partition2, rdf.ns._void.entities, rdf.literal('1', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -176,8 +174,8 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result.slice(4)), - toCanonical(expectedMetadata), + result.slice(4).toCanonical(), + expectedMetadata.toCanonical(), ) }) @@ -186,13 +184,13 @@ describe('metadata.voidStats', () => { const partition1 = rdf.namedNode('http://example.org/dataset/classPartition/0') const partition2 = rdf.namedNode('http://example.org/dataset/classPartition/1') const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.classPartition, partition1), - rdf.quad(ex.dataset, ns._void.classPartition, partition2), - rdf.quad(partition1, ns._void.class, ex.A), - rdf.quad(partition1, ns._void.entities, rdf.literal('0', ns.xsd.integer)), - 
rdf.quad(partition2, ns._void.class, ex.C), - rdf.quad(partition2, ns._void.entities, rdf.literal('0', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns._void.classPartition, partition1), + rdf.quad(ex.dataset, rdf.ns._void.classPartition, partition2), + rdf.quad(partition1, rdf.ns._void.class, ex.A), + rdf.quad(partition1, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer)), + rdf.quad(partition2, rdf.ns._void.class, ex.C), + rdf.quad(partition2, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -205,8 +203,8 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result), - toCanonical(expectedMetadata), + result.toCanonical(), + expectedMetadata.toCanonical(), ) }) @@ -220,13 +218,13 @@ describe('metadata.voidStats', () => { const partition1 = rdf.namedNode('http://example.org/dataset/propertyPartition/0') const partition2 = rdf.namedNode('http://example.org/dataset/propertyPartition/1') const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.propertyPartition, partition1), - rdf.quad(ex.dataset, ns._void.propertyPartition, partition2), - rdf.quad(partition1, ns._void.property, ex.p_1), - rdf.quad(partition1, ns._void.entities, rdf.literal('2', ns.xsd.integer)), - rdf.quad(partition2, ns._void.property, ex.p_3), - rdf.quad(partition2, ns._void.entities, rdf.literal('1', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns._void.propertyPartition, partition1), + rdf.quad(ex.dataset, rdf.ns._void.propertyPartition, partition2), + rdf.quad(partition1, rdf.ns._void.property, ex.p_1), + rdf.quad(partition1, rdf.ns._void.entities, rdf.literal('2', rdf.ns.xsd.integer)), + rdf.quad(partition2, rdf.ns._void.property, ex.p_3), + rdf.quad(partition2, 
rdf.ns._void.entities, rdf.literal('1', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -239,8 +237,8 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result.slice(4)), - toCanonical(expectedMetadata), + result.slice(4).toCanonical(), + expectedMetadata.toCanonical(), ) }) @@ -249,13 +247,13 @@ describe('metadata.voidStats', () => { const partition1 = rdf.namedNode('http://example.org/dataset/propertyPartition/0') const partition2 = rdf.namedNode('http://example.org/dataset/propertyPartition/1') const expectedMetadata = [ - rdf.quad(ex.dataset, ns.rdf.type, ns._void.Dataset), - rdf.quad(ex.dataset, ns._void.propertyPartition, partition1), - rdf.quad(ex.dataset, ns._void.propertyPartition, partition2), - rdf.quad(partition1, ns._void.property, ex.p_1), - rdf.quad(partition1, ns._void.entities, rdf.literal('0', ns.xsd.integer)), - rdf.quad(partition2, ns._void.property, ex.p_3), - rdf.quad(partition2, ns._void.entities, rdf.literal('0', ns.xsd.integer)), + rdf.quad(ex.dataset, rdf.ns.rdf.type, rdf.ns._void.Dataset), + rdf.quad(ex.dataset, rdf.ns._void.propertyPartition, partition1), + rdf.quad(ex.dataset, rdf.ns._void.propertyPartition, partition2), + rdf.quad(partition1, rdf.ns._void.property, ex.p_1), + rdf.quad(partition1, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer)), + rdf.quad(partition2, rdf.ns._void.property, ex.p_3), + rdf.quad(partition2, rdf.ns._void.entities, rdf.literal('0', rdf.ns.xsd.integer)), ] const inputStream = Readable.from(data) @@ -268,17 +266,17 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) equal( - toCanonical(result), - toCanonical(expectedMetadata), + result.toCanonical(), + expectedMetadata.toCanonical(), ) }) it('accepts string parameters', async () => { const data = [ - rdf.quad(ex.a_1, ns.rdf.type, ex.A), - rdf.quad(ex.a_2, ns.rdf.type, ex.A), - rdf.quad(ex.b_1, ns.rdf.type, 
ex.B), - rdf.quad(ex.c_1, ns.rdf.type, ex.C), + rdf.quad(ex.a_1, rdf.ns.rdf.type, ex.A), + rdf.quad(ex.a_2, rdf.ns.rdf.type, ex.A), + rdf.quad(ex.b_1, rdf.ns.rdf.type, ex.B), + rdf.quad(ex.c_1, rdf.ns.rdf.type, ex.C), rdf.quad(ex.a, ex.p_1, ex.b), rdf.quad(ex.a, ex.p_1, ex.b), rdf.quad(ex.a, ex.p_2, ex.b), From 2b199bd708d21a48db5240bf345541aef4e0fa0c Mon Sep 17 00:00:00 2001 From: tpluscode Date: Sun, 21 Jan 2024 20:39:19 +0100 Subject: [PATCH 11/28] revert: no need to remove usage of env.termSet --- packages/rdf/lib/PatternMatcher.js | 11 +++++------ packages/rdf/mapMatch.js | 2 +- packages/rdf/test/PatternMatcher.test.js | 2 +- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/packages/rdf/lib/PatternMatcher.js b/packages/rdf/lib/PatternMatcher.js index 98daf0cf..0ed70afe 100644 --- a/packages/rdf/lib/PatternMatcher.js +++ b/packages/rdf/lib/PatternMatcher.js @@ -1,3 +1,5 @@ +import rdf from 'barnard59-env' + /** * @typedef {'subject' | 'predicate' | 'object' | 'graph'} QuadPart * @typedef {import('@rdfjs/term-set').default} TermSet @@ -5,16 +7,13 @@ class PatternMatcher { /** - * @param {import('barnard59-env').Environment} rdf * @param {object} [options] * @param {import('@rdfjs/types').Quad_Subject | Iterable} [options.subject] * @param {import('@rdfjs/types').Quad_Predicate | Iterable} [options.predicate] * @param {import('@rdfjs/types').Quad_Object | Iterable} [options.object] * @param {import('@rdfjs/types').Quad_Graph | Iterable} [options.graph] */ - constructor(rdf, { subject, predicate, object, graph } = {}) { - this.rdf = rdf - + constructor({ subject, predicate, object, graph } = {}) { /** * @type {Partial>} */ @@ -45,9 +44,9 @@ class PatternMatcher { } if (Symbol.iterator in value) { - this.pattern[name] = this.rdf.termSet([...value]) + this.pattern[name] = rdf.termSet([...value]) } else { - this.pattern[name] = this.rdf.termSet([value]) + this.pattern[name] = rdf.termSet([value]) } } } diff --git a/packages/rdf/mapMatch.js 
b/packages/rdf/mapMatch.js index 2676679f..de95143d 100644 --- a/packages/rdf/mapMatch.js +++ b/packages/rdf/mapMatch.js @@ -18,7 +18,7 @@ import PatternMatcher from './lib/PatternMatcher.js' * @template T */ function mapMatch({ map, subject, predicate, object, graph }) { - const matcher = new PatternMatcher(this.env, { subject, predicate, object, graph }) + const matcher = new PatternMatcher({ subject, predicate, object, graph }) return new Transform({ objectMode: true, diff --git a/packages/rdf/test/PatternMatcher.test.js b/packages/rdf/test/PatternMatcher.test.js index 4dee9b31..d1f07f5c 100644 --- a/packages/rdf/test/PatternMatcher.test.js +++ b/packages/rdf/test/PatternMatcher.test.js @@ -1,4 +1,4 @@ -import { strictEqual } from 'assert' +import { strictEqual } from 'node:assert' import rdf from 'barnard59-env' import PatternMatcher from '../lib/PatternMatcher.js' import { ex } from './support/namespaces.js' From 7cbbf1ef07078db159584a992806ab7d93888531 Mon Sep 17 00:00:00 2001 From: tpluscode Date: Sun, 21 Jan 2024 20:46:31 +0100 Subject: [PATCH 12/28] build: windows-friendly rimraf usage --- packages/rdf/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/rdf/package.json b/packages/rdf/package.json index a72ea2ef..ea2a7349 100644 --- a/packages/rdf/package.json +++ b/packages/rdf/package.json @@ -6,7 +6,7 @@ "type": "module", "scripts": { "test": "mocha", - "prebuild": "rimraf *.d.ts lib/**/*.d.ts lib/*.d.ts", + "prebuild": "rimraf -g *.d.ts lib/**/*.d.ts lib/*.d.ts", "build": "tsc", "prepack": "npm run build" }, From ab45766f4b418fdfe65edc7f704d5bd6bde6afdf Mon Sep 17 00:00:00 2001 From: tpluscode Date: Sun, 21 Jan 2024 21:03:44 +0100 Subject: [PATCH 13/28] test: applied context --- packages/rdf/test/mapMatch.test.js | 8 ++-- packages/rdf/test/membership.test.js | 37 +++++------------- packages/rdf/test/open.test.js | 1 + packages/rdf/test/setGraph.test.js | 8 ++-- packages/rdf/test/voidStats.test.js | 56 
+++++++--------------------- 5 files changed, 31 insertions(+), 79 deletions(-) diff --git a/packages/rdf/test/mapMatch.test.js b/packages/rdf/test/mapMatch.test.js index a4e1d2ed..420480b6 100644 --- a/packages/rdf/test/mapMatch.test.js +++ b/packages/rdf/test/mapMatch.test.js @@ -3,14 +3,12 @@ import getStream from 'get-stream' import { isDuplexStream } from 'is-stream' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import mapMatch from '../mapMatch.js' +import mapMatchUnbound from '../mapMatch.js' import * as ns from './support/namespaces.js' -describe('mapMatch', () => { - it('should be a factory', () => { - strictEqual(typeof mapMatch, 'function') - }) +const mapMatch = mapMatchUnbound.bind({ env: rdf }) +describe('mapMatch', () => { it('should return a duplex stream', () => { const stream = mapMatch({ predicate: '', map: () => {} }) diff --git a/packages/rdf/test/membership.test.js b/packages/rdf/test/membership.test.js index 876eb95a..d46c180e 100644 --- a/packages/rdf/test/membership.test.js +++ b/packages/rdf/test/membership.test.js @@ -1,18 +1,17 @@ -import { equal, strictEqual, throws } from 'node:assert' +import { strictEqual, throws } from 'node:assert' import getStream from 'get-stream' import { isDuplexStream as isDuplex } from 'is-stream' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import append from '../lib/append.js' -import { toTarget, fromSource } from '../lib/membership.js' +import { expect } from 'chai' +import * as membership from '../lib/membership.js' + +const toTarget = membership.toTarget.bind({ env: rdf }) +const fromSource = membership.fromSource.bind({ env: rdf }) const ex = rdf.namespace('http://example.org/') describe('membership.toTarget', () => { - it('should be a factory', () => { - strictEqual(typeof append, 'function') - }) - const parameterSet = [ { targetUri: undefined, targetClass: ex.targetClass, property: ex.property, classes: [ex.class] }, { targetUri: 
ex.targetUri, targetClass: undefined, property: ex.property, classes: [ex.class] }, @@ -62,10 +61,7 @@ describe('membership.toTarget', () => { const result = await getStream.array(Readable.from(data).pipe(step)) - equal( - result.toCanonical(), - rdf.dataset([...data, ...expectedMetadata]).toCanonical(), - ) + expect(result).to.deep.contain.all.members([...data, ...expectedMetadata]) }) it('should append meta-data to the data with string parameters', async () => { @@ -92,18 +88,11 @@ describe('membership.toTarget', () => { const result = await getStream.array(Readable.from(data).pipe(step)) - equal( - result.toCanonical(), - rdf.dataset([...data, ...expectedMetadata]).toCanonical(), - ) + expect(result).to.deep.contain.all.members([...data, ...expectedMetadata]) }) }) describe('membership.fromSource', () => { - it('should be a factory', () => { - strictEqual(typeof append, 'function') - }) - const parameterSet = [ { sourceUri: undefined, sourceClass: ex.sourceClass, property: ex.property, classes: [ex.class] }, { sourceUri: ex.sourceUri, sourceClass: undefined, property: ex.property, classes: [ex.class] }, @@ -153,10 +142,7 @@ describe('membership.fromSource', () => { const result = await getStream.array(Readable.from(data).pipe(step)) - equal( - result.toCanonical(), - rdf.dataset([...data, ...expectedMetadata]).toCanonical(), - ) + expect(result).to.deep.contain.all.members([...data, ...expectedMetadata]) }) it('should append meta-data to the data with string parameters', async () => { @@ -183,9 +169,6 @@ describe('membership.fromSource', () => { const result = await getStream.array(Readable.from(data).pipe(step)) - equal( - result.toCanonical(), - rdf.dataset([...data, ...expectedMetadata]).toCanonical(), - ) + expect(result).to.deep.contain.all.members([...data, ...expectedMetadata]) }) }) diff --git a/packages/rdf/test/open.test.js b/packages/rdf/test/open.test.js index 9ef5a0e0..56287eb9 100644 --- a/packages/rdf/test/open.test.js +++ 
b/packages/rdf/test/open.test.js @@ -26,6 +26,7 @@ describe('open', function () { const quadStream = sinon.stub().returns('foo') const env = { fetch: async () => ({ + body: Readable.from(''), quadStream, }), } diff --git a/packages/rdf/test/setGraph.test.js b/packages/rdf/test/setGraph.test.js index 9004eba3..864dd6f8 100644 --- a/packages/rdf/test/setGraph.test.js +++ b/packages/rdf/test/setGraph.test.js @@ -3,14 +3,12 @@ import getStream from 'get-stream' import { isDuplexStream as isDuplex } from 'is-stream' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import setGraph from '../setGraph.js' +import setGraphUnbound from '../setGraph.js' import * as ns from './support/namespaces.js' -describe('setGraph', () => { - it('should be a factory', () => { - strictEqual(typeof setGraph, 'function') - }) +const setGraph = setGraphUnbound.bind({ env: rdf }) +describe('setGraph', () => { it('should return a duplex stream', () => { const stream = setGraph(ns.ex.graph) diff --git a/packages/rdf/test/voidStats.test.js b/packages/rdf/test/voidStats.test.js index 569d5ed1..9b447205 100644 --- a/packages/rdf/test/voidStats.test.js +++ b/packages/rdf/test/voidStats.test.js @@ -1,20 +1,19 @@ -import { equal, strictEqual } from 'node:assert' +import { strictEqual } from 'node:assert' +import { expect } from 'chai' import assertThrows from 'assert-throws-async' import getStream from 'get-stream' import { isDuplexStream as isDuplex } from 'is-stream' import rdf from 'barnard59-env' import { Readable } from 'readable-stream' -import voidStats from '../lib/voidStats.js' +import voidStatsUnbound from '../lib/voidStats.js' + +const voidStats = voidStatsUnbound.bind({ env: rdf }) const ex = rdf.namespace('http://example.org/') /** * https://www.w3.org/TR/void/#statistics */ describe('metadata.voidStats', () => { - it('should be a factory', () => { - strictEqual(typeof voidStats, 'function') - }) - it('throws an error if no argument is given', async () => { await 
assertThrows(async () => { await voidStats() @@ -49,10 +48,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.slice(4).toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result.slice(4)).to.deep.contain.all.members(expectedMetadata) }) it('returns zero counts for no data', async () => { @@ -70,10 +66,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result).to.deep.contain.all.members(expectedMetadata) }) it('returns zero counts for 0 classes', async () => { @@ -94,10 +87,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.slice(2).toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result.slice(2)).to.deep.contain.all.members(expectedMetadata) }) it('uses the named-graph given as parameter', async () => { @@ -116,10 +106,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result).to.deep.contain.all.members(expectedMetadata) }) it('does not include total counts with includeTotals: false', async () => { @@ -139,10 +126,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.slice(2).toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result.slice(2)).to.deep.contain.all.members(expectedMetadata) }) it('describes counts for class partitions', async () => { @@ -173,10 +157,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.slice(4).toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result.slice(4)).to.deep.contain.all.members(expectedMetadata) }) it('describe counts for class partitions with 
no matches', async () => { @@ -202,10 +183,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result).to.deep.contain.all.members(expectedMetadata) }) it('describe counts for property partitions', async () => { @@ -236,10 +214,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.slice(4).toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result.slice(4)).to.deep.contain.all.members(expectedMetadata) }) it('describe counts for property partitions with no matches', async () => { @@ -265,10 +240,7 @@ describe('metadata.voidStats', () => { const result = await getStream.array(inputStream.pipe(sut)) - equal( - result.toCanonical(), - expectedMetadata.toCanonical(), - ) + expect(result).to.deep.contain.all.members(expectedMetadata) }) it('accepts string parameters', async () => { From 4935ef796617db7a5b99c395b688f24d0841453a Mon Sep 17 00:00:00 2001 From: tpluscode Date: Sun, 21 Jan 2024 21:28:42 +0100 Subject: [PATCH 14/28] style: update linting config --- package-lock.json | 40 +++++++++++++++++++++++++++++++++++++--- package.json | 2 +- 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 59ca990a..e38cbaa2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,7 @@ ], "devDependencies": { "@changesets/cli": "^2.26.1", - "@tpluscode/eslint-config": "^0.4.4", + "@tpluscode/eslint-config": "^0.4.5", "@types/node": "^18", "@typescript-eslint/eslint-plugin": "^6.13.2", "@typescript-eslint/parser": "^6.13.2", @@ -6130,9 +6130,10 @@ } }, "node_modules/@tpluscode/eslint-config": { - "version": "0.4.4", + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/@tpluscode/eslint-config/-/eslint-config-0.4.5.tgz", + "integrity": 
"sha512-JF3hkuNhS1Gi/TfMHagmcwLDRscMXAY36+WNwHvX3hnPfM4nrGUHignwSMsHdMdf4aOfhxvXNvucYmw8RbIZxw==", "dev": true, - "license": "MIT", "optionalDependencies": { "@typescript-eslint/eslint-plugin": ">=2", "@typescript-eslint/parser": ">=2", @@ -6146,6 +6147,7 @@ "eslint-plugin-node": ">=11", "eslint-plugin-promise": ">=6", "eslint-plugin-require-extensions": ">=0.1.3", + "eslint-plugin-unused-imports": "^3.0.0", "standard": ">=11" } }, @@ -12503,6 +12505,38 @@ "eslint": "*" } }, + "node_modules/eslint-plugin-unused-imports": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-3.0.0.tgz", + "integrity": "sha512-sduiswLJfZHeeBJ+MQaG+xYzSWdRXoSw61DpU13mzWumCkR0ufD0HmO4kdNokjrkluMHpj/7PJeN35pgbhW3kw==", + "dev": true, + "peer": true, + "dependencies": { + "eslint-rule-composer": "^0.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "^6.0.0", + "eslint": "^8.0.0" + }, + "peerDependenciesMeta": { + "@typescript-eslint/eslint-plugin": { + "optional": true + } + } + }, + "node_modules/eslint-rule-composer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz", + "integrity": "sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=4.0.0" + } + }, "node_modules/eslint-utils": { "version": "2.1.0", "dev": true, diff --git a/package.json b/package.json index 8e2e1db2..5e15893e 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,7 @@ ], "devDependencies": { "@changesets/cli": "^2.26.1", - "@tpluscode/eslint-config": "^0.4.4", + "@tpluscode/eslint-config": "^0.4.5", "@types/node": "^18", "@typescript-eslint/eslint-plugin": "^6.13.2", "@typescript-eslint/parser": "^6.13.2", From 2c64f4da790ceab0b4dd1795a81e55de4cfdacc6 Mon Sep 17 00:00:00 2001 From: 
"github-actions[bot]" Date: Mon, 22 Jan 2024 08:30:48 +0000 Subject: [PATCH 15/28] Version Packages --- .changeset/flat-lamps-thank.md | 7 ------- .changeset/lemon-crabs-fly.md | 5 ----- .changeset/modern-houses-chew.md | 5 ----- .changeset/old-squids-invent.md | 5 ----- .changeset/purple-taxis-camp.md | 5 ----- .changeset/rotten-frogs-teach.md | 5 ----- .changeset/wild-maps-warn.md | 5 ----- .changeset/young-geese-teach.md | 6 ------ packages/base/CHANGELOG.md | 6 ++++++ packages/base/package.json | 4 ++-- packages/cli/CHANGELOG.md | 15 +++++++++++++++ packages/cli/package.json | 8 ++++---- packages/core/CHANGELOG.md | 6 ++++++ packages/core/package.json | 4 ++-- packages/cube/CHANGELOG.md | 16 ++++++++++++++++ packages/cube/package.json | 10 +++++----- packages/env/CHANGELOG.md | 7 +++++++ packages/env/package.json | 2 +- packages/rdf/CHANGELOG.md | 12 ++++++++++++ packages/rdf/package.json | 4 ++-- packages/shacl/CHANGELOG.md | 10 ++++++++++ packages/shacl/package.json | 10 +++++----- packages/validation/CHANGELOG.md | 9 +++++++++ packages/validation/package.json | 8 ++++---- 24 files changed, 106 insertions(+), 68 deletions(-) delete mode 100644 .changeset/flat-lamps-thank.md delete mode 100644 .changeset/lemon-crabs-fly.md delete mode 100644 .changeset/modern-houses-chew.md delete mode 100644 .changeset/old-squids-invent.md delete mode 100644 .changeset/purple-taxis-camp.md delete mode 100644 .changeset/rotten-frogs-teach.md delete mode 100644 .changeset/wild-maps-warn.md delete mode 100644 .changeset/young-geese-teach.md diff --git a/.changeset/flat-lamps-thank.md b/.changeset/flat-lamps-thank.md deleted file mode 100644 index 79a63002..00000000 --- a/.changeset/flat-lamps-thank.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -"barnard59-validation": patch -"barnard59-core": patch -"barnard59": patch ---- - -Improve Windows compatibility (re zazuko/rdf-loader-code#34) diff --git a/.changeset/lemon-crabs-fly.md b/.changeset/lemon-crabs-fly.md deleted file mode 100644 
index 5d831079..00000000 --- a/.changeset/lemon-crabs-fly.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-rdf": minor ---- - -Bundle TypeScript type declarations diff --git a/.changeset/modern-houses-chew.md b/.changeset/modern-houses-chew.md deleted file mode 100644 index 052bec9a..00000000 --- a/.changeset/modern-houses-chew.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-cube": minor ---- - -Shape creation refactoring and improvements diff --git a/.changeset/old-squids-invent.md b/.changeset/old-squids-invent.md deleted file mode 100644 index 72c2c2cc..00000000 --- a/.changeset/old-squids-invent.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-env": patch ---- - -Update `@zazuko/env` to v2 diff --git a/.changeset/purple-taxis-camp.md b/.changeset/purple-taxis-camp.md deleted file mode 100644 index b8327e38..00000000 --- a/.changeset/purple-taxis-camp.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-shacl": patch ---- - -Updated `rdf-validate-shacl` diff --git a/.changeset/rotten-frogs-teach.md b/.changeset/rotten-frogs-teach.md deleted file mode 100644 index d45301c7..00000000 --- a/.changeset/rotten-frogs-teach.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-env": patch ---- - -Exported environment did not include fs functionality diff --git a/.changeset/wild-maps-warn.md b/.changeset/wild-maps-warn.md deleted file mode 100644 index 26e8b53b..00000000 --- a/.changeset/wild-maps-warn.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59": minor ---- - -Support [`code:imports`](https://github.com/zazuko/rdf-transform-graph-imports) when loading pipeline definitions (closes #93) diff --git a/.changeset/young-geese-teach.md b/.changeset/young-geese-teach.md deleted file mode 100644 index 90da8e47..00000000 --- a/.changeset/young-geese-teach.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"barnard59-base": minor -"barnard59-cube": patch ---- - -added batch operation diff --git a/packages/base/CHANGELOG.md b/packages/base/CHANGELOG.md index 87947eb8..635b5db7 100644 --- 
a/packages/base/CHANGELOG.md +++ b/packages/base/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-base +## 2.4.0 + +### Minor Changes + +- 7456a6a: added batch operation + ## 2.3.0 ### Minor Changes diff --git a/packages/base/package.json b/packages/base/package.json index 3fc9b878..156f7e07 100644 --- a/packages/base/package.json +++ b/packages/base/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-base", - "version": "2.3.0", + "version": "2.4.0", "description": "Linked Data pipelines", "main": "index.js", "type": "module", @@ -39,7 +39,7 @@ "@types/readable-stream": "^4.0.10", "@types/readable-to-readable": "^0.1.0", "@types/through2": "^2.0.41", - "barnard59-core": "^5.3.0", + "barnard59-core": "^5.3.1", "chai": "^4.3.10", "get-stream": "^6.0.1", "into-stream": "^7.0.0", diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 8b5eaac3..9a69a2c5 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,20 @@ # barnard59 +## 4.5.0 + +### Minor Changes + +- 68dff05: Support [`code:imports`](https://github.com/zazuko/rdf-transform-graph-imports) when loading pipeline definitions (closes #93) + +### Patch Changes + +- 9d0ce9f: Improve Windows compatibility (re zazuko/rdf-loader-code#34) +- Updated dependencies [9d0ce9f] +- Updated dependencies [c090ff2] +- Updated dependencies [82dbe7e] + - barnard59-core@5.3.1 + - barnard59-env@1.2.2 + ## 4.4.0 ### Minor Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index a6bbba66..9a799ecb 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "barnard59", - "version": "4.4.0", + "version": "4.5.0", "description": "Barnard59 Linked Data pipelines", "type": "module", "main": "index.js", @@ -36,8 +36,8 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "barnard59-core": "5.3.0", - "barnard59-env": "1.2.1", + "barnard59-core": "5.3.1", + "barnard59-env": 
"1.2.2", "commander": "^11.0.0", "pkgscan": "^1.0.24", "find-up": "^7.0.0", @@ -54,7 +54,7 @@ "@types/lodash": "^4.14.202", "@types/readable-stream": "^4.0.10", "approvals": "^6.2.2", - "barnard59-base": "^2.3.0", + "barnard59-base": "^2.4.0", "barnard59-formats": "^2.1.0", "barnard59-graph-store": "^5.1.0", "barnard59-http": "^2.0.0", diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md index 0e877df8..9157e89d 100644 --- a/packages/core/CHANGELOG.md +++ b/packages/core/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-core +## 5.3.1 + +### Patch Changes + +- 9d0ce9f: Improve Windows compatibility (re zazuko/rdf-loader-code#34) + ## 5.3.0 ### Minor Changes diff --git a/packages/core/package.json b/packages/core/package.json index ae8c101b..9602ee98 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-core", - "version": "5.3.0", + "version": "5.3.1", "description": "Core component of Barnard59 Linked Data pipelines", "type": "module", "main": "index.js", @@ -38,7 +38,7 @@ "devDependencies": { "@rdfjs/namespace": "^2.0.0", "@types/readable-stream": "^4.0.9", - "barnard59-env": "^1.2.1", + "barnard59-env": "^1.2.2", "barnard59-http": "^2.0.0", "barnard59-test-support": "^0.0.3", "chai": "^4.3.7", diff --git a/packages/cube/CHANGELOG.md b/packages/cube/CHANGELOG.md index c9df2946..b0f262cd 100644 --- a/packages/cube/CHANGELOG.md +++ b/packages/cube/CHANGELOG.md @@ -1,5 +1,21 @@ # barnard59-cube +## 1.3.0 + +### Minor Changes + +- f2e796c: Shape creation refactoring and improvements + +### Patch Changes + +- 7456a6a: added batch operation +- Updated dependencies [82dbe7e] +- Updated dependencies [c090ff2] +- Updated dependencies [7456a6a] + - barnard59-rdf@3.4.0 + - barnard59-shacl@1.3.1 + - barnard59-base@2.4.0 + ## 1.2.0 ### Minor Changes diff --git a/packages/cube/package.json b/packages/cube/package.json index 87029245..38f93e4c 100644 --- a/packages/cube/package.json +++ b/packages/cube/package.json 
@@ -1,6 +1,6 @@ { "name": "barnard59-cube", - "version": "1.2.0", + "version": "1.3.0", "description": "Build and check RDF cubes in Linked Data pipelines", "type": "module", "main": "index.js", @@ -20,12 +20,12 @@ }, "homepage": "https://github.com/zazuko/barnard59", "dependencies": { - "barnard59-base": "^2.2.0", + "barnard59-base": "^2.4.0", "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", - "barnard59-rdf": "^3.3.0", + "barnard59-rdf": "^3.4.0", "barnard59-sparql": "^2.1.1", - "barnard59-shacl": "^1.2.0", + "barnard59-shacl": "^1.3.1", "external-merge-sort": "^0.1.3", "lodash": "^4.17.21", "rdf-literal": "^1.3.0", @@ -34,7 +34,7 @@ }, "devDependencies": { "@rdfjs/to-ntriples": "^2.0.0", - "barnard59-env": "^1.2.0", + "barnard59-env": "^1.2.2", "chai": "^4.3.7", "get-stream": "^6.0.1", "is-stream": "^3.0.0", diff --git a/packages/env/CHANGELOG.md b/packages/env/CHANGELOG.md index be9008cf..cb5d9b5e 100644 --- a/packages/env/CHANGELOG.md +++ b/packages/env/CHANGELOG.md @@ -1,5 +1,12 @@ # barnard59-env +## 1.2.2 + +### Patch Changes + +- c090ff2: Update `@zazuko/env` to v2 +- 82dbe7e: Exported environment did not include fs functionality + ## 1.2.1 ### Patch Changes diff --git a/packages/env/package.json b/packages/env/package.json index 917e8515..1a746719 100644 --- a/packages/env/package.json +++ b/packages/env/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-env", - "version": "1.2.1", + "version": "1.2.2", "type": "module", "main": "index.js", "scripts": { diff --git a/packages/rdf/CHANGELOG.md b/packages/rdf/CHANGELOG.md index 2559a52b..ee537f32 100644 --- a/packages/rdf/CHANGELOG.md +++ b/packages/rdf/CHANGELOG.md @@ -1,5 +1,17 @@ # barnard59-rdf +## 3.4.0 + +### Minor Changes + +- 82dbe7e: Bundle TypeScript type declarations + +### Patch Changes + +- Updated dependencies [c090ff2] +- Updated dependencies [82dbe7e] + - barnard59-env@1.2.2 + ## 3.3.0 ### Minor Changes diff --git a/packages/rdf/package.json b/packages/rdf/package.json index 
ea2a7349..8cd87aaf 100644 --- a/packages/rdf/package.json +++ b/packages/rdf/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-rdf", - "version": "3.3.0", + "version": "3.4.0", "description": "RDF support for Linked Data pipelines", "main": "index.js", "type": "module", @@ -27,7 +27,7 @@ "homepage": "https://github.com/zazuko/barnard59", "dependencies": { "@rdfjs/fetch": "^3.0.0", - "barnard59-env": "^1.2.0", + "barnard59-env": "^1.2.2", "file-fetch": "^1.7.0", "is-stream": "^3.0.0", "lodash": "^4.17.21", diff --git a/packages/shacl/CHANGELOG.md b/packages/shacl/CHANGELOG.md index e666777d..c3df79a1 100644 --- a/packages/shacl/CHANGELOG.md +++ b/packages/shacl/CHANGELOG.md @@ -1,5 +1,15 @@ # barnard59-shacl +## 1.3.1 + +### Patch Changes + +- c090ff2: Updated `rdf-validate-shacl` +- Updated dependencies [82dbe7e] +- Updated dependencies [7456a6a] + - barnard59-rdf@3.4.0 + - barnard59-base@2.4.0 + ## 1.3.0 ### Minor Changes diff --git a/packages/shacl/package.json b/packages/shacl/package.json index 4c20ba43..8f501245 100644 --- a/packages/shacl/package.json +++ b/packages/shacl/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-shacl", - "version": "1.3.0", + "version": "1.3.1", "description": "A barnard59 step that validates RDF using the Shapes Constraint Language (SHACL).", "type": "module", "main": "index.js", @@ -27,17 +27,17 @@ "is-stream": "^3.0.0", "rdf-validate-shacl": "^0.5.3", "readable-stream": "3 - 4", - "barnard59-base": "^2.3.0", + "barnard59-base": "^2.4.0", "barnard59-formats": "^2.1.0", - "barnard59-rdf": "^3.3.0" + "barnard59-rdf": "^3.4.0" }, "devDependencies": { "@rdfjs/to-ntriples": "^2.0.0", "@tpluscode/rdf-string": "^1.1.2", "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", - "barnard59-core": "^5.3.0", - "barnard59-env": "^1.2.1", + "barnard59-core": "^5.3.1", + "barnard59-env": "^1.2.2", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", diff --git a/packages/validation/CHANGELOG.md 
b/packages/validation/CHANGELOG.md index d6150b0c..e84e685d 100644 --- a/packages/validation/CHANGELOG.md +++ b/packages/validation/CHANGELOG.md @@ -1,5 +1,14 @@ # barnard59-validation +## 0.4.2 + +### Patch Changes + +- 9d0ce9f: Improve Windows compatibility (re zazuko/rdf-loader-code#34) +- Updated dependencies [c090ff2] +- Updated dependencies [82dbe7e] + - barnard59-env@1.2.2 + ## 0.4.1 ### Patch Changes diff --git a/packages/validation/package.json b/packages/validation/package.json index 61a60362..4d046718 100644 --- a/packages/validation/package.json +++ b/packages/validation/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-validation", - "version": "0.4.1", + "version": "0.4.2", "type": "module", "main": "cli.js", "bin": { @@ -15,8 +15,8 @@ "license": "MIT", "devDependencies": { "@jsdevtools/chai-exec": "^2.1.1", - "barnard59-base": "^2.0.1", - "barnard59-core": "^5.2.0", + "barnard59-base": "^2.4.0", + "barnard59-core": "^5.3.1", "barnard59-formats": "^2.1.0", "chai": "^4.3.0", "deep-equal": "^2.0.5", @@ -34,7 +34,7 @@ "homepage": "https://github.com/zazuko/barnard59", "dependencies": { "@rdfjs/namespace": "^2.0.0", - "barnard59-env": "^1.2.0", + "barnard59-env": "^1.2.2", "anylogger": "^1.0.11", "anylogger-console": "^1.0.0", "chalk": "^4.1.0", From 45d5fd2f484ac8d8344a81be9cec4ed74dc8151e Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 22 Jan 2024 09:55:12 +0100 Subject: [PATCH 16/28] move report-summary to shacl package --- package-lock.json | 13 +++++++------ packages/cube/manifest.ttl | 6 ------ packages/cube/package.json | 5 ++--- packages/{cube => shacl}/lib/report.js | 2 +- packages/shacl/manifest.ttl | 8 ++++++++ packages/shacl/package.json | 12 ++++++------ .../{cube => shacl}/pipeline/report-summary.ttl | 0 7 files changed, 24 insertions(+), 22 deletions(-) rename packages/{cube => shacl}/lib/report.js (93%) rename packages/{cube => shacl}/pipeline/report-summary.ttl (100%) diff --git a/package-lock.json b/package-lock.json index 
14f9573c..1553e42e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5443,7 +5443,8 @@ }, "node_modules/@rdfjs/to-ntriples": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@rdfjs/to-ntriples/-/to-ntriples-2.0.0.tgz", + "integrity": "sha512-nDhpfhx6W6HKsy4HjyLp3H1nbrX1CiUCWhWQwKcYZX1s9GOjcoQTwY7GUUbVec0hzdJDQBR6gnjxtENBDt482Q==" }, "node_modules/@rdfjs/traverser": { "version": "0.1.2", @@ -27490,6 +27491,7 @@ "barnard59-shell": "^0.1.0", "barnard59-test-support": "^0.0.3", "chai": "^4.3.7", + "mocha-chai-jest-snapshot": "^1.1.4", "shelljs": "^0.8.4", "strip-ansi": "^7.0.0" } @@ -27723,9 +27725,7 @@ "version": "1.2.0", "license": "MIT", "dependencies": { - "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.2.0", - "barnard59-env": "^1.2.0", "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", "barnard59-rdf": "^3.3.0", @@ -27734,11 +27734,12 @@ "external-merge-sort": "^0.1.3", "lodash": "^4.17.21", "rdf-literal": "^1.3.0", - "rdf-validate-shacl": "^0.5.1", "readable-stream": "3 - 4", "through2": "^4.0.2" }, "devDependencies": { + "@rdfjs/to-ntriples": "^2.0.0", + "barnard59-env": "^1.2.1", "chai": "^4.3.7", "get-stream": "^6.0.1", "is-stream": "^3.0.0", @@ -28207,7 +28208,9 @@ "license": "MIT", "dependencies": { "@rdfjs/fetch": "^3.0.0", + "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.3.0", + "barnard59-env": "^1.2.1", "barnard59-formats": "^2.1.0", "barnard59-rdf": "^3.3.0", "is-stream": "^3.0.0", @@ -28215,12 +28218,10 @@ "readable-stream": "3 - 4" }, "devDependencies": { - "@rdfjs/to-ntriples": "^2.0.0", "@tpluscode/rdf-string": "^1.1.0", "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", "barnard59-core": "^5.3.0", - "barnard59-env": "^1.2.1", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", diff --git a/packages/cube/manifest.ttl b/packages/cube/manifest.ttl index b38b5d22..88f39213 100644 --- a/packages/cube/manifest.ttl +++ b/packages/cube/manifest.ttl @@ 
-63,9 +63,3 @@ b59:source "barnard59-cube/pipeline/fetch-cube.ttl" ; . - - a b59:CliCommand ; - b59:command "report-summary" ; - rdfs:label "Human-readable summary of SHACL validation report" ; - b59:source "barnard59-cube/pipeline/report-summary.ttl" ; -. diff --git a/packages/cube/package.json b/packages/cube/package.json index 56b713c4..ede3f477 100644 --- a/packages/cube/package.json +++ b/packages/cube/package.json @@ -20,8 +20,6 @@ }, "homepage": "https://github.com/zazuko/barnard59", "dependencies": { - "barnard59-env": "^1.2.0", - "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.2.0", "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", @@ -31,11 +29,12 @@ "external-merge-sort": "^0.1.3", "lodash": "^4.17.21", "rdf-literal": "^1.3.0", - "rdf-validate-shacl": "^0.5.1", "readable-stream": "3 - 4", "through2": "^4.0.2" }, "devDependencies": { + "@rdfjs/to-ntriples": "^2.0.0", + "barnard59-env": "^1.2.1", "chai": "^4.3.7", "get-stream": "^6.0.1", "is-stream": "^3.0.0", diff --git a/packages/cube/lib/report.js b/packages/shacl/lib/report.js similarity index 93% rename from packages/cube/lib/report.js rename to packages/shacl/lib/report.js index f4a3ba69..6e0ed066 100644 --- a/packages/cube/lib/report.js +++ b/packages/shacl/lib/report.js @@ -1,5 +1,5 @@ import termToNt from '@rdfjs/to-ntriples' -import rdf from 'barnard59-env' +import rdf from 'barnard59-env' // TODO;remove this and get env from context import ValidationReport from 'rdf-validate-shacl/src/validation-report.js' function validationResultToString(result) { diff --git a/packages/shacl/manifest.ttl b/packages/shacl/manifest.ttl index 6623f7d7..bfb2c0f3 100644 --- a/packages/shacl/manifest.ttl +++ b/packages/shacl/manifest.ttl @@ -24,3 +24,11 @@ rdfs:label "Validates the RDF in standard input against a SHACL document" ; b59:source "barnard59-shacl/pipeline/validate.ttl" ; . 
+ + + + a b59:CliCommand ; + b59:command "report-summary" ; + rdfs:label "Human-readable summary of SHACL validation report" ; + b59:source "barnard59-shacl/pipeline/report-summary.ttl" ; +. diff --git a/packages/shacl/package.json b/packages/shacl/package.json index 6cf10b0b..b4d461dc 100644 --- a/packages/shacl/package.json +++ b/packages/shacl/package.json @@ -24,20 +24,20 @@ "homepage": "https://github.com/zazuko/barnard59", "dependencies": { "@rdfjs/fetch": "^3.0.0", - "is-stream": "^3.0.0", - "rdf-validate-shacl": "^0.5.1", - "readable-stream": "3 - 4", + "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.3.0", + "barnard59-env": "^1.2.1", "barnard59-formats": "^2.1.0", - "barnard59-rdf": "^3.3.0" + "barnard59-rdf": "^3.3.0", + "is-stream": "^3.0.0", + "rdf-validate-shacl": "^0.5.1", + "readable-stream": "3 - 4" }, "devDependencies": { - "@rdfjs/to-ntriples": "^2.0.0", "@tpluscode/rdf-string": "^1.1.0", "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", "barnard59-core": "^5.3.0", - "barnard59-env": "^1.2.1", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", diff --git a/packages/cube/pipeline/report-summary.ttl b/packages/shacl/pipeline/report-summary.ttl similarity index 100% rename from packages/cube/pipeline/report-summary.ttl rename to packages/shacl/pipeline/report-summary.ttl From f8a60312b092663965005c6a36a9c703ca6cfe84 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 22 Jan 2024 10:04:15 +0100 Subject: [PATCH 17/28] fix namespaces --- packages/shacl/manifest.ttl | 2 +- packages/shacl/pipeline/report-summary.ttl | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/packages/shacl/manifest.ttl b/packages/shacl/manifest.ttl index bfb2c0f3..32b16ed1 100644 --- a/packages/shacl/manifest.ttl +++ b/packages/shacl/manifest.ttl @@ -26,7 +26,7 @@ . 
- + a b59:CliCommand ; b59:command "report-summary" ; rdfs:label "Human-readable summary of SHACL validation report" ; diff --git a/packages/shacl/pipeline/report-summary.ttl b/packages/shacl/pipeline/report-summary.ttl index 9f636d5e..3989babe 100644 --- a/packages/shacl/pipeline/report-summary.ttl +++ b/packages/shacl/pipeline/report-summary.ttl @@ -1,14 +1,10 @@ @prefix code: . @prefix p: . -@prefix shacl: . @prefix base: . @prefix n3: . -@prefix ntriples: . @prefix rdf: . -@prefix rdfs: . - -@base . +@base . a p:Pipeline , p:Readable ; p:steps From 4751410eb1e1010e19cd496bd5b23f9c1c0fa159 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 22 Jan 2024 10:10:14 +0100 Subject: [PATCH 18/28] use env from context --- package-lock.json | 2 +- packages/shacl/lib/report.js | 3 +-- packages/shacl/package.json | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 1553e42e..cd7b5fc7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -28210,7 +28210,6 @@ "@rdfjs/fetch": "^3.0.0", "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.3.0", - "barnard59-env": "^1.2.1", "barnard59-formats": "^2.1.0", "barnard59-rdf": "^3.3.0", "is-stream": "^3.0.0", @@ -28222,6 +28221,7 @@ "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", "barnard59-core": "^5.3.0", + "barnard59-env": "^1.2.1", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", diff --git a/packages/shacl/lib/report.js b/packages/shacl/lib/report.js index 6e0ed066..95c1bf0e 100644 --- a/packages/shacl/lib/report.js +++ b/packages/shacl/lib/report.js @@ -1,5 +1,4 @@ import termToNt from '@rdfjs/to-ntriples' -import rdf from 'barnard59-env' // TODO;remove this and get env from context import ValidationReport from 'rdf-validate-shacl/src/validation-report.js' function validationResultToString(result) { @@ -25,5 +24,5 @@ function getMessages(report) { } export function getSummary(dataset) { - return getMessages(new 
ValidationReport(rdf.clownface({ dataset }))) + return getMessages(new ValidationReport(this.env.clownface({ dataset }))) } diff --git a/packages/shacl/package.json b/packages/shacl/package.json index b4d461dc..d4dd299b 100644 --- a/packages/shacl/package.json +++ b/packages/shacl/package.json @@ -26,7 +26,6 @@ "@rdfjs/fetch": "^3.0.0", "@rdfjs/to-ntriples": "^2.0.0", "barnard59-base": "^2.3.0", - "barnard59-env": "^1.2.1", "barnard59-formats": "^2.1.0", "barnard59-rdf": "^3.3.0", "is-stream": "^3.0.0", @@ -38,6 +37,7 @@ "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", "barnard59-core": "^5.3.0", + "barnard59-env": "^1.2.1", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", From 37b5b191ffb0b469c4fae11241903eeb058957de Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Mon, 22 Jan 2024 10:21:44 +0100 Subject: [PATCH 19/28] update docs --- .changeset/clean-icons-thank.md | 6 ++++++ packages/cube/README.md | 14 +++++++------- packages/shacl/readme.md | 9 +++++++++ 3 files changed, 22 insertions(+), 7 deletions(-) create mode 100644 .changeset/clean-icons-thank.md diff --git a/.changeset/clean-icons-thank.md b/.changeset/clean-icons-thank.md new file mode 100644 index 00000000..3dc03ac9 --- /dev/null +++ b/.changeset/clean-icons-thank.md @@ -0,0 +1,6 @@ +--- +"barnard59-shacl": minor +"barnard59-cube": minor +--- + +Additional commands: fetch-cube, fetch-constraint, report-summary diff --git a/packages/cube/README.md b/packages/cube/README.md index 2ee5aac5..08a3f50f 100644 --- a/packages/cube/README.md +++ b/packages/cube/README.md @@ -38,7 +38,7 @@ Pipeline `fetch-metadata` queries a given SPARQL endpoint to retrieve a [concise bounded description](https://docs.stardog.com/query-stardog/#describe-queries) of a given cube and its constraint (excluding the observations). 
```bash -npx barnard59 cube fetch-metadata \ +barnard59 cube fetch-metadata \ --cube https://agriculture.ld.admin.ch/agroscope/PRIFm8t15/2 \ --endpoint https://int.lindas.admin.ch/query ``` @@ -55,7 +55,7 @@ The pipeline reads the metadata from `stdin`, allowing input from a local file ( ```bash cat cube.ttl \ -| npx barnard59 cube check-metadata \ +| barnard59 cube check-metadata \ --profile https://cube.link/v0.1.0/shape/standalone-constraint-constraint ``` SHACL reports for violations are written to `stdout`. @@ -69,7 +69,7 @@ In cases when a remote address give to `--profile` option does not include a cor Pipeline `fetch-observations` queries a given SPARQL endpoint to retrieve the observations of a given cube. ```bash -npx barnard59 cube fetch-observations \ +barnard59 cube fetch-observations \ --cube https://agriculture.ld.admin.ch/agroscope/PRIFm8t15/2 \ --endpoint https://int.lindas.admin.ch/query ``` @@ -83,7 +83,7 @@ The pipeline reads the observations from `stdin`, allowing input from a local fi ```bash cat observations.ttl \ -| npx barnard59 cube check-observations \ +| barnard59 cube check-observations \ --constraint metadata.ttl ``` @@ -98,12 +98,12 @@ To limit the output size, there is also a `maxViolations` option to stop validat ### Report Summary The validation pipelines write a machine-readable [standard](https://www.w3.org/TR/shacl/#validation-report) report to `stdout`. 
-An additional `report-summary` pipeline produces a human-readable summary of this report: +The `barnard59-shacl` package provides an additional `report-summary` pipeline to produce a human-readable summary of this report: ```bash cat observations.ttl \ -| npx barnard59 cube check-observations --constraint metadata.ttl \ -| npx barnard59 cube report-summary +| barnard59 cube check-observations --constraint metadata.ttl \ +| barnard59 shacl report-summary ``` diff --git a/packages/shacl/readme.md b/packages/shacl/readme.md index f10c3abe..cfe7249a 100644 --- a/packages/shacl/readme.md +++ b/packages/shacl/readme.md @@ -137,3 +137,12 @@ cat data.ttl | barnard59 shacl validate --shapes https://example.com/shapes.ttl Shapes can also be path relative to the working directory. In cases when a remote address give to `--shapes` option does not include a correct `content-type` header (or does not provide a `content-type` header at all), the pipeline will fail. In such cases, it is possible to use the `--shapesFormat` option to select the correct RDF parser. Its value must be a media type, such as `text/turtle`. + +### report-summary + +Creates a human-readable summary of the machine-readable [standard](https://www.w3.org/TR/shacl/#validation-report) output of the validation pipeline. 
+```bash +cat data.ttl \ +| barnard59 shacl validate --shapes https://example.com/shapes.ttl \ +| barnard59 shacl report-summary +``` From 452d885a1b95e2cde8b14b99a033ff40cfd37f44 Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Mon, 22 Jan 2024 12:34:27 +0100 Subject: [PATCH 20/28] chore: split chanegsets --- .changeset/clean-icons-thank.md | 3 +-- .changeset/sharp-taxis-listen.md | 5 +++++ 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 .changeset/sharp-taxis-listen.md diff --git a/.changeset/clean-icons-thank.md b/.changeset/clean-icons-thank.md index 3dc03ac9..cfa0a92f 100644 --- a/.changeset/clean-icons-thank.md +++ b/.changeset/clean-icons-thank.md @@ -1,6 +1,5 @@ --- -"barnard59-shacl": minor "barnard59-cube": minor --- -Additional commands: fetch-cube, fetch-constraint, report-summary +Additional commands: `fetch-cube`, `fetch-constraint` diff --git a/.changeset/sharp-taxis-listen.md b/.changeset/sharp-taxis-listen.md new file mode 100644 index 00000000..bc8eda75 --- /dev/null +++ b/.changeset/sharp-taxis-listen.md @@ -0,0 +1,5 @@ +--- +"barnard59-shacl": minor +--- + +Added `report-summary` command From 9178b7eb8cec3e2982aed84d436f2da1192e05c3 Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Mon, 22 Jan 2024 12:14:09 +0100 Subject: [PATCH 21/28] fix: missing dts --- .changeset/healthy-radios-wait.md | 5 +++++ packages/env/.npmignore | 1 - packages/env/package.json | 6 ++++++ 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 .changeset/healthy-radios-wait.md delete mode 100644 packages/env/.npmignore diff --git a/.changeset/healthy-radios-wait.md b/.changeset/healthy-radios-wait.md new file mode 100644 index 00000000..3e0a2b37 --- /dev/null +++ b/.changeset/healthy-radios-wait.md @@ -0,0 +1,5 @@ +--- +"barnard59-env": patch +--- + +.d.ts files were not included in package diff --git a/packages/env/.npmignore b/packages/env/.npmignore deleted file mode 100644 index 6461deec..00000000 --- 
a/packages/env/.npmignore +++ /dev/null @@ -1 +0,0 @@ -*.ts diff --git a/packages/env/package.json b/packages/env/package.json index 1a746719..ebf93a82 100644 --- a/packages/env/package.json +++ b/packages/env/package.json @@ -7,6 +7,12 @@ "build": "tsc", "prepack": "npm run build" }, + "files": [ + "*.js", + "*.d.ts", + "lib/*.js", + "lib/*.d.ts" + ], "dependencies": { "@zazuko/env-node": "^2.1.1", "@zazuko/vocabulary-extras-builders": "^1.1.3" From a2794322725d8f42998ecb940680ade98fcc24cc Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 22 Jan 2024 12:48:45 +0000 Subject: [PATCH 22/28] Version Packages --- .changeset/clean-icons-thank.md | 5 ----- .changeset/healthy-radios-wait.md | 5 ----- .changeset/sharp-taxis-listen.md | 5 ----- packages/cli/CHANGELOG.md | 7 +++++++ packages/cli/package.json | 4 ++-- packages/cube/CHANGELOG.md | 11 +++++++++++ packages/cube/package.json | 6 +++--- packages/env/CHANGELOG.md | 6 ++++++ packages/env/package.json | 2 +- packages/shacl/CHANGELOG.md | 6 ++++++ packages/shacl/package.json | 4 ++-- 11 files changed, 38 insertions(+), 23 deletions(-) delete mode 100644 .changeset/clean-icons-thank.md delete mode 100644 .changeset/healthy-radios-wait.md delete mode 100644 .changeset/sharp-taxis-listen.md diff --git a/.changeset/clean-icons-thank.md b/.changeset/clean-icons-thank.md deleted file mode 100644 index cfa0a92f..00000000 --- a/.changeset/clean-icons-thank.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-cube": minor ---- - -Additional commands: `fetch-cube`, `fetch-constraint` diff --git a/.changeset/healthy-radios-wait.md b/.changeset/healthy-radios-wait.md deleted file mode 100644 index 3e0a2b37..00000000 --- a/.changeset/healthy-radios-wait.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-env": patch ---- - -.d.ts files were not included in package diff --git a/.changeset/sharp-taxis-listen.md b/.changeset/sharp-taxis-listen.md deleted file mode 100644 index bc8eda75..00000000 --- 
a/.changeset/sharp-taxis-listen.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-shacl": minor ---- - -Added `report-summary` command diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 9a69a2c5..2101e713 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,12 @@ # barnard59 +## 4.5.1 + +### Patch Changes + +- Updated dependencies [9178b7e] + - barnard59-env@1.2.3 + ## 4.5.0 ### Minor Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index 9a799ecb..312a8bfe 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "barnard59", - "version": "4.5.0", + "version": "4.5.1", "description": "Barnard59 Linked Data pipelines", "type": "module", "main": "index.js", @@ -37,7 +37,7 @@ "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", "barnard59-core": "5.3.1", - "barnard59-env": "1.2.2", + "barnard59-env": "1.2.3", "commander": "^11.0.0", "pkgscan": "^1.0.24", "find-up": "^7.0.0", diff --git a/packages/cube/CHANGELOG.md b/packages/cube/CHANGELOG.md index b0f262cd..d5dcd1ee 100644 --- a/packages/cube/CHANGELOG.md +++ b/packages/cube/CHANGELOG.md @@ -1,5 +1,16 @@ # barnard59-cube +## 1.4.0 + +### Minor Changes + +- 37b5b19: Additional commands: `fetch-cube`, `fetch-constraint` + +### Patch Changes + +- Updated dependencies [452d885] + - barnard59-shacl@1.4.0 + ## 1.3.0 ### Minor Changes diff --git a/packages/cube/package.json b/packages/cube/package.json index 01fe118f..51549653 100644 --- a/packages/cube/package.json +++ b/packages/cube/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-cube", - "version": "1.3.0", + "version": "1.4.0", "description": "Build and check RDF cubes in Linked Data pipelines", "type": "module", "main": "index.js", @@ -24,7 +24,7 @@ "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", "barnard59-rdf": "^3.4.0", - "barnard59-shacl": "^1.3.1", + "barnard59-shacl": "^1.4.0", "barnard59-sparql": "^2.1.1", 
"external-merge-sort": "^0.1.3", "lodash": "^4.17.21", @@ -34,7 +34,7 @@ }, "devDependencies": { "@rdfjs/to-ntriples": "^2.0.0", - "barnard59-env": "^1.2.2", + "barnard59-env": "^1.2.3", "chai": "^4.3.7", "get-stream": "^6.0.1", "is-stream": "^3.0.0", diff --git a/packages/env/CHANGELOG.md b/packages/env/CHANGELOG.md index cb5d9b5e..1ee3b6d5 100644 --- a/packages/env/CHANGELOG.md +++ b/packages/env/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-env +## 1.2.3 + +### Patch Changes + +- 9178b7e: .d.ts files were not included in package + ## 1.2.2 ### Patch Changes diff --git a/packages/env/package.json b/packages/env/package.json index ebf93a82..72054807 100644 --- a/packages/env/package.json +++ b/packages/env/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-env", - "version": "1.2.2", + "version": "1.2.3", "type": "module", "main": "index.js", "scripts": { diff --git a/packages/shacl/CHANGELOG.md b/packages/shacl/CHANGELOG.md index c3df79a1..a2caac23 100644 --- a/packages/shacl/CHANGELOG.md +++ b/packages/shacl/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-shacl +## 1.4.0 + +### Minor Changes + +- 452d885: Added `report-summary` command + ## 1.3.1 ### Patch Changes diff --git a/packages/shacl/package.json b/packages/shacl/package.json index c5d5e84b..2e759048 100644 --- a/packages/shacl/package.json +++ b/packages/shacl/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-shacl", - "version": "1.3.1", + "version": "1.4.0", "description": "A barnard59 step that validates RDF using the Shapes Constraint Language (SHACL).", "type": "module", "main": "index.js", @@ -37,7 +37,7 @@ "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", "barnard59-core": "^5.3.1", - "barnard59-env": "^1.2.2", + "barnard59-env": "^1.2.3", "barnard59-test-support": "*", "chai": "^4.3.4", "express": "^4.18.2", From e82aa36629c73c060d958195e34324d6429e03f4 Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Mon, 22 Jan 2024 13:57:41 +0100 Subject: [PATCH 23/28] chore: ban rdf-js --- 
.changeset/silver-sloths-design.md | 8 ++++++++ packages/cli/lib/pipeline.js | 8 ++++---- packages/core/lib/cloneTerm.ts | 2 +- packages/core/lib/factory/pipeline.ts | 2 +- packages/graph-store/get.js | 2 +- packages/shacl/report.js | 2 +- packages/shacl/validate.js | 2 +- 7 files changed, 17 insertions(+), 9 deletions(-) create mode 100644 .changeset/silver-sloths-design.md diff --git a/.changeset/silver-sloths-design.md b/.changeset/silver-sloths-design.md new file mode 100644 index 00000000..27215b09 --- /dev/null +++ b/.changeset/silver-sloths-design.md @@ -0,0 +1,8 @@ +--- +"barnard59-graph-store": patch +"barnard59-shacl": patch +"barnard59-core": patch +"barnard59": patch +--- + +Remove references of `rdf-js` types package, repaced with `@rdfjs/types` diff --git a/packages/cli/lib/pipeline.js b/packages/cli/lib/pipeline.js index cccf9c96..db3ca021 100644 --- a/packages/cli/lib/pipeline.js +++ b/packages/cli/lib/pipeline.js @@ -11,7 +11,7 @@ import findPipeline from '../findPipeline.js' import discoverManifests from './discoverManifests.js' /** - * @typedef {Map} OperationMap + * @typedef {Map} OperationMap */ /** @@ -35,12 +35,12 @@ const discoverOperations = async (pipelinePath) => { } /** - * @param {import('rdf-js').DatasetCore} dataset + * @param {import('@rdfjs/types').DatasetCore} dataset * @param {object} options * @param {import('winston').Logger} [options.logger] * @param {OperationMap} [options.knownOperations] * @param {string} options.pipelinePath - * @returns {Promise} + * @returns {Promise} */ export const desugar = async (dataset, { logger, knownOperations, pipelinePath }) => { const operations = knownOperations ?? 
await discoverOperations(pipelinePath) @@ -95,7 +95,7 @@ export const desugar = async (dataset, { logger, knownOperations, pipelinePath } /** * @param {string} filename - * @return {Promise} + * @return {Promise} */ async function fileToDataset(filename) { const stream = rdf.fromFile(filename, { diff --git a/packages/core/lib/cloneTerm.ts b/packages/core/lib/cloneTerm.ts index 0f5c1d54..7dd14e8a 100644 --- a/packages/core/lib/cloneTerm.ts +++ b/packages/core/lib/cloneTerm.ts @@ -1,4 +1,4 @@ -import type { Term } from 'rdf-js' +import type { Term } from '@rdfjs/types' import type { Environment } from 'barnard59-env' function cloneTerm(rdf: Environment, term: T | null | undefined): T | null { diff --git a/packages/core/lib/factory/pipeline.ts b/packages/core/lib/factory/pipeline.ts index 985f3a08..09143900 100644 --- a/packages/core/lib/factory/pipeline.ts +++ b/packages/core/lib/factory/pipeline.ts @@ -1,4 +1,4 @@ -import type { DatasetCore, Term } from 'rdf-js' +import type { DatasetCore, Term } from '@rdfjs/types' import type { GraphPointer, MultiPointer } from 'clownface' import { Logger } from 'winston' import { LoaderRegistry } from 'rdf-loaders-registry' diff --git a/packages/graph-store/get.js b/packages/graph-store/get.js index edfcb409..bd130da3 100644 --- a/packages/graph-store/get.js +++ b/packages/graph-store/get.js @@ -5,7 +5,7 @@ import unpromiseReadable from './lib/unpromiseReadable.js' * @this {import('barnard59-core').Context} * @param {Pick & { * endpoint: string, - * graph: string | import('rdf-js').NamedNode | import('rdf-js').DefaultGraph, + * graph: string | import('@rdfjs/types').NamedNode | import('@rdfjs/types').DefaultGraph, * }} options */ function get({ endpoint, graph, user, password }) { diff --git a/packages/shacl/report.js b/packages/shacl/report.js index 20a60dd0..123062b2 100644 --- a/packages/shacl/report.js +++ b/packages/shacl/report.js @@ -4,7 +4,7 @@ import SHACLValidator from 'rdf-validate-shacl' /** * @this 
{import('barnard59-core').Context} - * @param {import('rdf-js').DatasetCore} ds + * @param {import('@rdfjs/types').DatasetCore} ds * @param {number | undefined} maxViolations * @param {AsyncIterable} iterable */ diff --git a/packages/shacl/validate.js b/packages/shacl/validate.js index 16f70c6f..2795c22f 100644 --- a/packages/shacl/validate.js +++ b/packages/shacl/validate.js @@ -15,7 +15,7 @@ import { ValidationError } from './lib/errors.js' class ValidateChunk extends Transform { /** * @param {import('barnard59-core').Context} context - * @param {import('rdf-js').DatasetCore} shape + * @param {import('@rdfjs/types').DatasetCore} shape * @param {{ maxErrors?: number, onViolation?: OnViolation }} options */ constructor(context, shape, { maxErrors, onViolation } = {}) { From 91f14e4b1ac28accd7e8528ec28fb0b07e8c6c47 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 22 Jan 2024 13:01:41 +0000 Subject: [PATCH 24/28] Version Packages --- .changeset/silver-sloths-design.md | 8 -------- packages/cli/CHANGELOG.md | 8 ++++++++ packages/cli/package.json | 6 +++--- packages/core/CHANGELOG.md | 6 ++++++ packages/core/package.json | 2 +- packages/graph-store/CHANGELOG.md | 6 ++++++ packages/graph-store/package.json | 2 +- packages/shacl/CHANGELOG.md | 6 ++++++ packages/shacl/package.json | 4 ++-- 9 files changed, 33 insertions(+), 15 deletions(-) delete mode 100644 .changeset/silver-sloths-design.md diff --git a/.changeset/silver-sloths-design.md b/.changeset/silver-sloths-design.md deleted file mode 100644 index 27215b09..00000000 --- a/.changeset/silver-sloths-design.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -"barnard59-graph-store": patch -"barnard59-shacl": patch -"barnard59-core": patch -"barnard59": patch ---- - -Remove references of `rdf-js` types package, repaced with `@rdfjs/types` diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 2101e713..c25ce11c 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,13 @@ 
# barnard59 +## 4.5.2 + +### Patch Changes + +- e82aa36: Remove references of `rdf-js` types package, repaced with `@rdfjs/types` +- Updated dependencies [e82aa36] + - barnard59-core@5.3.2 + ## 4.5.1 ### Patch Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index 312a8bfe..c8ff4910 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "barnard59", - "version": "4.5.1", + "version": "4.5.2", "description": "Barnard59 Linked Data pipelines", "type": "module", "main": "index.js", @@ -36,7 +36,7 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "barnard59-core": "5.3.1", + "barnard59-core": "5.3.2", "barnard59-env": "1.2.3", "commander": "^11.0.0", "pkgscan": "^1.0.24", @@ -56,7 +56,7 @@ "approvals": "^6.2.2", "barnard59-base": "^2.4.0", "barnard59-formats": "^2.1.0", - "barnard59-graph-store": "^5.1.0", + "barnard59-graph-store": "^5.1.1", "barnard59-http": "^2.0.0", "barnard59-shell": "^0.1.0", "barnard59-test-support": "^0.0.3", diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md index 9157e89d..804812ae 100644 --- a/packages/core/CHANGELOG.md +++ b/packages/core/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-core +## 5.3.2 + +### Patch Changes + +- e82aa36: Remove references of `rdf-js` types package, repaced with `@rdfjs/types` + ## 5.3.1 ### Patch Changes diff --git a/packages/core/package.json b/packages/core/package.json index 9602ee98..d2b09f53 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-core", - "version": "5.3.1", + "version": "5.3.2", "description": "Core component of Barnard59 Linked Data pipelines", "type": "module", "main": "index.js", diff --git a/packages/graph-store/CHANGELOG.md b/packages/graph-store/CHANGELOG.md index d25d7f76..b880d611 100644 --- a/packages/graph-store/CHANGELOG.md +++ b/packages/graph-store/CHANGELOG.md @@ -1,5 
+1,11 @@ # barnard59-graph-store +## 5.1.1 + +### Patch Changes + +- e82aa36: Remove references of `rdf-js` types package, repaced with `@rdfjs/types` + ## 5.1.0 ### Minor Changes diff --git a/packages/graph-store/package.json b/packages/graph-store/package.json index f78ed5e9..d72b0588 100644 --- a/packages/graph-store/package.json +++ b/packages/graph-store/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-graph-store", - "version": "5.1.0", + "version": "5.1.1", "description": "SPARQL Graph Store Protocol support for Linked Data pipelines", "type": "module", "main": "index.js", diff --git a/packages/shacl/CHANGELOG.md b/packages/shacl/CHANGELOG.md index a2caac23..210633bc 100644 --- a/packages/shacl/CHANGELOG.md +++ b/packages/shacl/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-shacl +## 1.4.1 + +### Patch Changes + +- e82aa36: Remove references of `rdf-js` types package, repaced with `@rdfjs/types` + ## 1.4.0 ### Minor Changes diff --git a/packages/shacl/package.json b/packages/shacl/package.json index 2e759048..36cda3b4 100644 --- a/packages/shacl/package.json +++ b/packages/shacl/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-shacl", - "version": "1.4.0", + "version": "1.4.1", "description": "A barnard59 step that validates RDF using the Shapes Constraint Language (SHACL).", "type": "module", "main": "index.js", @@ -36,7 +36,7 @@ "@tpluscode/rdf-string": "^1.1.2", "@types/rdf-validate-shacl": "^0.4.6", "assert-throws-async": "^3.0.0", - "barnard59-core": "^5.3.1", + "barnard59-core": "^5.3.2", "barnard59-env": "^1.2.3", "barnard59-test-support": "*", "chai": "^4.3.4", From bd687b56d40752a25146ebf660e2f9fcd9acab37 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Tue, 23 Jan 2024 10:03:40 +0100 Subject: [PATCH 25/28] more assertions --- packages/sparql/test/inMemory.test.js | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/packages/sparql/test/inMemory.test.js b/packages/sparql/test/inMemory.test.js index 
d91362d4..45f204b4 100644 --- a/packages/sparql/test/inMemory.test.js +++ b/packages/sparql/test/inMemory.test.js @@ -1,4 +1,4 @@ -import { strictEqual } from 'assert' +import { ok, strictEqual } from 'assert' import getStream from 'get-stream' import { isReadableStream, isWritableStream } from 'is-stream' import { Readable } from 'readable-stream' @@ -22,19 +22,21 @@ describe('query', () => { it('should CONSTRUCT quads', async () => { const chunk1 = [ - rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('0')), - rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('1')), + rdf.quad(ns.ex.s1, ns.ex.p, ns.ex.o1), + rdf.quad(ns.ex.s2, ns.ex.p, ns.ex.s2), ] const chunk2 = [ - rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('2')), + rdf.quad(ns.ex.s3, ns.ex.p, ns.ex.o3), ] - const construct = query('CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }') + const construct = query('CONSTRUCT { ?s ?p "ok" } WHERE { ?s ?p ?s }') const pipeline = Readable.from([chunk1, chunk2]).pipe(construct) const result = await getStream.array(pipeline) + const dataset = rdf.dataset(result.flat()) strictEqual(result.length, 2) - strictEqual(result.flat().length, 3) + strictEqual(dataset.size, 1) + ok(dataset.has(rdf.quad(ns.ex.s2, ns.ex.p, rdf.literal('ok')))) }) }) @@ -53,18 +55,21 @@ describe('update', () => { }) it('should UPDATE quads', async () => { const chunk1 = [ - rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('0')), - rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('1')), + rdf.quad(ns.ex.s1, ns.ex.p, ns.ex.o1), + rdf.quad(ns.ex.s2, ns.ex.p, ns.ex.s2), ] const chunk2 = [ - rdf.quad(ns.ex.s, ns.ex.p, rdf.literal('2')), + rdf.quad(ns.ex.s3, ns.ex.p, ns.ex.o3), ] - const command = update('DELETE { ?s ?p ?o } WHERE { ?s ?p ?o }') + const command = update('DELETE { ?s ?p ?s } WHERE { ?s ?p ?s }') const pipeline = Readable.from([chunk1, chunk2]).pipe(command) const result = await getStream.array(pipeline) strictEqual(result.length, 2) - strictEqual(result.flat().length, 0) + const dataset = rdf.dataset(result.flat()) + strictEqual(dataset.size, 
2) + ok(dataset.has(rdf.quad(ns.ex.s1, ns.ex.p, ns.ex.o1))) + ok(dataset.has(rdf.quad(ns.ex.s3, ns.ex.p, ns.ex.o3))) }) }) From bfb87e36bda64fd38f5180f24c40106546ef9642 Mon Sep 17 00:00:00 2001 From: Giacomo Citi Date: Tue, 23 Jan 2024 10:07:43 +0100 Subject: [PATCH 26/28] Update .changeset/six-flowers-look.md Co-authored-by: Tomasz Pluskiewicz --- .changeset/six-flowers-look.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.changeset/six-flowers-look.md b/.changeset/six-flowers-look.md index 16055a3b..57597cff 100644 --- a/.changeset/six-flowers-look.md +++ b/.changeset/six-flowers-look.md @@ -2,4 +2,4 @@ "barnard59-sparql": minor --- -Add In-Memory SPARQL operations +Add In-Memory SPARQL operations (closes #255) From ce6aca40ccab7573b1384d69b8cd0d8898b1fc1d Mon Sep 17 00:00:00 2001 From: Tomasz Pluskiewicz Date: Fri, 26 Jan 2024 12:40:41 +0100 Subject: [PATCH 27/28] fix: logger error --- .changeset/big-guests-judge.md | 5 +++++ packages/core/lib/defaultLogger.ts | 1 + 2 files changed, 6 insertions(+) create mode 100644 .changeset/big-guests-judge.md diff --git a/.changeset/big-guests-judge.md b/.changeset/big-guests-judge.md new file mode 100644 index 00000000..37e2ce06 --- /dev/null +++ b/.changeset/big-guests-judge.md @@ -0,0 +1,5 @@ +--- +"barnard59-core": patch +--- + +Using anylogger caused errors when calling without a level diff --git a/packages/core/lib/defaultLogger.ts b/packages/core/lib/defaultLogger.ts index def52d7b..0f965af3 100644 --- a/packages/core/lib/defaultLogger.ts +++ b/packages/core/lib/defaultLogger.ts @@ -74,6 +74,7 @@ function factory({ console = true, errorFilename = null, filename = null, level return ((logger)[method] || logger.debug)(...args) } } + facade.log = facade.debug return facade } From c58fb3ae862686bd152afb48b25394b6a7dd7046 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 26 Jan 2024 11:44:43 +0000 Subject: [PATCH 28/28] Version Packages --- .changeset/big-guests-judge.md | 5 ----- 
packages/cli/CHANGELOG.md | 7 +++++++ packages/cli/package.json | 4 ++-- packages/core/CHANGELOG.md | 6 ++++++ packages/core/package.json | 2 +- 5 files changed, 16 insertions(+), 8 deletions(-) delete mode 100644 .changeset/big-guests-judge.md diff --git a/.changeset/big-guests-judge.md b/.changeset/big-guests-judge.md deleted file mode 100644 index 37e2ce06..00000000 --- a/.changeset/big-guests-judge.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"barnard59-core": patch ---- - -Using anylogger caused errors when calling without a level diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index c25ce11c..48bcba90 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,12 @@ # barnard59 +## 4.5.3 + +### Patch Changes + +- Updated dependencies [ce6aca4] + - barnard59-core@5.3.3 + ## 4.5.2 ### Patch Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index c8ff4910..4d1091a0 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "barnard59", - "version": "4.5.2", + "version": "4.5.3", "description": "Barnard59 Linked Data pipelines", "type": "module", "main": "index.js", @@ -36,7 +36,7 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "barnard59-core": "5.3.2", + "barnard59-core": "5.3.3", "barnard59-env": "1.2.3", "commander": "^11.0.0", "pkgscan": "^1.0.24", diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md index 804812ae..1241bdd8 100644 --- a/packages/core/CHANGELOG.md +++ b/packages/core/CHANGELOG.md @@ -1,5 +1,11 @@ # barnard59-core +## 5.3.3 + +### Patch Changes + +- ce6aca4: Using anylogger caused errors when calling without a level + ## 5.3.2 ### Patch Changes diff --git a/packages/core/package.json b/packages/core/package.json index d2b09f53..9e54258a 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "barnard59-core", - 
"version": "5.3.2", + "version": "5.3.3", "description": "Core component of Barnard59 Linked Data pipelines", "type": "module", "main": "index.js",