diff --git a/.changeset/chilly-worms-compare.md b/.changeset/chilly-worms-compare.md new file mode 100644 index 00000000..83fc5595 --- /dev/null +++ b/.changeset/chilly-worms-compare.md @@ -0,0 +1,19 @@ +--- +"barnard59-core": major +--- + +This is a breaking change for users creating and running pipelines programmatically. The `createPipeline` +function exported by the package now requires that an RDF/JS Environment be passed as an argument. +A compatible environment, which includes all the necessary factories, can be imported from the new +`barnard59-env` package. + +```diff +import { createPipeline, run } from 'barnard59-core' ++import env from 'barnard59-env' + +let pointer + +await run(createPipeline(pointer, { ++ env +})) +``` diff --git a/.changeset/early-news-lay.md b/.changeset/early-news-lay.md new file mode 100644 index 00000000..2dc7ef94 --- /dev/null +++ b/.changeset/early-news-lay.md @@ -0,0 +1,5 @@ +--- +"barnard59-base": patch +--- + +Remove redundant dependency on `rdf-utils-fs` diff --git a/.changeset/eleven-pugs-appear.md b/.changeset/eleven-pugs-appear.md new file mode 100644 index 00000000..cab9448d --- /dev/null +++ b/.changeset/eleven-pugs-appear.md @@ -0,0 +1,5 @@ +--- +"barnard59-env": major +--- + +First release diff --git a/.changeset/hungry-mayflies-begin.md b/.changeset/hungry-mayflies-begin.md new file mode 100644 index 00000000..44ff6dd0 --- /dev/null +++ b/.changeset/hungry-mayflies-begin.md @@ -0,0 +1,27 @@ +--- +"barnard59-graph-store": minor +"barnard59-validation": minor +"barnard59-formats": minor +"barnard59-sparql": minor +"barnard59-csvw": minor +"barnard59": minor +"barnard59-test-support": patch +"barnard59-test-e2e": patch +--- + +Removed dependency on any RDF/JS Environment. The CLI provides it at runtime to ensure that all steps +use the same factories. Step implementors are encouraged to use the environment provided by the barnard59 runtime instead of importing one directly.
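+ +For example, a step that creates a dataset would be migrated like this: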
+ +```diff +-import rdf from 'rdf-ext' + +export function myStep() { +- const dataset = rdf.dataset() ++ const dataset = this.env.dataset() + + return dataset.toStream() +} +``` diff --git a/package-lock.json b/package-lock.json index 98916bea..13ba55e6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5026,6 +5026,14 @@ "version": "2.0.0", "license": "MIT" }, + "node_modules/@rdfjs/traverser": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@rdfjs/traverser/-/traverser-0.1.2.tgz", + "integrity": "sha512-EBB/p9LrTMzupZ6VlxtBXyL0bdXFY7e5lAp2tHNwxOoe3kcR6hOJFVWdPT7pdWaSotyXbTIEQxG4PkXMw/OY7w==", + "dependencies": { + "@rdfjs/to-ntriples": "^2.0.0" + } + }, "node_modules/@rdfjs/types": { "version": "1.1.0", "license": "MIT", @@ -5912,7 +5920,8 @@ }, "node_modules/@types/jsonld": { "version": "1.5.9", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/@types/minimist": { "version": "1.2.2", @@ -5939,9 +5948,20 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/rdf-dataset-ext": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/rdf-dataset-ext/-/rdf-dataset-ext-1.0.6.tgz", + "integrity": "sha512-gSKGOXk4z4AQcK+dnb4SUbtXC1sLbYraOueqJ2luHvA//sSC4IfeyjVH9/rXi3tnfyEtNA037WSR8Og4ROfAlA==", + "peer": true, + "dependencies": { + "@types/readable-stream": "*", + "rdf-js": "^4.0.2" + } + }, "node_modules/@types/rdfjs__data-model": { "version": "2.0.4", "license": "MIT", + "peer": true, "dependencies": { "@rdfjs/types": "^1.0.1" } @@ -5949,15 +5969,19 @@ "node_modules/@types/rdfjs__dataset": { "version": "2.0.3", "license": "MIT", + "peer": true, "dependencies": { "@rdfjs/types": "*" } }, "node_modules/@types/rdfjs__environment": { - "version": "0.1.7", - "license": "MIT", + "version": "0.1.9", + "resolved": "https://registry.npmjs.org/@types/rdfjs__environment/-/rdfjs__environment-0.1.9.tgz", + "integrity": "sha512-u7NUsgI2r0X06JXY8lMqcvEpgjVv1eL8lh//CGRsN6ovgA5RKYiTjedU80No21vPU/LEQ3wB8fJ5diMPy+IDDw==", + "peer": true, "dependencies": { "@rdfjs/types": "*", + "@types/node": "*", "@types/rdfjs__data-model": "*", "@types/rdfjs__dataset": "*", "@types/rdfjs__namespace": "*", @@ -5969,6 +5993,7 @@ "node_modules/@types/rdfjs__formats-common": { "version": "3.1.0", "license": "MIT", + "peer": true, "dependencies": { "@types/rdfjs__parser-jsonld": "*", "@types/rdfjs__parser-n3": "*", @@ -5982,6 +6007,7 @@ "node_modules/@types/rdfjs__formats-common/node_modules/rdfxml-streaming-parser": { "version": "2.2.3", "license": "MIT", + "peer": true, "dependencies": { "@rdfjs/types": "*", "@rubensworks/saxes": "^6.0.1", @@ -5996,6 +6022,7 @@ "node_modules/@types/rdfjs__formats-common/node_modules/readable-stream": { "version": "4.4.2", "license": "MIT", + "peer": true, "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", @@ -6017,6 +6044,7 @@ "node_modules/@types/rdfjs__parser-jsonld": { "version": "2.1.1", "license": "MIT", + "peer": true, "dependencies": { "@types/jsonld": "*", "rdf-js": "^4.0.2" } @@ -6025,6 +6053,7 @@ "node_modules/@types/rdfjs__parser-n3": { "version": "2.0.1", "license": "MIT", + "peer": true, "dependencies": { "rdf-js": "^4.0.2" } @@ -6032,6 +6061,7 @@ "node_modules/@types/rdfjs__serializer-jsonld": { "version": "2.0.0", "license": "MIT", + "peer": true, "dependencies": { "rdf-js": "^4.0.2" } @@ -6039,6 +6069,7 @@ "node_modules/@types/rdfjs__serializer-ntriples": { "version": "2.0.1", "license": "MIT", + "peer": true, "dependencies": { "rdf-js": "^4.0.2" } @@ -6046,6 +6077,7 @@
"node_modules/@types/rdfjs__sink-map": { "version": "2.0.1", "license": "MIT", + "peer": true, "dependencies": { "@rdfjs/types": "*" } @@ -6053,6 +6085,7 @@ "node_modules/@types/rdfjs__term-map": { "version": "2.0.5", "license": "MIT", + "peer": true, "dependencies": { "@rdfjs/types": "*" } @@ -6060,6 +6093,16 @@ "node_modules/@types/rdfjs__term-set": { "version": "2.0.4", "license": "MIT", + "peer": true, + "dependencies": { + "@rdfjs/types": "*" + } + }, + "node_modules/@types/rdfjs__traverser": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@types/rdfjs__traverser/-/rdfjs__traverser-0.1.4.tgz", + "integrity": "sha512-bjIl9UwA931xm8hWJWrUpodO9UCii6ypVJpULi7befJpaeOyhEkVuZVOlci+oLPX828jzcd/8E4HxSwHZIXuNA==", + "peer": true, "dependencies": { "@rdfjs/types": "*" } @@ -6357,15 +6400,94 @@ "license": "ISC" }, "node_modules/@zazuko/env": { - "version": "1.1.1", - "license": "MIT", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@zazuko/env/-/env-1.9.0.tgz", + "integrity": "sha512-2aZeNY3R7f0enBy5FY+um8rRuBsYkTxOWtAp7uKt0nw9hbF0CVsw0G1BRbrUYnZ2oWZk4STTJGZZyUQytxjZ2w==", "dependencies": { + "@rdfjs/dataset": "^2.0.1", "@rdfjs/environment": "^0.1.2", + "@rdfjs/traverser": "^0.1.2", "@tpluscode/rdf-ns-builders": "^4.1.0", + "clownface": "^2.0.1", + "get-stream": "^8.0.1", + "rdf-dataset-ext": "^1.1.0" + }, + "peerDependencies": { "@types/clownface": "^2.0.0", + "@types/rdf-dataset-ext": "^1", "@types/rdfjs__environment": "^0.1.7", "@types/rdfjs__formats-common": "^3.1.0", - "clownface": "^2.0.0" + "@types/rdfjs__traverser": "^0.1.3" + } + }, + "node_modules/@zazuko/env-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@zazuko/env-node/-/env-node-1.0.0.tgz", + "integrity": "sha512-FDwIbWbyWOEw4vHd4vbyXCEpRsRXQ497V0K3swh6StO1R06cTyqW0z4vbNJsHRBkMYQLS7xfDlzWSUrHa+owsw==", + "dependencies": { + "@rdfjs/dataset": "^2.0.1", + "@rdfjs/environment": "^0.1.2", + "@rdfjs/fetch-lite": "^3.2.1", + "@rdfjs/formats-common": "^3.1.0", + "@rdfjs/traverser": "^0.1.2", + "@tpluscode/rdf-ns-builders": "^4.1.0", + "@zazuko/env": "^1.9.0", + "@zazuko/rdf-utils-fs": "^3.0.2", + "clownface": "^2.0.1" + } + }, + "node_modules/@zazuko/env-node/node_modules/@rdfjs/formats-common": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@rdfjs/formats-common/-/formats-common-3.1.0.tgz", + "integrity": "sha512-wgz5za/Uls+pttLdLl/aH0m0LQNgjqpWwk9exNs2Smmb2CosynRo4S0+CxeNOVZh4zeUm7oAlr1CK/tyg4Ff6g==", + "dependencies": { + "@rdfjs/parser-jsonld": "^2.0.0", + "@rdfjs/parser-n3": "^2.0.0", + "@rdfjs/serializer-jsonld": "^2.0.0", + "@rdfjs/serializer-ntriples": "^2.0.0", + "@rdfjs/sink-map": "^2.0.0", + "rdfxml-streaming-parser": "^2.2.0" + } + }, + "node_modules/@zazuko/env-node/node_modules/rdfxml-streaming-parser": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/rdfxml-streaming-parser/-/rdfxml-streaming-parser-2.2.3.tgz", + "integrity": "sha512-HoH8urnga+YQ5sDY9ufRb0wg6FvwR284sSXpZ+fJE5X5Oej6dfzkFer81uBNZzyNmzJR1TpMYMznyXEjPMLhCA==", + "dependencies": { + "@rdfjs/types": "*", + "@rubensworks/saxes": "^6.0.1", + "@types/readable-stream": "^2.3.13", + "buffer": "^6.0.3", + "rdf-data-factory": "^1.1.0", + "readable-stream": "^4.0.0", + "relative-to-absolute-iri": "^1.0.0", + "validate-iri": "^1.0.0" + } + }, + "node_modules/@zazuko/env-node/node_modules/readable-stream": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz", + "integrity": 
"sha512-Lk/fICSyIhodxy1IDK2HazkeGjSmezAWX2egdtJnYhtzKEsBPJowlI6F6LPb5tqIQILrMbx22S5o3GuJavPusA==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@zazuko/env/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@zazuko/node-fetch": { @@ -6382,6 +6504,18 @@ "version": "2.0.0", "license": "MIT" }, + "node_modules/@zazuko/rdf-utils-fs": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@zazuko/rdf-utils-fs/-/rdf-utils-fs-3.0.2.tgz", + "integrity": "sha512-27okMMO6qlNqBScdM4r4oorZZvGirT1O7yf29pdyCEF86PkztTfLbIQt/s82nVHQXsyIpLPbkGBoSEaFi9cWfg==", + "dependencies": { + "readable-stream": ">=3.6.0" + }, + "peerDependencies": { + "@rdfjs/types": "*", + "@types/rdfjs__environment": "^0.1.8" + } + }, "node_modules/abab": { "version": "2.0.6", "dev": true, @@ -8610,6 +8744,10 @@ "resolved": "packages/csvw", "link": true }, + "node_modules/barnard59-env": { + "resolved": "packages/env", + "link": true + }, "node_modules/barnard59-formats": { "resolved": "packages/formats", "link": true @@ -9527,8 +9665,9 @@ } }, "node_modules/clownface": { - "version": "2.0.0", - "license": "MIT", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/clownface/-/clownface-2.0.1.tgz", + "integrity": "sha512-8RVfn/LZEl7BTDhIEIamz13Bhm5YahA1qiJigMb0HYGaiKnsVV0PpLBz0kzqyAI0+IzOlYbCLMFOAc1dkQfwgQ==", "dependencies": { "@rdfjs/environment": "^0.1.2" } @@ -22054,11 +22193,12 @@ } }, "node_modules/rdf-dataset-ext": { - "version": "1.0.1", - "license": "MIT", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/rdf-dataset-ext/-/rdf-dataset-ext-1.1.0.tgz", + "integrity": "sha512-CH85RfRKN9aSlbju8T7aM8hgCSWMBsh2eh/tGxUUtWMN+waxi6iFDt8/r4PAEmKaEA82guimZJ4ISbmJ2rvWQg==", "dependencies": { "rdf-canonize": "^3.0.0", - "readable-stream": "^3.4.0" + "readable-stream": "3 - 4" } }, "node_modules/rdf-js": { @@ -26080,7 +26220,7 @@ }, "packages/base": { "name": "barnard59-base", - "version": "1.2.2", + "version": "2.0.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ -26093,13 +26233,12 @@ "through2": "^4.0.2" }, "devDependencies": { - "barnard59-core": "^3.0.2", + "barnard59-core": "^4.0.0", "chai": "^4.3.10", "get-stream": "^6.0.1", "into-stream": "^7.0.0", "isstream": "^0.1.2", "mocha": "^9.0.1", - "rdf-utils-fs": "^2.1.0", "sinon": "^17.0.0" }, "engines": { @@ -26386,7 +26525,7 @@ }, "packages/cli": { "name": "barnard59", - "version": "3.0.2", + "version": "4.0.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.0", @@ -26398,15 +26537,12 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "@zazuko/env": "^1.0.1", - "barnard59-core": "3.0.2", - "clownface": "^2.0.0", + "barnard59-core": "4.0.0", + "barnard59-env": "0.0.0", "commander": "^11.0.0", "find-plugins": "^1.1.7", "is-graph-pointer": "^2.1.0", "lodash": "^4.17.21", - "rdf-dataset-ext": "^1.0.1", - "rdf-utils-fs": "^2.2.0", "readable-stream": "^3.6.0" }, "bin": { @@ -26414,12 +26550,12 @@ }, "devDependencies": { "approvals": "^6.2.2", 
- "barnard59-base": "^1.2.2", - "barnard59-formats": "^1.4.2", - "barnard59-graph-store": "^1.1.1", - "barnard59-http": "^1.1.1", + "barnard59-base": "^2.0.0", + "barnard59-formats": "^2.0.0", + "barnard59-graph-store": "^2.0.0", + "barnard59-http": "^2.0.0", "barnard59-shell": "^0.1.0", - "barnard59-test-support": "^0.0.1", + "barnard59-test-support": "^0.0.2", "chai": "^4.3.7", "shelljs": "^0.8.4", "strip-ansi": "^7.0.0" @@ -26452,13 +26588,10 @@ }, "packages/core": { "name": "barnard59-core", - "version": "3.0.2", + "version": "4.0.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", - "@rdfjs/namespace": "^2.0.0", - "@zazuko/env": "^1.0.1", - "clownface": "^2.0.0", "duplex-to": "^1.0.1", "duplexify": "^4.1.1", "is-graph-pointer": "^2.1.0", @@ -26471,15 +26604,16 @@ "winston": "^3.3.3" }, "devDependencies": { - "barnard59-http": "^1.1.1", - "barnard59-test-support": "^0.0.1", + "@rdfjs/namespace": "^2.0.0", + "barnard59-env": "^0.0.0", + "barnard59-http": "^2.0.0", + "barnard59-test-support": "^0.0.2", "chai": "^4.3.7", "get-stream": "^6.0.1", "husky": "^8.0.3", "lint-staged": "^13.2.2", "mocha": "^10.2.0", "nock": "^13.1.0", - "rdf-utils-fs": "^2.1.0", "sinon": "^15.0.4" } }, @@ -26492,11 +26626,10 @@ }, "packages/csvw": { "name": "barnard59-csvw", - "version": "1.0.2", + "version": "2.0.0", "license": "MIT", "dependencies": { "@rdfjs/fetch": "^3.1.1", - "@zazuko/env": "^1.0.1", "duplex-to": "^1.0.1", "file-fetch": "^1.7.0", "node-fetch": "^3.0.0", @@ -26504,6 +26637,7 @@ "readable-stream": "^3.6.0" }, "devDependencies": { + "barnard59-env": "^0.0.0", "express-as-promise": "^1.2.0", "get-stream": "^7.0.1", "is-stream": "^3.0.0" @@ -26539,9 +26673,17 @@ "url": "https://opencollective.com/node-fetch" } }, + "packages/env": { + "name": "barnard59-env", + "version": "0.0.0", + "license": "MIT", + "dependencies": { + "@zazuko/env-node": "^1.0.0" + } + }, "packages/formats": { "name": "barnard59-formats", - "version": "1.4.2", + "version": "2.0.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ -26550,14 +26692,13 @@ "@rdfjs/serializer-jsonld": "^2.0.0", "@rdfjs/serializer-ntriples": "^2.0.0", "@rdfjs/sink-to-duplex": "^1.0.0", - "@zazuko/env": "^1.1.0", - "barnard59-base": "^1.2.2", - "rdf-dataset-ext": "^1.0.1", + "barnard59-base": "^2.0.0", "rdf-parser-csvw": "^0.15.0", "rdf-parser-csvw-xlsx": "^0.1.0", "rdfxml-streaming-parser": "^1.2.0" }, "devDependencies": { + "barnard59-env": "^0.0.0", "chai": "^4.3.7" }, "engines": { @@ -26566,7 +26707,7 @@ }, "packages/ftp": { "name": "barnard59-ftp", - "version": "1.0.3", + "version": "2.0.0", "license": "MIT", "dependencies": { "ftp": "^0.3.10", @@ -26719,10 +26860,9 @@ }, "packages/graph-store": { "name": "barnard59-graph-store", - "version": "1.1.1", + "version": "2.0.0", "license": "MIT", "dependencies": { - "@rdfjs/data-model": "^2.0.1", "duplex-to": "^1.0.0", "lodash": "^4.17.21", "promise-the-world": "^1.0.1", @@ -26730,8 +26870,8 @@ "sparql-http-client": "^2.4.0" }, "devDependencies": { - "@rdfjs/namespace": "^2.0.0", "@rdfjs/to-ntriples": "^2.0.0", + "@zazuko/env": "^1.9.0", "express-as-promise": "^1.2.0", "get-stream": "^6.0.1", "isstream": "^0.1.2" @@ -26740,13 +26880,13 @@ "node": ">= 14.0.0" }, "peerDependencies": { - "barnard59-base": "^1.2.2", - "barnard59-rdf": "^1.4.3" + "barnard59-base": "^2.0.0", + "barnard59-rdf": "^2.0.0" } }, "packages/http": { "name": "barnard59-http", - "version": "1.1.1", + "version": "2.0.0", "license": "MIT", "dependencies": { "@opentelemetry/api": "^1.0.1", @@ 
-26779,7 +26919,7 @@ }, "packages/rdf": { "name": "barnard59-rdf", - "version": "1.4.5", + "version": "2.0.0", "license": "MIT", "dependencies": { "@rdfjs/fetch": "^3.0.0", @@ -26818,7 +26958,7 @@ }, "packages/s3": { "name": "barnard59-s3", - "version": "0.1.0", + "version": "0.2.0", "license": "MIT", "dependencies": { "@aws-sdk/client-s3": "^3.420.0", @@ -26939,16 +27079,15 @@ }, "packages/sparql": { "name": "barnard59-sparql", - "version": "1.1.2", + "version": "2.0.0", "license": "MIT", "dependencies": { - "@rdfjs/namespace": "^2.0.0", "duplex-to": "^1.0.1", "sparql-http-client": "^2.4.0" }, "devDependencies": { "@tpluscode/rdf-string": "^1.0.3", - "@zazuko/env": "^1.0.1", + "@zazuko/env": "^1.0.0", "get-stream": "^6.0.0", "isstream": "^0.1.2", "mocha": "^9.0.2", @@ -27191,27 +27330,26 @@ }, "packages/validation": { "name": "barnard59-validation", - "version": "0.2.0", + "version": "0.3.0", "license": "MIT", "dependencies": { "@rdfjs/formats-common": "^2", - "@zazuko/env": "^1.1.1", + "@rdfjs/namespace": "^2.0.0", + "@zazuko/env-node": "^1.0.0", "anylogger": "^1.0.11", "anylogger-console": "^1.0.0", "chalk": "^4.1.0", "commander": "^11.0.0", - "rdf-dataset-ext": "^1.0.1", - "rdf-loader-code": "^2.0.0", - "rdf-utils-fs": "^2.1.0" + "rdf-loader-code": "^2.0.0" }, "bin": { "barnard59-validate": "cli.js" }, "devDependencies": { "@jsdevtools/chai-exec": "^2.1.1", - "barnard59-base": "^1.2.1", - "barnard59-core": "^3.0.1", - "barnard59-formats": "^1.4.1", + "barnard59-base": "^2.0.0", + "barnard59-core": "^4.0.0", + "barnard59-formats": "^2.0.0", "chai": "^4.3.0", "deep-equal": "^2.0.5", "esmock": "^2.3.8", @@ -27278,15 +27416,16 @@ }, "test/e2e": { "name": "barnard59-test-e2e", - "version": "0.0.2", + "version": "0.0.3", "dependencies": { "@rdfjs/formats-common": "^2.2.0", "@zazuko/env": "^1.0.1", - "barnard59-base": "^1.2.1", - "barnard59-core": "^3.0.0", - "barnard59-formats": "^1.4.0", - "barnard59-http": "^1.1.0", - "barnard59-test-support": "^0.0.1", + "barnard59-base": "^2.0.0", + "barnard59-core": "^4.0.0", + "barnard59-env": "^0.0.0", + "barnard59-formats": "^2.0.0", + "barnard59-http": "^2.0.0", + "barnard59-test-support": "^0.0.2", "chai": "^4.3.7", "get-stream": "^6.0.0", "nock": "^13.3.1", @@ -27299,7 +27438,7 @@ }, "test/support": { "name": "barnard59-test-support", - "version": "0.0.1", + "version": "0.0.2", "dependencies": { "@rdfjs/namespace": "^2.0.0", "@zazuko/env": "^1.0.1", diff --git a/packages/base/package.json b/packages/base/package.json index 6e4fabdd..bea0c6d9 100644 --- a/packages/base/package.json +++ b/packages/base/package.json @@ -36,7 +36,6 @@ "into-stream": "^7.0.0", "isstream": "^0.1.2", "mocha": "^9.0.1", - "rdf-utils-fs": "^2.1.0", "sinon": "^17.0.0" }, "engines": { diff --git a/packages/cli/findPipeline.js b/packages/cli/findPipeline.js index 42c5b5d1..d09e8d88 100644 --- a/packages/cli/findPipeline.js +++ b/packages/cli/findPipeline.js @@ -1,5 +1,4 @@ -import clownface from 'clownface' -import ns from './lib/namespaces.js' +import rdf from 'barnard59-env' export class MultipleRootsError extends Error { constructor(alternatives) { @@ -10,13 +9,13 @@ export class MultipleRootsError extends Error { } function findPipeline(dataset, iri) { - let ptr = clownface({ dataset }) + let ptr = rdf.clownface({ dataset }) if (iri) { ptr = ptr.namedNode(iri) } - ptr = ptr.has(ns.rdf.type, ns.p.Pipeline) + ptr = ptr.has(rdf.ns.rdf.type, rdf.ns.p.Pipeline) if (ptr.terms.length === 0) { throw new Error('no pipeline found in the dataset') diff --git 
a/packages/cli/lib/cli/dynamicCommands.js b/packages/cli/lib/cli/dynamicCommands.js index a1cfe06f..e5a20a2e 100644 --- a/packages/cli/lib/cli/dynamicCommands.js +++ b/packages/cli/lib/cli/dynamicCommands.js @@ -1,8 +1,7 @@ import module from 'module' -import rdf from '@zazuko/env' +import rdf from 'barnard59-env' import { program } from 'commander' import { parse } from '../pipeline.js' -import ns from '../namespaces.js' import runAction from './runAction.js' import { combine } from './options.js' @@ -12,8 +11,8 @@ const require = module.createRequire(import.meta.url) export async function * discoverCommands(manifests) { for await (const { name, manifest, version } of manifests) { const commands = manifest - .has(rdf.ns.rdf.type, ns.b59.CliCommand) - .has(ns.b59.command) + .has(rdf.ns.rdf.type, rdf.ns.b59.CliCommand) + .has(rdf.ns.b59.command) .toArray() if (!commands.length) { @@ -23,12 +22,12 @@ export async function * discoverCommands(manifests) { const command = program.command(`${name}`).version(version) for (const commandPtr of commands) { - const source = commandPtr.out(ns.b59.source).value - const pipeline = commandPtr.out(ns.b59.pipeline).value + const source = commandPtr.out(rdf.ns.b59.source).value + const pipeline = commandPtr.out(rdf.ns.b59.pipeline).value const { basePath, ptr } = await parse(require.resolve(source), pipeline) const pipelineSubCommand = command - .command(commandPtr.out(ns.b59.command).value) + .command(commandPtr.out(rdf.ns.b59.command).value) if (commandPtr.out(rdf.ns.rdfs.label).value) { pipelineSubCommand.description(commandPtr.out(rdf.ns.rdfs.label).value) } @@ -59,17 +58,17 @@ export async function * discoverCommands(manifests) { function getAnnotatedVariables(ptr) { return ptr - .out(ns.p.variables) - .out(ns.p.variable) + .out(rdf.ns.p.variables) + .out(rdf.ns.p.variable) .toArray() .map(variable => { - const requiredLiteral = variable.out(ns.p.required).term + const requiredLiteral = variable.out(rdf.ns.p.required).term const required = requiredLiteral ? 
!requiredLiteral.equals(FALSE) : true return { required, - name: variable.out(ns.p.name).value, - defaultValue: variable.out(ns.p.value).value, + name: variable.out(rdf.ns.p.name).value, + defaultValue: variable.out(rdf.ns.p.value).value, description: variable.out(rdf.ns.rdfs.label).value, } }) diff --git a/packages/cli/lib/cli/runAction.js b/packages/cli/lib/cli/runAction.js index 2c38f25e..6770adf4 100644 --- a/packages/cli/lib/cli/runAction.js +++ b/packages/cli/lib/cli/runAction.js @@ -2,6 +2,7 @@ import { promisify } from 'util' import { createWriteStream } from 'fs' import { finished, PassThrough } from 'readable-stream' import { SpanStatusCode } from '@opentelemetry/api' +import env from 'barnard59-env' import runner from '../../runner.js' import bufferDebug from './../bufferDebug.js' import tracer from './../tracer.js' @@ -28,7 +29,7 @@ export default async function (ptr, basePath, options = {}) { span.setAttribute('iri', ptr.value) const outputStream = createOutputStream(output) - const { finished: runFinished, pipeline } = await runner(ptr, { + const { finished: runFinished, pipeline } = await runner(ptr, env, { basePath, level, quiet, diff --git a/packages/cli/lib/discoverManifests.js b/packages/cli/lib/discoverManifests.js index a9578f96..01351f6f 100644 --- a/packages/cli/lib/discoverManifests.js +++ b/packages/cli/lib/discoverManifests.js @@ -1,9 +1,7 @@ import * as module from 'module' import fs from 'fs' import findPlugins from 'find-plugins' -import fromFile from 'rdf-utils-fs/fromFile.js' -import fromStream from 'rdf-dataset-ext/fromStream.js' -import rdf from '@zazuko/env' +import rdf from 'barnard59-env' const packagePattern = /^barnard59-(.+)$/ const require = module.createRequire(import.meta.url) @@ -18,7 +16,7 @@ export default async function * () { for (const { pkg } of packages) { const { version } = require(`${pkg.name}/package.json`) - const dataset = await fromStream(rdf.dataset(), fromFile(require.resolve(`${pkg.name}/manifest.ttl`))) + const dataset = await rdf.dataset().import(rdf.fromFile(require.resolve(`${pkg.name}/manifest.ttl`))) yield { name: pkg.name.match(packagePattern)[1], manifest: rdf.clownface({ dataset }), diff --git a/packages/cli/lib/namespaces.js b/packages/cli/lib/namespaces.js deleted file mode 100644 index 46408a4a..00000000 --- a/packages/cli/lib/namespaces.js +++ /dev/null @@ -1,10 +0,0 @@ -import rdf from '@zazuko/env' - -const ns = { - p: rdf.namespace('https://pipeline.described.at/'), - b59: rdf.namespace('https://barnard59.zazuko.com/vocab#'), - rdf: rdf.namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#'), - code: rdf.namespace('https://code.described.at/'), -} - -export default ns diff --git a/packages/cli/lib/pipeline.js b/packages/cli/lib/pipeline.js index 6131b0c1..543e9e23 100644 --- a/packages/cli/lib/pipeline.js +++ b/packages/cli/lib/pipeline.js @@ -1,21 +1,18 @@ import { dirname, resolve } from 'path' -import fromStream from 'rdf-dataset-ext/fromStream.js' -import rdf from '@zazuko/env' +import rdf from 'barnard59-env' import { isGraphPointer } from 'is-graph-pointer' -import fromFile from 'rdf-utils-fs/fromFile.js' import findPipeline from '../findPipeline.js' import discoverManifests from './discoverManifests.js' -import ns from './namespaces.js' const discoverOperations = async () => { const ops = rdf.termMap() for await (const { manifest } of discoverManifests()) { manifest - .has(rdf.ns.rdf.type, ns.p.Operation) + .has(rdf.ns.rdf.type, rdf.ns.p.Operation) .forEach(operation => { - const impl = 
operation.out(ns.code.implementedBy) - const type = impl.out(ns.rdf.type).term - const link = impl.out(ns.code.link).term + const impl = operation.out(rdf.ns.code.implementedBy) + const type = impl.out(rdf.ns.rdf.type).term + const link = impl.out(rdf.ns.code.link).term ops.set(operation.term, { type, link }) }) } @@ -27,10 +24,10 @@ export const desugar = async (dataset, { logger, knownOperations } = {}) => { knownOperations = knownOperations ?? await discoverOperations() const ptr = rdf.clownface({ dataset }) let n = 0 - ptr.has(ns.p.stepList).out(ns.p.stepList).forEach(listPointer => { + ptr.has(rdf.ns.p.stepList).out(rdf.ns.p.stepList).forEach(listPointer => { for (const step of listPointer.list()) { - if (isGraphPointer(step.has(ns.rdf.type, ns.p.Step)) || - isGraphPointer(step.has(ns.rdf.type, ns.p.Pipeline))) { + if (isGraphPointer(step.has(rdf.ns.rdf.type, rdf.ns.p.Step)) || + isGraphPointer(step.has(rdf.ns.rdf.type, rdf.ns.p.Pipeline))) { continue } // we expect a known operation @@ -45,14 +42,14 @@ export const desugar = async (dataset, { logger, knownOperations } = {}) => { const args = step.out(quad.predicate) step.deleteOut(quad.predicate) // keep args only if non-empty - if (!ns.rdf.nil.equals(args.term)) { - step.addOut(ns.code.arguments, args) + if (!rdf.ns.rdf.nil.equals(args.term)) { + step.addOut(rdf.ns.code.arguments, args) } - step.addOut(ns.rdf.type, ns.p.Step) + step.addOut(rdf.ns.rdf.type, rdf.ns.p.Step) const moduleNode = ptr.blankNode(`impl_${n++}`) - moduleNode.addOut(ns.rdf.type, type) - moduleNode.addOut(ns.code.link, link) - step.addOut(ns.code.implementedBy, moduleNode) + moduleNode.addOut(rdf.ns.rdf.type, type) + moduleNode.addOut(rdf.ns.code.link, link) + step.addOut(rdf.ns.code.implementedBy, moduleNode) } }) @@ -60,7 +57,7 @@ export const desugar = async (dataset, { logger, knownOperations } = {}) => { } async function fileToDataset(filename) { - return fromStream(rdf.dataset(), fromFile(filename)) + return rdf.dataset().import(rdf.fromFile(filename)) } export async function parse(filename, iri, { logger } = {}) { diff --git a/packages/cli/package.json b/packages/cli/package.json index 16f646dc..acfa8e55 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -32,15 +32,12 @@ "@opentelemetry/semantic-conventions": "^0.24.0", "@opentelemetry/tracing": "^0.24.0", "@rdfjs/namespace": "^2.0.0", - "@zazuko/env": "^1.0.1", "barnard59-core": "4.0.0", - "clownface": "^2.0.0", + "barnard59-env": "0.0.0", "commander": "^11.0.0", "find-plugins": "^1.1.7", "is-graph-pointer": "^2.1.0", "lodash": "^4.17.21", - "rdf-dataset-ext": "^1.0.1", - "rdf-utils-fs": "^2.2.0", "readable-stream": "^3.6.0" }, "devDependencies": { diff --git a/packages/cli/runner.js b/packages/cli/runner.js index 9be717ad..054ba0b1 100644 --- a/packages/cli/runner.js +++ b/packages/cli/runner.js @@ -2,7 +2,7 @@ import { createPipeline, defaultLogger, run } from 'barnard59-core' import tracer from './lib/tracer.js' -function create(ptr, { basePath, outputStream, logger, variables = new Map(), level = 'error', quiet } = {}) { +function create(ptr, env, { basePath, outputStream, logger, variables = new Map(), level = 'error', quiet } = {}) { return tracer.startActiveSpan('createPipeline', { 'pipeline.id': ptr.value }, async span => { try { if (!logger) { @@ -10,6 +10,7 @@ function create(ptr, { basePath, outputStream, logger, variables = new Map(), le } const pipeline = createPipeline(ptr, { + env, basePath, logger, variables, diff --git 
a/packages/cli/test/lib/cli/discoverCommands.test.js b/packages/cli/test/lib/cli/discoverCommands.test.js index 2456b29e..65d74435 100644 --- a/packages/cli/test/lib/cli/discoverCommands.test.js +++ b/packages/cli/test/lib/cli/discoverCommands.test.js @@ -1,8 +1,7 @@ import { expect } from 'chai' -import rdf from '@zazuko/env' +import rdf from 'barnard59-env' import { discoverCommands } from '../../../lib/cli/dynamicCommands.js' import discoverManifests from '../../../lib/discoverManifests.js' -import ns from '../../../lib/namespaces.js' describe('lib/cli/discoverCommands.js', () => { it('finds graph-store command', async () => { @@ -24,9 +23,9 @@ describe('lib/cli/discoverCommands.js', () => { let command const manifest = rdf.clownface() .blankNode() - .addOut(rdf.ns.rdf.type, ns.b59.CliCommand) - .addOut(ns.b59.command, 'bar') - .addOut(ns.b59.source, 'barnard59/test/support/definitions/variable-with-value.ttl') + .addOut(rdf.ns.rdf.type, rdf.ns.b59.CliCommand) + .addOut(rdf.ns.b59.command, 'bar') + .addOut(rdf.ns.b59.source, 'barnard59/test/support/definitions/variable-with-value.ttl') const manifests = [{ name: 'foo', version: '0.0.0', diff --git a/packages/cli/test/runner.test.js b/packages/cli/test/runner.test.js index 8d990f80..b68d1604 100644 --- a/packages/cli/test/runner.test.js +++ b/packages/cli/test/runner.test.js @@ -1,6 +1,7 @@ import assert, { strictEqual } from 'assert' import { resolve } from 'path' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import runner from '../runner.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url) @@ -8,7 +9,7 @@ const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url) describe('run', () => { it('should emit an error if an error in the pipeline occurs', async () => { const ptr = await loadPipelineDefinition('error') - const run = await runner(ptr, { + const run = await runner(ptr, env, { outputStream: process.stdout, basePath: resolve('test'), }) diff --git a/packages/cli/test/simplify.test.js b/packages/cli/test/simplify.test.js index ec3dc628..3cfb0f4e 100644 --- a/packages/cli/test/simplify.test.js +++ b/packages/cli/test/simplify.test.js @@ -1,7 +1,6 @@ import { resolve } from 'path' import approvals from 'approvals' -import rdf from '@zazuko/env' -import toCanonical from 'rdf-dataset-ext/toCanonical.js' +import rdf from 'barnard59-env' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' import { desugar } from '../lib/pipeline.js' @@ -22,7 +21,7 @@ const check = async name => { const result = await desugar(pipeline.dataset, { knownOperations }) - approvals.verify(dirname, name, toCanonical(result)) + approvals.verify(dirname, name, result.toCanonical()) } describe('simplified syntax', () => { diff --git a/packages/core/lib/cloneTerm.js b/packages/core/lib/cloneTerm.js index a34bb71b..59f3e575 100644 --- a/packages/core/lib/cloneTerm.js +++ b/packages/core/lib/cloneTerm.js @@ -1,6 +1,4 @@ -import rdf from '@zazuko/env' - -function cloneTerm(term) { +function cloneTerm(rdf, term) { if (!term) { return null } diff --git a/packages/core/lib/defaultLoaderRegistry.js b/packages/core/lib/defaultLoaderRegistry.js index 09c116f3..38fec9d9 100644 --- a/packages/core/lib/defaultLoaderRegistry.js +++ b/packages/core/lib/defaultLoaderRegistry.js @@ -7,15 +7,15 @@ import pipelineLoader from './loader/pipeline.js' import variableLoader from './loader/variable.js' import fileLoader from 
'./loader/file.js' -function factory() { +function factory(rdf) { const registry = new LoaderRegistry() ecmaScriptLoader.register(registry) ecmaScriptLiteralLoader.register(registry) - ecmaScriptModuleLoader.register(registry) - pipelineLoader.register(registry) - variableLoader.register(registry) - fileLoader.register(registry) + ecmaScriptModuleLoader.register(registry, rdf) + pipelineLoader.register(registry, rdf) + variableLoader.register(registry, rdf) + fileLoader.register(registry, rdf) return registry } diff --git a/packages/core/lib/factory/arguments.js b/packages/core/lib/factory/arguments.js index 310ad228..dcd164aa 100644 --- a/packages/core/lib/factory/arguments.js +++ b/packages/core/lib/factory/arguments.js @@ -1,6 +1,5 @@ import parseArguments from 'rdf-loader-code/arguments.js' import { unknownVariable } from '../loader/variable.js' -import ns from '../namespaces.js' async function createArguments(ptr, { basePath, context, loaderRegistry, logger, variables }) { const args = await parseArguments(ptr, { basePath, context, loaderRegistry, logger, variables }) @@ -9,7 +8,7 @@ async function createArguments(ptr, { basePath, context, loaderRegistry, logger, // This code maps the unknownVariable symbols to undefined for both kinds of arguments: // list - if (ptr.out(ns.code.arguments).isList()) { + if (ptr.out(context.env.ns.code.arguments).isList()) { return args.map(arg => arg === unknownVariable ? undefined : arg) } diff --git a/packages/core/lib/factory/operation.js b/packages/core/lib/factory/operation.js index 7bbe52cc..b938f80d 100644 --- a/packages/core/lib/factory/operation.js +++ b/packages/core/lib/factory/operation.js @@ -1,10 +1,8 @@ -import ns from '../namespaces.js' - async function createOperation(ptr, { basePath, context, loaderRegistry, logger, variables }) { const result = await loaderRegistry.load(ptr, { basePath, context, loaderRegistry, logger, variables }) if (typeof result !== 'function') { - const links = ptr.out(ns.code.link).values.join(', ') + const links = ptr.out(context.env.ns.code.link).values.join(', ') throw new Error(`Failed to load operation ${ptr.value} (${links})`) } diff --git a/packages/core/lib/factory/pipeline.js b/packages/core/lib/factory/pipeline.js index 33ed94ca..2c388493 100644 --- a/packages/core/lib/factory/pipeline.js +++ b/packages/core/lib/factory/pipeline.js @@ -1,8 +1,6 @@ -import clownface from 'clownface' import defaultLoaderRegistry from '../defaultLoaderRegistry.js' import defaultLogger from '../defaultLogger.js' import metadata from '../metadata.js' -import ns from '../namespaces.js' import Pipeline from '../Pipeline.js' import { VariableMap } from '../VariableMap.js' import createStep from './step.js' @@ -16,7 +14,7 @@ async function createPipelineVariables(ptr, { basePath, context, loaderRegistry, let localVariables = [] if (ptr) { - localVariables = await createVariables(ptr.out(ns.p.variables), { basePath, context, loaderRegistry, logger, variables }) + localVariables = await createVariables(ptr.out(context.env.ns.p.variables), { basePath, context, loaderRegistry, logger, variables }) } return VariableMap.merge(localVariables, variables) @@ -24,8 +22,9 @@ async function createPipelineVariables(ptr, { basePath, context, loaderRegistry, function createPipeline(ptr, { basePath, - context = {}, - loaderRegistry = defaultLoaderRegistry(), + env, + context = { env }, + loaderRegistry = defaultLoaderRegistry(context.env), logger = defaultLogger(), variables = new VariableMap(), } = {}) { @@ -33,13 +32,13 @@ function 
createPipeline(ptr, { throw new Error('the given graph pointer is invalid') } - ptr = clownface({ dataset: ptr.dataset, term: ptr.term }) + ptr = context.env.clownface({ dataset: ptr.dataset, term: ptr.term }) const onInit = async pipeline => { variables = await createPipelineVariables(ptr, { basePath, context, loaderRegistry, logger, variables }) context = await createPipelineContext(ptr, { basePath, context, logger, variables }) - logVariables(ptr, logger, variables) + logVariables(ptr, context, variables) // add pipeline factory with current values as defaults const defaults = { basePath, context, loaderRegistry, logger, variables } @@ -48,8 +47,8 @@ function createPipeline(ptr, { pipeline.variables = variables pipeline.context = context - for (const stepPtr of ptr.out(ns.p.steps).out(ns.p.stepList).list()) { - if (stepPtr.has(ns.rdf.type, ns.p.Pipeline).terms.length > 0) { + for (const stepPtr of ptr.out(context.env.ns.p.steps).out(context.env.ns.p.stepList).list()) { + if (stepPtr.has(context.env.ns.rdf.type, context.env.ns.p.Pipeline).terms.length > 0) { pipeline.addChild(createPipeline(stepPtr, { basePath, context, loaderRegistry, logger, variables })) } else { pipeline.addChild(await createStep(stepPtr, { basePath, context, loaderRegistry, logger, variables })) @@ -57,21 +56,21 @@ function createPipeline(ptr, { } } - return new Pipeline({ basePath, loaderRegistry, logger, onInit, ptr, ...metadata(ptr) }) + return new Pipeline({ basePath, loaderRegistry, logger, onInit, ptr, ...metadata(context.env, ptr) }) } -function logVariables(ptr, logger, variables) { +function logVariables(ptr, { env, logger }, variables) { if (variables.size) { for (const [key, value] of variables) { let level = 'verbose' - if (ptr.out(ns.p.variables).out(ns.p.variable).has(ns.p.name, key).term) { + if (ptr.out(env.ns.p.variables).out(env.ns.p.variable).has(env.ns.p.name, key).term) { level = 'info' } const isSensitive = !!ptr.any() - .has(ns.rdf.type, ns.p.Variable) - .has(ns.p.name, key) - .has(ns.p.sensitive, true) + .has(env.ns.rdf.type, env.ns.p.Variable) + .has(env.ns.p.name, key) + .has(env.ns.p.sensitive, true) .term logger[level](`variable ${key}: ${isSensitive ? 
'***' : value}`, { iri: ptr.value }) diff --git a/packages/core/lib/factory/step.js b/packages/core/lib/factory/step.js index 4b1898d8..344621d0 100644 --- a/packages/core/lib/factory/step.js +++ b/packages/core/lib/factory/step.js @@ -1,6 +1,5 @@ import { SpanStatusCode } from '@opentelemetry/api' import { isStream } from '../isStream.js' -import ns from '../namespaces.js' import PipelineError from '../PipelineError.js' import Step from '../Step.js' import tracer from '../tracer.js' @@ -11,7 +10,7 @@ async function createStep(ptr, { basePath, context, loaderRegistry, logger, vari return tracer.startActiveSpan('createStep', { attributes: { iri: ptr.value } }, async span => { try { const args = await createArguments(ptr, { basePath, context, loaderRegistry, logger, variables }) - const operation = await createOperation(ptr.out(ns.code.implementedBy), { basePath, context, loaderRegistry, logger, variables }) + const operation = await createOperation(ptr.out(context.env.ns.code.implementedBy), { basePath, context, loaderRegistry, logger, variables }) const stream = await operation.apply(context, args) if (!stream || !isStream(stream)) { diff --git a/packages/core/lib/factory/variables.js b/packages/core/lib/factory/variables.js index 23575d98..120e7fba 100644 --- a/packages/core/lib/factory/variables.js +++ b/packages/core/lib/factory/variables.js @@ -1,15 +1,11 @@ -import $rdf from '@zazuko/env' -import ns from '../namespaces.js' import { VariableMap } from '../VariableMap.js' -const FALSE = $rdf.literal('false', ns.xsd.boolean) - async function createVariables(ptr, { basePath, context, loaderRegistry, logger }) { const variables = new VariableMap() - for (const variablePtr of ptr.out(ns.p.variable).toArray()) { + for (const variablePtr of ptr.out(context.env.ns.p.variable).toArray()) { let variable - if (variablePtr.out(ns.p.value).term) { + if (variablePtr.out(context.env.ns.p.value).term) { variable = await loaderRegistry.load(variablePtr, { basePath, context, logger, variables }) if (!variable) { @@ -17,8 +13,8 @@ async function createVariables(ptr, { basePath, context, loaderRegistry, logger } } - const optional = variablePtr.out(ns.p.required).term?.equals(FALSE) || false - variables.set(variablePtr.out(ns.p.name).value, variable?.value, { optional }) + const optional = variablePtr.out(context.env.ns.p.required).term?.equals(context.env.FALSE) || false + variables.set(variablePtr.out(context.env.ns.p.name).value, variable?.value, { optional }) } return variables diff --git a/packages/core/lib/loader/file.js b/packages/core/lib/loader/file.js index e756d338..891ee3c6 100644 --- a/packages/core/lib/loader/file.js +++ b/packages/core/lib/loader/file.js @@ -2,7 +2,6 @@ import { resolve } from 'path' import * as fs from 'fs' import { isLiteral } from 'is-graph-pointer' import untildify from 'untildify' -import ns from '../namespaces.js' export default function loader(ptr, { basePath, variables }) { if (isLiteral(ptr)) { @@ -13,6 +12,6 @@ export default function loader(ptr, { basePath, variables }) { } } -loader.register = (registry) => { - registry.registerLiteralLoader(ns.p.FileContents, loader) +loader.register = (registry, rdf) => { + registry.registerLiteralLoader(rdf.ns.p.FileContents, loader) } diff --git a/packages/core/lib/loader/pipeline.js b/packages/core/lib/loader/pipeline.js index 9edcb6ad..defdaff5 100644 --- a/packages/core/lib/loader/pipeline.js +++ b/packages/core/lib/loader/pipeline.js @@ -1,16 +1,15 @@ import createPipeline from '../factory/pipeline.js' -import ns from 
'../namespaces.js' async function loader(ptr, { basePath, context = {}, loaderRegistry, variables } = {}) { - if (ptr.has(ns.rdf.type, ns.p.Pipeline).terms.length > 0) { + if (ptr.has(context.env.ns.rdf.type, context.env.ns.p.Pipeline).terms.length > 0) { return createPipeline(ptr, { basePath, context, loaderRegistry, logger: context.logger, variables }).stream } throw new Error('Unrecognized or missing pipeline type') } -loader.register = registry => { - registry.registerNodeLoader(ns.p.Pipeline, loader) +loader.register = (registry, env) => { + registry.registerNodeLoader(env.ns.p.Pipeline, loader) } export default loader diff --git a/packages/core/lib/loader/variable.js b/packages/core/lib/loader/variable.js index 8a2cbb78..d0944314 100644 --- a/packages/core/lib/loader/variable.js +++ b/packages/core/lib/loader/variable.js @@ -1,11 +1,9 @@ -import rdf from '@zazuko/env' import cloneTerm from '../cloneTerm.js' -import ns from '../namespaces.js' import { VariableMap } from '../VariableMap.js' const unknownVariable = Symbol('unknown-variable') -function loader(ptr, { variables = new VariableMap() } = {}) { +function loader(rdf, ptr, { variables = new VariableMap() } = {}) { if (ptr.term.termType === 'Literal') { const value = variables.get(ptr.value) @@ -18,7 +16,7 @@ function loader(ptr, { variables = new VariableMap() } = {}) { return value } - const name = ptr.out(ns.p.name).value + const name = ptr.out(rdf.ns.p.name).value let term // if the variables from the arguments contains it ... @@ -27,7 +25,7 @@ function loader(ptr, { variables = new VariableMap() } = {}) { term = rdf.literal(variables.get(name)) } else { // ... otherwise load the term from the dataset - term = cloneTerm(ptr.out(ns.p.value).term) + term = cloneTerm(rdf, ptr.out(rdf.ns.p.value).term) } // if there is a value, attached the name to it @@ -38,9 +36,9 @@ function loader(ptr, { variables = new VariableMap() } = {}) { return term } -loader.register = registry => { - registry.registerNodeLoader(ns.p.Variable, loader) - registry.registerLiteralLoader(ns.p.VariableName, loader) +loader.register = (registry, rdf) => { + registry.registerNodeLoader(rdf.ns.p.Variable, loader.bind(null, rdf)) + registry.registerLiteralLoader(rdf.ns.p.VariableName, loader.bind(null, rdf)) } export default loader diff --git a/packages/core/lib/metadata.js b/packages/core/lib/metadata.js index a20632e1..df169ed9 100644 --- a/packages/core/lib/metadata.js +++ b/packages/core/lib/metadata.js @@ -1,10 +1,8 @@ -import ns from './namespaces.js' - -function metadata(ptr) { - const readableObjectMode = Boolean(ptr.has(ns.rdf.type, ns.p.ReadableObjectMode).term) - const readable = Boolean(ptr.has(ns.rdf.type, ns.p.Readable).term) || readableObjectMode - const writableObjectMode = Boolean(ptr.has(ns.rdf.type, ns.p.WritableObjectMode).term) - const writable = Boolean(ptr.has(ns.rdf.type, ns.p.Writable).term) || writableObjectMode +function metadata(rdf, ptr) { + const readableObjectMode = Boolean(ptr.has(rdf.ns.rdf.type, rdf.ns.p.ReadableObjectMode).term) + const readable = Boolean(ptr.has(rdf.ns.rdf.type, rdf.ns.p.Readable).term) || readableObjectMode + const writableObjectMode = Boolean(ptr.has(rdf.ns.rdf.type, rdf.ns.p.WritableObjectMode).term) + const writable = Boolean(ptr.has(rdf.ns.rdf.type, rdf.ns.p.Writable).term) || writableObjectMode return { readable, diff --git a/packages/core/lib/namespaces.js b/packages/core/lib/namespaces.js deleted file mode 100644 index d0698884..00000000 --- a/packages/core/lib/namespaces.js +++ /dev/null @@ -1,10 
+0,0 @@ -import namespace from '@rdfjs/namespace' - -const ns = { - code: namespace('https://code.described.at/'), - p: namespace('https://pipeline.described.at/'), - rdf: namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#'), - xsd: namespace('http://www.w3.org/2001/XMLSchema#'), -} - -export default ns diff --git a/packages/core/package.json b/packages/core/package.json index e3671f9b..22ece641 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -21,9 +21,6 @@ "homepage": "https://github.com/zazuko/barnard59", "dependencies": { "@opentelemetry/api": "^1.0.1", - "@rdfjs/namespace": "^2.0.0", - "@zazuko/env": "^1.0.1", - "clownface": "^2.0.0", "duplex-to": "^1.0.1", "duplexify": "^4.1.1", "is-graph-pointer": "^2.1.0", @@ -36,6 +33,8 @@ "winston": "^3.3.3" }, "devDependencies": { + "@rdfjs/namespace": "^2.0.0", + "barnard59-env": "^0.0.0", "barnard59-http": "^2.0.0", "barnard59-test-support": "^0.0.2", "chai": "^4.3.7", @@ -44,7 +43,6 @@ "lint-staged": "^13.2.2", "mocha": "^10.2.0", "nock": "^13.1.0", - "rdf-utils-fs": "^2.1.0", "sinon": "^15.0.4" }, "mocha": { diff --git a/packages/core/test/Pipeline.test.js b/packages/core/test/Pipeline.test.js index a418e78d..c445b159 100644 --- a/packages/core/test/Pipeline.test.js +++ b/packages/core/test/Pipeline.test.js @@ -4,6 +4,7 @@ import { promisify } from 'util' import getStream from 'get-stream' import stream from 'readable-stream' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import createPipeline from '../lib/factory/pipeline.js' import { isStream } from '../lib/isStream.js' import Pipeline from '../lib/Pipeline.js' @@ -21,7 +22,7 @@ describe('Pipeline', () => { it('should process the given pipeline definition', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) const result = await getStream(pipeline.stream) @@ -31,7 +32,7 @@ describe('Pipeline', () => { it('should support writable pipelines', async () => { const ptr = await loadPipelineDefinition('write') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) pipeline.stream.end('test') @@ -43,7 +44,7 @@ describe('Pipeline', () => { it('should support nested pipelines', async () => { const ptr = await loadPipelineDefinition('nested') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) const result = await getStream(pipeline.stream) @@ -53,7 +54,7 @@ describe('Pipeline', () => { it('should emit error when nested pipeline step errors immediately', async () => { const ptr = await loadPipelineDefinition('nestedErrorBeforeInit') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ -65,8 +66,9 @@ describe('Pipeline', () => { const result = [] const pipeline = createPipeline(ptr, { + env, basePath: resolve('test'), - context: { result }, + context: { env, result }, }) await getStream(pipeline.stream) @@ -77,7 +79,7 @@ describe('Pipeline', () => { it('should assign the pipeline stream to the .stream property', async () => { const ptr = await loadPipelineDefinition('nested') - const pipeline = 
createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(isStream(pipeline.stream), true) }) @@ -85,7 +87,7 @@ describe('Pipeline', () => { it('should assign the pipeline to the .pipeline property of the stream', async () => { const ptr = await loadPipelineDefinition('nested') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(pipeline.stream.pipeline, pipeline) }) @@ -93,7 +95,7 @@ describe('Pipeline', () => { it('should have a basePath string property', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(typeof pipeline.basePath, 'string') }) @@ -101,7 +103,7 @@ describe('Pipeline', () => { it('should have a context object property', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(typeof pipeline.context, 'object') }) @@ -109,7 +111,7 @@ describe('Pipeline', () => { it('should emit an error if the Pipeline contains no steps', async () => { const ptr = await loadPipelineDefinition('empty') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ -119,7 +121,7 @@ describe('Pipeline', () => { it('should have a ptr clownface property', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(typeof pipeline.ptr, 'object') strictEqual(typeof pipeline.ptr.any, 'function') @@ -129,7 +131,7 @@ describe('Pipeline', () => { it('should have a ptr variables Map property', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(pipeline.variables instanceof Map, true) }) @@ -137,7 +139,7 @@ describe('Pipeline', () => { it('should emit an error if an operation returns an invalid stream', async () => { const ptr = await loadPipelineDefinition('step-invalid') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ -147,7 +149,7 @@ describe('Pipeline', () => { it('should emit an error if an operation rejects with error', async () => { const ptr = await loadPipelineDefinition('step-operation-error') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ -157,7 +159,7 @@ describe('Pipeline', () => { it('should emit step stream errors', async () => { const ptr = await loadPipelineDefinition('step-stream-error') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ 
-167,7 +169,7 @@ describe('Pipeline', () => { it('should catch and emit step stream errors', async () => { const ptr = await loadPipelineDefinition('step-stream-throw') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ -178,7 +180,7 @@ describe('Pipeline', () => { it('should emit an end event', async () => { const ptr = await loadPipelineDefinition('plain') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) const promise = eventToPromise(pipeline.stream, 'end') @@ -192,7 +194,7 @@ describe('Pipeline', () => { it('should emit an end event', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) const promise = eventToPromise(pipeline.stream, 'end') @@ -204,7 +206,7 @@ describe('Pipeline', () => { it('should emit an error if the last step doesn\'t have a readable interface', async () => { const ptr = await loadPipelineDefinition('read-step-not-read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await rejects(async () => { await getStream(pipeline.stream) @@ -216,7 +218,7 @@ describe('Pipeline', () => { it('should emit an finish event', async () => { const ptr = await loadPipelineDefinition('write') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) const promise = eventToPromise(pipeline.stream, 'finish') diff --git a/packages/core/test/cloneTerm.test.js b/packages/core/test/cloneTerm.test.js index 1c4f959b..aa37a6ef 100644 --- a/packages/core/test/cloneTerm.test.js +++ b/packages/core/test/cloneTerm.test.js @@ -1,8 +1,10 @@ import { notStrictEqual, strictEqual, throws } from 'assert' -import rdf from '@zazuko/env' -import cloneTerm from '../lib/cloneTerm.js' +import rdf from 'barnard59-env' +import cloneTermUnbound from '../lib/cloneTerm.js' import ns from './support/namespaces.js' +const cloneTerm = cloneTermUnbound.bind(null, rdf) + describe('cloneTerm', () => { it('should be a function', () => { strictEqual(typeof cloneTerm, 'function') diff --git a/packages/core/test/defaultLoaderRegistry.test.js b/packages/core/test/defaultLoaderRegistry.test.js index 2b3a34eb..43460a94 100644 --- a/packages/core/test/defaultLoaderRegistry.test.js +++ b/packages/core/test/defaultLoaderRegistry.test.js @@ -1,51 +1,40 @@ import { strictEqual } from 'assert' +import env from 'barnard59-env' import defaultLoaderRegistry from '../lib/defaultLoaderRegistry.js' import ns from './support/namespaces.js' describe('defaultLoaderRegistry', () => { - it('should be a function', () => { - strictEqual(typeof defaultLoaderRegistry, 'function') + let registry + + beforeEach(() => { + registry = defaultLoaderRegistry(env) }) it('should contain the EcmaScript literal loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._literalLoaders.get(ns.code.EcmaScript.value), 'function') }) it('should contain the EcmaScriptTemplateLiteral literal loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._literalLoaders.get(ns.code.EcmaScriptTemplateLiteral.value), 'function') 
}) it('should contain the VariableName literal loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._literalLoaders.get(ns.p.VariableName.value), 'function') }) it('should contain the EcmaScript node loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._nodeLoaders.get(ns.code.EcmaScript.value), 'function') }) it('should contain the EcmaScriptModule node loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._nodeLoaders.get(ns.code.EcmaScriptModule.value), 'function') }) it('should contain the Pipeline node loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._nodeLoaders.get(ns.p.Pipeline.value), 'function') }) it('should contain the Variable node loader', () => { - const registry = defaultLoaderRegistry() - strictEqual(typeof registry._nodeLoaders.get(ns.p.Variable.value), 'function') }) }) diff --git a/packages/core/test/factory/arguments.test.js b/packages/core/test/factory/arguments.test.js index e5a913b6..64a690e9 100644 --- a/packages/core/test/factory/arguments.test.js +++ b/packages/core/test/factory/arguments.test.js @@ -1,11 +1,13 @@ import { deepStrictEqual, strictEqual } from 'assert' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import defaultLoaderRegistry from '../../lib/defaultLoaderRegistry.js' import createArguments from '../../lib/factory/arguments.js' import ns from '../support/namespaces.js' import { VariableMap } from '../../lib/VariableMap.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, '../support/definitions') +const context = { env } describe('factory/arguments', () => { it('should be a method', () => { @@ -16,7 +18,10 @@ describe('factory/arguments', () => { const definition = await loadPipelineDefinition('arguments') const ptr = [...definition.node(ns.ex.keyValues).out(ns.p.steps).out(ns.p.stepList).list()][0] - const args = await createArguments(ptr, { loaderRegistry: defaultLoaderRegistry() }) + const args = await createArguments(ptr, { + context, + loaderRegistry: defaultLoaderRegistry(env), + }) deepStrictEqual(args, [{ a: '1', b: '2' }]) }) @@ -28,7 +33,8 @@ describe('factory/arguments', () => { variables.set('a', undefined, { optional: true }) const args = await createArguments(ptr, { - loaderRegistry: defaultLoaderRegistry(), + context, + loaderRegistry: defaultLoaderRegistry(env), variables, }) @@ -39,7 +45,10 @@ describe('factory/arguments', () => { const definition = await loadPipelineDefinition('arguments') const ptr = [...definition.node(ns.ex.list).out(ns.p.steps).out(ns.p.stepList).list()][0] - const args = await createArguments(ptr, { loaderRegistry: defaultLoaderRegistry() }) + const args = await createArguments(ptr, { + context, + loaderRegistry: defaultLoaderRegistry(env), + }) deepStrictEqual(args, ['a', 'b']) }) @@ -51,7 +60,8 @@ describe('factory/arguments', () => { variables.set('a', undefined, { optional: true }) const args = await createArguments(ptr, { - loaderRegistry: defaultLoaderRegistry(), + context, + loaderRegistry: defaultLoaderRegistry(env), variables, }) @@ -63,8 +73,8 @@ describe('factory/arguments', () => { const ptr = [...definition.node(ns.ex.variable).out(ns.p.steps).out(ns.p.stepList).list()][0] const args = await createArguments(ptr, { - context: { variables: new Map([['abcd', '1234']]) }, - loaderRegistry: defaultLoaderRegistry(), + context: { env, variables: new 
Map([['abcd', '1234']]) }, + loaderRegistry: defaultLoaderRegistry(env), }) deepStrictEqual(args, ['1234']) diff --git a/packages/core/test/factory/operation.test.js b/packages/core/test/factory/operation.test.js index 8c7e1ca6..856c330e 100644 --- a/packages/core/test/factory/operation.test.js +++ b/packages/core/test/factory/operation.test.js @@ -1,12 +1,14 @@ import { strictEqual } from 'assert' import { resolve } from 'path' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import defaultLoaderRegistry from '../../lib/defaultLoaderRegistry.js' import createOperation from '../../lib/factory/operation.js' import ns from '../support/namespaces.js' import argsToStream from '../support/operations/argsToStream.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, '../support/definitions') +const context = { env } describe('factory/operation', () => { it('should be a method', () => { @@ -18,8 +20,9 @@ describe('factory/operation', () => { const ptr = [...definition.node(ns.ex('')).out(ns.p.steps).out(ns.p.stepList).list()][0].out(ns.code.implementedBy) const operation = await createOperation(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), }) strictEqual(operation, argsToStream) diff --git a/packages/core/test/factory/pipeline.test.js b/packages/core/test/factory/pipeline.test.js index 561513cf..5dae7cc1 100644 --- a/packages/core/test/factory/pipeline.test.js +++ b/packages/core/test/factory/pipeline.test.js @@ -4,6 +4,7 @@ import { expect } from 'chai' import sinon from 'sinon' import getStream from 'get-stream' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import createPipeline from '../../lib/factory/pipeline.js' import { isReadable, isReadableObjectMode, isWritable, isWritableObjectMode } from '../../lib/isStream.js' import Pipeline from '../../lib/Pipeline.js' @@ -18,7 +19,7 @@ describe('factory/pipeline', () => { it('should return a Pipeline object', async () => { const definition = await loadPipelineDefinition('plain') - const pipeline = createPipeline(definition, { basePath: resolve('test') }) + const pipeline = createPipeline(definition, { env, basePath: resolve('test') }) strictEqual(pipeline instanceof Pipeline, true) }) @@ -27,7 +28,7 @@ describe('factory/pipeline', () => { const definition = await loadPipelineDefinition('plain') const ptr = { dataset: definition.dataset, term: definition.term } - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) await pipeline.init() strictEqual(pipeline.children.length, 2) @@ -37,7 +38,7 @@ describe('factory/pipeline', () => { const ptr = (await loadPipelineDefinition('read')).any() throws(() => { - createPipeline(ptr, { basePath: resolve('test') }) + createPipeline(ptr, { env, basePath: resolve('test') }) }) }) @@ -45,7 +46,7 @@ describe('factory/pipeline', () => { const ptr = (await loadPipelineDefinition('read')) throws(() => { - createPipeline({ term: ptr.term }, { basePath: resolve('test') }) + createPipeline({ term: ptr.term }, { env, basePath: resolve('test') }) }) }) @@ -53,16 +54,16 @@ describe('factory/pipeline', () => { const basePath = resolve('test') const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath }) + const pipeline = 
createPipeline(ptr, { env, basePath }) strictEqual(pipeline.basePath, basePath) }) it('should use the given context', async () => { - const context = { abc: 'def' } + const context = { abc: 'def', env } const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test'), context }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test'), context }) await getStream(pipeline.stream) strictEqual(pipeline.context.abc, context.abc) @@ -71,7 +72,7 @@ describe('factory/pipeline', () => { it('should create a pipeline with readable interface matching the rdf:type', async () => { const ptr = await loadPipelineDefinition('read') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(isReadable(pipeline.stream), true) strictEqual(!isReadableObjectMode(pipeline.stream), true) @@ -80,7 +81,7 @@ describe('factory/pipeline', () => { it('should create a pipeline with readable object mode interface matching the rdf:type', async () => { const ptr = await loadPipelineDefinition('read-object-mode') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(isReadableObjectMode(pipeline.stream), true) }) @@ -88,7 +89,7 @@ describe('factory/pipeline', () => { it('should create a pipeline with writable interface matching the rdf:type', async () => { const ptr = await loadPipelineDefinition('write') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(isWritable(pipeline.stream), true) strictEqual(!isWritableObjectMode(pipeline.stream), true) @@ -97,7 +98,7 @@ describe('factory/pipeline', () => { it('should create a pipeline with writable object mode interface matching the rdf:type', async () => { const ptr = await loadPipelineDefinition('write-object-mode') - const pipeline = createPipeline(ptr, { basePath: resolve('test') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('test') }) strictEqual(isWritableObjectMode(pipeline.stream), true) }) @@ -105,7 +106,7 @@ describe('factory/pipeline', () => { it('should attach createPipeline to the context', async () => { const definition = await loadPipelineDefinition('plain') - const pipeline = createPipeline(definition, { basePath: resolve('test') }) + const pipeline = createPipeline(definition, { env, basePath: resolve('test') }) await pipeline.init() strictEqual(typeof pipeline.context.createPipeline, 'function') @@ -124,6 +125,7 @@ describe('factory/pipeline', () => { // when const pipeline = createPipeline(definition, { + env, basePath: resolve('test'), logger, variables: new Map([['bar', 'secret'], ['baz', 'baz']]), diff --git a/packages/core/test/factory/step.test.js b/packages/core/test/factory/step.test.js index f61c4a29..b9d0a1b5 100644 --- a/packages/core/test/factory/step.test.js +++ b/packages/core/test/factory/step.test.js @@ -2,6 +2,7 @@ import { strictEqual, rejects } from 'assert' import { resolve } from 'path' import getStream from 'get-stream' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import defaultLoaderRegistry from '../../lib/defaultLoaderRegistry.js' import defaultLogger from '../../lib/defaultLogger.js' import createStep from '../../lib/factory/step.js' @@ -9,6 +10,7 @@ import Step from 
'../../lib/Step.js' import ns from '../support/namespaces.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, '../support/definitions') +const context = { env } describe('factory/step', () => { it('should be a method', () => { @@ -20,8 +22,9 @@ describe('factory/step', () => { const ptr = [...definition.node(ns.ex('')).out(ns.p.steps).out(ns.p.stepList).list()][0] const step = await createStep(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), logger: defaultLogger(), }) @@ -34,8 +37,9 @@ describe('factory/step', () => { await rejects(async () => { await createStep(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), logger: defaultLogger(), }) }, err => { @@ -51,8 +55,9 @@ describe('factory/step', () => { const ptr = [...definition.node(ns.ex('')).out(ns.p.steps).out(ns.p.stepList).list()][0] const step = await createStep(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), logger: defaultLogger(), }) diff --git a/packages/core/test/factory/variables.test.js b/packages/core/test/factory/variables.test.js index 0b0bd02e..d7f67be7 100644 --- a/packages/core/test/factory/variables.test.js +++ b/packages/core/test/factory/variables.test.js @@ -1,12 +1,14 @@ import { resolve } from 'path' import { expect } from 'chai' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' import defaultLoaderRegistry from '../../lib/defaultLoaderRegistry.js' import createVariables from '../../lib/factory/variables.js' import { VariableMap } from '../../lib/VariableMap.js' import ns from '../support/namespaces.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, '../support/definitions') +const context = { env } describe('factory/variables', () => { it('should return a VariableMap', async () => { @@ -14,8 +16,9 @@ describe('factory/variables', () => { const ptr = definition.node(ns.ex('')).out(ns.p.variables) const variables = await createVariables(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), }) expect(variables).to.be.instanceOf(VariableMap) @@ -26,8 +29,9 @@ describe('factory/variables', () => { const ptr = definition.node(ns.ex.inline).out(ns.p.variables) const variables = await createVariables(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), }) expect(variables.get('optional')).to.be.undefined @@ -38,8 +42,9 @@ describe('factory/variables', () => { const ptr = definition.node(ns.ex.inline).out(ns.p.variables) const variables = await createVariables(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), }) expect([...variables.entries()]).to.deep.contain.members([['foo', 'bar']]) @@ -50,8 +55,9 @@ describe('factory/variables', () => { const ptr = definition.node(ns.ex.multiset).out(ns.p.variables) const variables = await createVariables(ptr, { + context, basePath: resolve('test'), - loaderRegistry: defaultLoaderRegistry(), + loaderRegistry: defaultLoaderRegistry(env), }) expect([...variables.entries()]).to.deep.contain.members([ diff --git a/packages/core/test/loader/file.test.js 
b/packages/core/test/loader/file.test.js index 5c2bf735..47aaea9c 100644 --- a/packages/core/test/loader/file.test.js +++ b/packages/core/test/loader/file.test.js @@ -1,7 +1,7 @@ import * as url from 'url' import * as os from 'os' import { expect } from 'chai' -import $rdf from '@zazuko/env' +import $rdf from 'barnard59-env' import loader from '../../lib/loader/file.js' import ns from '../support/namespaces.js' diff --git a/packages/core/test/loader/pipeline.test.js b/packages/core/test/loader/pipeline.test.js index 7ac9fc05..6d8a268e 100644 --- a/packages/core/test/loader/pipeline.test.js +++ b/packages/core/test/loader/pipeline.test.js @@ -1,12 +1,12 @@ import { strictEqual, rejects } from 'assert' import { resolve } from 'path' -import clownface from 'clownface' -import rdf from '@zazuko/env' +import rdf from 'barnard59-env' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' import { run } from '../../index.js' import loader from '../../lib/loader/pipeline.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, '../support/definitions') +const context = { env: rdf } describe('loader/pipeline', () => { it('should use the given variables', async () => { @@ -18,7 +18,7 @@ describe('loader/pipeline', () => { ['hello', 'world'], ]) - const stream = await loader(ptr, { basePath, variables }) + const stream = await loader(ptr, { context, basePath, variables }) await run(stream.pipeline, { resume: true }) strictEqual(stream.pipeline.variables.get('foo'), 'bar') @@ -26,10 +26,10 @@ describe('loader/pipeline', () => { }) it('should reject if the referred resource does not have a pipeline type', async () => { - const ptr = clownface({ dataset: rdf.dataset() }).blankNode() + const ptr = rdf.clownface({ dataset: rdf.dataset() }).blankNode() await rejects(async () => { - await loader(ptr) + await loader(ptr, { context }) }) }) }) diff --git a/packages/core/test/loader/variable.test.js b/packages/core/test/loader/variable.test.js index 7563ed69..36581b27 100644 --- a/packages/core/test/loader/variable.test.js +++ b/packages/core/test/loader/variable.test.js @@ -1,12 +1,13 @@ import { strictEqual } from 'assert' -import clownface from 'clownface' -import rdf from '@zazuko/env' -import loader from '../../lib/loader/variable.js' +import rdf from 'barnard59-env' +import loaderUnbound from '../../lib/loader/variable.js' import ns from '../support/namespaces.js' +const loader = loaderUnbound.bind(null, rdf) + describe('loader/variable', () => { it('should load a variable from the map by name', async () => { - const ptr = clownface({ dataset: rdf.dataset() }) + const ptr = rdf.clownface({ dataset: rdf.dataset() }) .blankNode() .addOut(ns.rdf.type, ns.p.Variable) .addOut(ns.p.name, 'foo') @@ -18,7 +19,7 @@ describe('loader/variable', () => { }) it('should add the variable name to variable value term', async () => { - const ptr = clownface({ dataset: rdf.dataset() }) + const ptr = rdf.clownface({ dataset: rdf.dataset() }) .blankNode() .addOut(ns.rdf.type, ns.p.Variable) .addOut(ns.p.name, 'foo') @@ -30,7 +31,7 @@ describe('loader/variable', () => { }) it('should load the variable from the dataset if it\'s not present in the variable map', async () => { - const ptr = clownface({ dataset: rdf.dataset() }) + const ptr = rdf.clownface({ dataset: rdf.dataset() }) .blankNode() .addOut(ns.rdf.type, ns.p.Variable) .addOut(ns.p.name, 'foo') @@ -42,7 +43,7 @@ describe('loader/variable', () => { }) it('should prioritize the variable value from the variable 
map', async () => { - const ptr = clownface({ dataset: rdf.dataset() }) + const ptr = rdf.clownface({ dataset: rdf.dataset() }) .blankNode() .addOut(ns.rdf.type, ns.p.Variable) .addOut(ns.p.name, 'foo') @@ -55,7 +56,7 @@ describe('loader/variable', () => { }) it('should load a variable value for a given variable name', async () => { - const ptr = clownface({ dataset: rdf.dataset() }).literal('foo', ns.p.VariableName) + const ptr = rdf.clownface({ dataset: rdf.dataset() }).literal('foo', ns.p.VariableName) const variables = new Map([['foo', 'bar']]) const result = await loader(ptr, { variables }) diff --git a/packages/core/test/metadata.test.js b/packages/core/test/metadata.test.js index 085364a9..10779086 100644 --- a/packages/core/test/metadata.test.js +++ b/packages/core/test/metadata.test.js @@ -1,7 +1,10 @@ import { strictEqual } from 'assert' -import metadata from '../lib/metadata.js' +import env from 'barnard59-env' +import metadataUnbound from '../lib/metadata.js' import createPipelineDefinition from './support/createPipelineDefinition.js' +const metadata = metadataUnbound.bind(null, env) + describe('metadata', () => { it('should be a function', () => { strictEqual(typeof metadata, 'function') diff --git a/packages/core/test/support/createPipelineDefinition.js b/packages/core/test/support/createPipelineDefinition.js index be3e9498..3dccb75f 100644 --- a/packages/core/test/support/createPipelineDefinition.js +++ b/packages/core/test/support/createPipelineDefinition.js @@ -1,5 +1,4 @@ -import clownface from 'clownface' -import rdf from '@zazuko/env' +import rdf from 'barnard59-env' import ns from './namespaces.js' function createPipelineDefinition({ @@ -10,7 +9,7 @@ function createPipelineDefinition({ writable, writableObjectMode, } = {}) { - const ptr = clownface({ + const ptr = rdf.clownface({ dataset: dataset || rdf.dataset(), term: term || rdf.blankNode(), }) diff --git a/packages/csvw/README.md b/packages/csvw/README.md index 66622e47..4f1ff6af 100644 --- a/packages/csvw/README.md +++ b/packages/csvw/README.md @@ -1,4 +1,4 @@ -# bardnard59-csvw +# barnard59-csvw Simplifies handling CSVW mapping documents in barnard59 pipelines diff --git a/packages/csvw/fetch.js b/packages/csvw/fetch.js index 7fc2378b..bf392f7e 100644 --- a/packages/csvw/fetch.js +++ b/packages/csvw/fetch.js @@ -1,4 +1,3 @@ -import rdf from '@zazuko/env' import toReadable from 'duplex-to/readable.js' import { PassThrough } from 'readable-stream' import fetchData from './lib/fetchData.js' @@ -9,8 +8,8 @@ function fetch({ csvw }) { Promise.resolve().then(async () => { try { - const metadata = await fetchMetadata(csvw) - const url = metadata.any().has(rdf.ns.csvw.url).out(rdf.ns.csvw.url) + const metadata = await fetchMetadata(this.env, csvw) + const url = metadata.any().has(this.env.ns.csvw.url).out(this.env.ns.csvw.url) const dataStream = await fetchData(url.value) dataStream.pipe(output) diff --git a/packages/csvw/filter.js b/packages/csvw/filter.js index e75b1a05..e30be9cd 100644 --- a/packages/csvw/filter.js +++ b/packages/csvw/filter.js @@ -1,11 +1,9 @@ -import rdf from '@zazuko/env' - export function excludeCsvwTriples(quad) { - if (quad.predicate.value.startsWith(rdf.ns.csvw('').value)) { + if (quad.predicate.value.startsWith(this.env.ns.csvw('').value)) { return false } - if (rdf.ns.rdf.type.equals(quad.predicate) && quad.object.value.startsWith(rdf.ns.csvw('').value)) { + if (this.env.ns.rdf.type.equals(quad.predicate) && quad.object.value.startsWith(this.env.ns.csvw('').value)) { return false } diff --git 
a/packages/csvw/lib/fetchMetadata.js b/packages/csvw/lib/fetchMetadata.js index 300377a5..bbdef286 100644 --- a/packages/csvw/lib/fetchMetadata.js +++ b/packages/csvw/lib/fetchMetadata.js @@ -1,9 +1,8 @@ import rdfFetch from '@rdfjs/fetch' -import rdf from '@zazuko/env' import checkResponse from './checkResponse.js' import commonFetch from './commonFetch.js' -async function fetchMetadata(url) { +async function fetchMetadata(rdf, url) { const res = await rdfFetch(url.toString(), { contentTypeLookup: extension => extension === '.json' ? 'application/ld+json' : undefined, factory: rdf, diff --git a/packages/csvw/package.json b/packages/csvw/package.json index d6826d56..81b977e9 100644 --- a/packages/csvw/package.json +++ b/packages/csvw/package.json @@ -21,7 +21,6 @@ "homepage": "https://github.com/zazuko/barnard59", "dependencies": { "@rdfjs/fetch": "^3.1.1", - "@zazuko/env": "^1.0.1", "duplex-to": "^1.0.1", "file-fetch": "^1.7.0", "node-fetch": "^3.0.0", @@ -29,6 +28,7 @@ "readable-stream": "^3.6.0" }, "devDependencies": { + "barnard59-env": "^0.0.0", "express-as-promise": "^1.2.0", "get-stream": "^7.0.1", "is-stream": "^3.0.0" diff --git a/packages/csvw/test/fetch.test.js b/packages/csvw/test/fetch.test.js index b65c97b8..7f46f43c 100644 --- a/packages/csvw/test/fetch.test.js +++ b/packages/csvw/test/fetch.test.js @@ -2,12 +2,15 @@ import { rejects, strictEqual } from 'assert' import withServer from 'express-as-promise/withServer.js' import getStream from 'get-stream' import { isReadableStream } from 'is-stream' -import fetch from '../fetch.js' +import env from 'barnard59-env' +import fetchUnbound from '../fetch.js' const csvContent = 'id,text\n1,abc\n' const fileMetdataUrl = 'file:./test/support/test.metadata.json' const fileMetdataTtlUrl = 'file:./test/support/test.metadata.ttl' +const fetch = fetchUnbound.bind({ env }) + describe('fetch', () => { it('should be a function', () => { strictEqual(typeof fetch, 'function') diff --git a/packages/env/README.md b/packages/env/README.md new file mode 100644 index 00000000..55cc3b07 --- /dev/null +++ b/packages/env/README.md @@ -0,0 +1,6 @@ +# barnard59-env + +[RDF/JS Environment](https://npm.im/@rdfjs/environment) for [barnard59](https://npm.im/barnard59). + +This is an extension of [@zazuko/env-node](https://npm.im/@zazuko/env-node) that adds namespaces commonly used +by pipelines to `env.ns`. 
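
To make the new environment's surface concrete, here is a short usage sketch (illustrative only, not part of the patch itself); it assumes the namespaces registered by `NamespacesFactory` and the `@zazuko/env-node` factories wired up in the files that follow:

```js
// Illustrative sketch, not part of the patch: assumes barnard59-env as
// published from this repository.
import env from 'barnard59-env'

// Namespaces added by NamespacesFactory (see lib/Namespaces.js below):
console.log(env.ns.p.Pipeline.value) // https://pipeline.described.at/Pipeline
console.log(env.ns.code.EcmaScript.value) // https://code.described.at/EcmaScript

// The usual @zazuko/env-node factories are also available:
const ptr = env.clownface({ dataset: env.dataset() })
  .blankNode()
  .addOut(env.ns.rdf.type, env.ns.p.Pipeline)

console.log(ptr.dataset.size) // 1
```

Elsewhere in this patch, steps reach the same environment as `this.env` (see `packages/csvw/fetch.js` and `packages/graph-store/get.js`), so operations and the tests above work against identical factories.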
diff --git a/packages/env/index.js b/packages/env/index.js new file mode 100644 index 00000000..91fcb65f --- /dev/null +++ b/packages/env/index.js @@ -0,0 +1,5 @@ +import { create } from '@zazuko/env-node' +import NamespacesFactory from './lib/Namespaces.js' +import ConstantsFactory from './lib/Constants.js' + +export default create(NamespacesFactory, ConstantsFactory) diff --git a/packages/env/lib/Constants.js b/packages/env/lib/Constants.js new file mode 100644 index 00000000..3a4afa3f --- /dev/null +++ b/packages/env/lib/Constants.js @@ -0,0 +1,5 @@ +export default class ConstantsFactory { + init() { + this.FALSE = this.literal('false', this.ns.xsd.boolean) + } +} diff --git a/packages/env/lib/Namespaces.js b/packages/env/lib/Namespaces.js new file mode 100644 index 00000000..111ffcc4 --- /dev/null +++ b/packages/env/lib/Namespaces.js @@ -0,0 +1,10 @@ +export default class NamespacesFactory { + init() { + this.ns = { + ...this.ns, + p: this.namespace('https://pipeline.described.at/'), + code: this.namespace('https://code.described.at/'), + b59: this.namespace('https://barnard59.zazuko.com/vocab#'), + } + } +} diff --git a/packages/env/package.json b/packages/env/package.json new file mode 100644 index 00000000..28b28be2 --- /dev/null +++ b/packages/env/package.json @@ -0,0 +1,20 @@ +{ + "name": "barnard59-env", + "version": "0.0.0", + "type": "module", + "main": "index.js", + "dependencies": { + "@zazuko/env-node": "^1.0.0" + }, + "repository": { + "type": "git", + "url": "git://github.com/zazuko/barnard59.git", + "directory": "packages/env" + }, + "author": "Zazuko GmbH", + "license": "MIT", + "bugs": { + "url": "https://github.com/zazuko/barnard59/issues" + }, + "homepage": "https://github.com/zazuko/barnard59" +} diff --git a/packages/formats/csvw.js b/packages/formats/csvw.js index d33ec967..89632df6 100644 --- a/packages/formats/csvw.js +++ b/packages/formats/csvw.js @@ -29,7 +29,7 @@ function parse(args) { return tracer.startActiveSpan('csvw:parse', async span => { try { - const dataset = await toDataset(metadata) + const dataset = await toDataset(this.env, metadata) span.addEvent('metadata') return sinkToDuplex(new CsvwParser({ metadata: dataset, diff --git a/packages/formats/lib/stream.js b/packages/formats/lib/stream.js index a6a43879..094793e6 100644 --- a/packages/formats/lib/stream.js +++ b/packages/formats/lib/stream.js @@ -1,10 +1,7 @@ -import fromStream from 'rdf-dataset-ext/fromStream.js' -import rdf from '@zazuko/env' - -export function toDataset(streamOrDataset) { +export function toDataset(rdf, streamOrDataset) { if (!streamOrDataset.readable) { return Promise.resolve(streamOrDataset) } - return fromStream(rdf.dataset(), streamOrDataset) + return rdf.dataset().import(streamOrDataset) } diff --git a/packages/formats/package.json b/packages/formats/package.json index f0fe493a..25cc1705 100644 --- a/packages/formats/package.json +++ b/packages/formats/package.json @@ -26,14 +26,13 @@ "@rdfjs/serializer-jsonld": "^2.0.0", "@rdfjs/serializer-ntriples": "^2.0.0", "@rdfjs/sink-to-duplex": "^1.0.0", - "@zazuko/env": "^1.1.0", "barnard59-base": "^2.0.0", - "rdf-dataset-ext": "^1.0.1", "rdf-parser-csvw": "^0.15.0", "rdf-parser-csvw-xlsx": "^0.1.0", "rdfxml-streaming-parser": "^1.2.0" }, "devDependencies": { + "barnard59-env": "^0.0.0", "chai": "^4.3.7" }, "engines": { diff --git a/packages/formats/test/jsonld.test.js b/packages/formats/test/jsonld.test.js index 7c77b0f5..09f5b207 100644 --- a/packages/formats/test/jsonld.test.js +++ b/packages/formats/test/jsonld.test.js @@ 
-1,6 +1,5 @@ import fs from 'fs' -import rdf from '@zazuko/env' -import fromStream from 'rdf-dataset-ext/fromStream.js' +import rdf from 'barnard59-env' import { expect } from 'chai' import { parse } from '../jsonld.js' @@ -10,7 +9,7 @@ describe('jsonld', () => { const input = fs.createReadStream(new URL('./assets/remote.json', import.meta.url)) const parser = parse() - const dataset = await fromStream(rdf.dataset(), input.pipe(parser)) + const dataset = await rdf.dataset().import(input.pipe(parser)) expect(dataset).to.have.property('size').gt(0) }) @@ -23,7 +22,7 @@ describe('jsonld', () => { 'http://example.org/ns/csvw': './test/assets/csvw.context.json', }, }) - const dataset = await fromStream(rdf.dataset(), input.pipe(parser)) + const dataset = await rdf.dataset().import(input.pipe(parser)) expect(dataset).to.have.property('size').gt(0) }) @@ -36,7 +35,7 @@ describe('jsonld', () => { 'http://example.org/ns/csvw': './test/assets/csvw.context.json', }), }) - const dataset = await fromStream(rdf.dataset(), input.pipe(parser)) + const dataset = await rdf.dataset().import(input.pipe(parser)) expect(dataset).to.have.property('size').gt(0) }) diff --git a/packages/formats/test/n3.test.js b/packages/formats/test/n3.test.js index 91631413..019c110f 100644 --- a/packages/formats/test/n3.test.js +++ b/packages/formats/test/n3.test.js @@ -1,6 +1,5 @@ import fs from 'fs' -import rdf from '@zazuko/env' -import fromStream from 'rdf-dataset-ext/fromStream.js' +import env from 'barnard59-env' import { expect } from 'chai' import { parse } from '../n3.js' @@ -9,8 +8,8 @@ describe('n3', () => { it('successfully loads the input file', async () => { const input = fs.createReadStream(new URL('./datasets/ontologist.n3', import.meta.url)) - const parser = parse() - const dataset = await fromStream(rdf.dataset(), input.pipe(parser)) + const parser = parse.call({ env }) + const dataset = await env.dataset().import(input.pipe(parser)) expect(dataset).to.have.property('size').gt(0) }) @@ -18,10 +17,10 @@ describe('n3', () => { it('forwards argument to parser options', async () => { const input = fs.createReadStream(new URL('./rules/weather.n3', import.meta.url)) - const parser = parse({ + const parser = parse.call({ env }, { format: 'text/n3', }) - const dataset = await fromStream(rdf.dataset(), input.pipe(parser)) + const dataset = await env.dataset().import(input.pipe(parser)) expect(dataset).to.have.property('size').gt(0) }) diff --git a/packages/formats/test/rdf-xml.test.js b/packages/formats/test/rdf-xml.test.js index 079e422e..854a3a05 100644 --- a/packages/formats/test/rdf-xml.test.js +++ b/packages/formats/test/rdf-xml.test.js @@ -1,6 +1,5 @@ import fs from 'fs' -import rdf from '@zazuko/env' -import fromStream from 'rdf-dataset-ext/fromStream.js' +import env from 'barnard59-env' import { expect } from 'chai' import { parse } from '../rdf-xml.js' @@ -9,8 +8,8 @@ describe('rdf/xml', () => { it('successfully loads the input file', async () => { const input = fs.createReadStream(new URL('./datasets/bioinformatics.rdf', import.meta.url)) - const parser = parse() - const dataset = await fromStream(rdf.dataset(), input.pipe(parser)) + const parser = parse.call({ env }) + const dataset = await env.dataset().import(input.pipe(parser)) expect(dataset).to.have.property('size').gt(0) }) diff --git a/packages/formats/xlsx.js b/packages/formats/xlsx.js index 70af1369..802fc1de 100644 --- a/packages/formats/xlsx.js +++ b/packages/formats/xlsx.js @@ -19,7 +19,7 @@ function parse(args) { return 
tracer.startActiveSpan('xlsx:parse', async span => { try { - const dataset = await toDataset(metadata) + const dataset = await toDataset(this.env, metadata) span.addEvent('metadata') return sinkToDuplex(new CsvwXlsxParser({ metadata: dataset, diff --git a/packages/graph-store/get.js b/packages/graph-store/get.js index 102ccbe5..7dbb7158 100644 --- a/packages/graph-store/get.js +++ b/packages/graph-store/get.js @@ -1,4 +1,3 @@ -import rdf from '@rdfjs/data-model' import Client from 'sparql-http-client' import unpromiseReadable from './lib/unpromiseReadable.js' @@ -9,10 +8,10 @@ function get({ endpoint, graph, user, password }) { password, }) - if (!graph || rdf.defaultGraph().equals(graph)) { - graph = rdf.defaultGraph() + if (!graph || this.env.defaultGraph().equals(graph)) { + graph = this.env.defaultGraph() } else { - graph = rdf.namedNode(graph.value || graph) + graph = this.env.namedNode(graph.value || graph) } return unpromiseReadable(client.store.get(graph)) diff --git a/packages/graph-store/package.json b/packages/graph-store/package.json index 2d94fe8c..4206ffdf 100644 --- a/packages/graph-store/package.json +++ b/packages/graph-store/package.json @@ -20,7 +20,6 @@ }, "homepage": "https://github.com/zazuko/barnard59", "dependencies": { - "@rdfjs/data-model": "^2.0.1", "duplex-to": "^1.0.0", "lodash": "^4.17.21", "promise-the-world": "^1.0.1", @@ -32,8 +31,8 @@ "barnard59-rdf": "^2.0.0" }, "devDependencies": { - "@rdfjs/namespace": "^2.0.0", "@rdfjs/to-ntriples": "^2.0.0", + "@zazuko/env": "^1.9.0", "express-as-promise": "^1.2.0", "get-stream": "^6.0.1", "isstream": "^0.1.2" diff --git a/packages/graph-store/test/get.test.js b/packages/graph-store/test/get.test.js index 01fcd4b2..414bc4b6 100644 --- a/packages/graph-store/test/get.test.js +++ b/packages/graph-store/test/get.test.js @@ -1,13 +1,13 @@ import { strictEqual } from 'assert' -import rdf from '@rdfjs/data-model' -import namespace from '@rdfjs/namespace' +import rdf from '@zazuko/env' import quadToNTriples from '@rdfjs/to-ntriples' import withServer from 'express-as-promise/withServer.js' import { array } from 'get-stream' import { isReadable, isWritable } from 'isstream' -import get from '../get.js' +import getUnbound from '../get.js' -const ns = namespace('http://example.org/') +const ns = rdf.namespace('http://example.org/') +const get = getUnbound.bind({ env: rdf }) describe('get', () => { it('should return a readable stream', async () => { diff --git a/packages/graph-store/test/post.test.js b/packages/graph-store/test/post.test.js index 1cd3e71d..91744a9b 100644 --- a/packages/graph-store/test/post.test.js +++ b/packages/graph-store/test/post.test.js @@ -1,7 +1,6 @@ import { strictEqual } from 'assert' import { promisify } from 'util' -import rdf from '@rdfjs/data-model' -import namespace from '@rdfjs/namespace' +import rdf from '@zazuko/env' import quadToNTriples from '@rdfjs/to-ntriples' import withServer from 'express-as-promise/withServer.js' import getStream from 'get-stream' @@ -9,7 +8,7 @@ import { isReadable, isWritable } from 'isstream' import { finished } from 'readable-stream' import post from '../post.js' -const ns = namespace('http://example.org/') +const ns = rdf.namespace('http://example.org/') describe('post', () => { it('should return a writable stream', async () => { diff --git a/packages/graph-store/test/put.test.js b/packages/graph-store/test/put.test.js index 73eefe2a..235a89c9 100644 --- a/packages/graph-store/test/put.test.js +++ b/packages/graph-store/test/put.test.js @@ -1,7 +1,6 @@ import { 
deepStrictEqual, strictEqual } from 'assert' import { promisify } from 'util' -import rdf from '@rdfjs/data-model' -import namespace from '@rdfjs/namespace' +import rdf from '@zazuko/env' import quadToNTriples from '@rdfjs/to-ntriples' import withServer from 'express-as-promise/withServer.js' import getStream from 'get-stream' @@ -9,7 +8,7 @@ import { isReadable, isWritable } from 'isstream' import { finished } from 'readable-stream' import put from '../put.js' -const ns = namespace('http://example.org/') +const ns = rdf.namespace('http://example.org/') describe('put', () => { it('should return a writable stream', async () => { diff --git a/packages/sparql/package.json b/packages/sparql/package.json index 86602e1c..65a22140 100644 --- a/packages/sparql/package.json +++ b/packages/sparql/package.json @@ -18,13 +18,12 @@ }, "homepage": "https://github.com/zazuko/barnard59-sparql", "dependencies": { - "@rdfjs/namespace": "^2.0.0", "duplex-to": "^1.0.1", "sparql-http-client": "^2.4.0" }, "devDependencies": { "@tpluscode/rdf-string": "^1.0.3", - "@zazuko/env": "^1.0.1", + "@zazuko/env": "^1.0.0", "get-stream": "^6.0.0", "isstream": "^0.1.2", "mocha": "^9.0.2", diff --git a/packages/sparql/test/support/namespaces.js b/packages/sparql/test/support/namespaces.js index 4e3a3cc9..7954495a 100644 --- a/packages/sparql/test/support/namespaces.js +++ b/packages/sparql/test/support/namespaces.js @@ -1,6 +1,5 @@ -import namespace from '@rdfjs/namespace' import rdf from '@zazuko/env' -const ex = namespace('http://example.org/', { factory: rdf }) +const ex = rdf.namespace('http://example.org/', { factory: rdf }) export { ex } diff --git a/packages/validation/lib/manifest.js b/packages/validation/lib/manifest.js index c520d230..1e3e171c 100644 --- a/packages/validation/lib/manifest.js +++ b/packages/validation/lib/manifest.js @@ -1,5 +1,5 @@ import path from 'path' -import rdf from '@zazuko/env' +import rdf from '@zazuko/env-node' import iriResolve from 'rdf-loader-code/lib/iriResolve.js' import * as parser from './parser.js' import { removeFilePart } from './utils.js' diff --git a/packages/validation/lib/parser.js b/packages/validation/lib/parser.js index b70cb1d5..9ebe113f 100644 --- a/packages/validation/lib/parser.js +++ b/packages/validation/lib/parser.js @@ -1,9 +1,7 @@ import fs from 'fs' import readline from 'readline' -import rdf from '@zazuko/env' -import fromFile from 'rdf-utils-fs/fromFile.js' +import rdf from '@zazuko/env-node' import iriResolve from 'rdf-loader-code/lib/iriResolve.js' -import fromStream from 'rdf-dataset-ext/fromStream.js' import Issue from './issue.js' import * as utils from './utils.js' import validatePipelineProperty from './validatePipelineProperty.js' @@ -15,12 +13,12 @@ const ns = { } export async function readGraph(file, checks) { - const quadStream = fromFile(file) + const quadStream = rdf.fromFile(file) const parserPromise = new Promise((resolve, reject) => { quadStream.on('error', reject) quadStream.on('end', resolve) }) - const datasetPromise = fromStream(rdf.dataset(), quadStream) + const datasetPromise = rdf.dataset().import(quadStream) let issue, dataset try { diff --git a/packages/validation/package.json b/packages/validation/package.json index 964b5c9c..2e081961 100644 --- a/packages/validation/package.json +++ b/packages/validation/package.json @@ -34,14 +34,13 @@ "homepage": "https://github.com/zazuko/barnard59", "dependencies": { "@rdfjs/formats-common": "^2", - "@zazuko/env": "^1.1.1", + "@rdfjs/namespace": "^2.0.0", + "@zazuko/env-node": "^1.0.0", 
"anylogger": "^1.0.11", "anylogger-console": "^1.0.0", "chalk": "^4.1.0", "commander": "^11.0.0", - "rdf-dataset-ext": "^1.0.1", - "rdf-loader-code": "^2.0.0", - "rdf-utils-fs": "^2.1.0" + "rdf-loader-code": "^2.0.0" }, "mocha": { "loader": "esmock" diff --git a/packages/validation/rules.json b/packages/validation/rules.json index 4a314c16..e69de29b 100644 --- a/packages/validation/rules.json +++ b/packages/validation/rules.json @@ -1,110 +0,0 @@ -[ - { - "ruleId": 3, - "ruleDescription": "manifest.ttl file exists and can be parsed", - "messageSuccess": "Manifest file for ${library} loaded successfully", - "messageFailure": "Missing manifest file for ${library}\n The following operations cannot be validated:\n * \"${operations}\"" - }, - { - "ruleId": 4, - "ruleDescription": "Pipeline has at least one property defined. Recognized choices: Readable, ReadableObjectMode, Writable, WritableObjectMode", - "messageSuccess": "Validated: property for pipeline ${pipeline} is defined", - "messageFailure": "Cannot validate pipeline ${pipeline}: the pipeline mode (Readable(ObjectMode)/Writable(ObjectMode)) is not defined" - }, - { - "ruleId": 5, - "ruleDescription": "Pipeline should have the same type and mode if its first stream is Writable(ObjectMode)", - "messageSuccess": "The pipeline type for ${pipeline} matches first stream", - "messageFailure": "The pipeline ${pipeline} must be of type ${type}" - }, - { - "ruleId": 6, - "ruleDescription": "Pipeline should have the same type and mode if its last stream is Readable(ObjectMode)", - "messageSuccess": "The pipeline type for ${pipeline} matches last stream", - "messageFailure": "The pipeline ${pipeline} must be of type ${type}" - }, - { - "ruleId": 7, - "ruleDescription": "Operation has at least one property defined. 
Recognized choices: Readable, Writable, ReadableObjectMode, WritableObjectMode", - "messageSuccess": "Validated: properties for operation ${operation} are defined", - "messageFailure": "Cannot validate operation ${operation}: no metadata" - }, - { - "ruleId": 8, - "ruleDescription": "Operation has property \"Operation\"", - "messageSuccess": "Validated: operation ${operation} is of type \"Operation\"", - "messageFailure": "Invalid operation: ${operation} is not of type \"Operation\"" - }, - { - "ruleId": 9, - "ruleDescription": "If there exists more than one step, first step must be either Readable or ReadableObjectMode", - "messageSuccess": "Validated operation ${operation}: first operation must be either Readable or ReadableObjectMode", - "messageFailure": "Invalid operation ${operation}: it is neither Readable nor ReadableObjectMode" - }, - { - "ruleId": 10, - "ruleDescription": "Writable operation must always be preceded by a Readable operation", - "messageSuccess": "Validated operation ${operation}: a Writable operation must always be preceded by a Readable operation", - "messageFailure": "Invalid operation ${operation}: previous operation is not Readable" - }, - { - "ruleId": 11, - "ruleDescription": "WritableObjectMode operation must always be preceded by a ReadableObjectMode operation", - "messageSuccess": "Validated operation ${operation}: a WritableObjectMode operation must always be preceded by a ReadableObjectMode operation", - "messageFailure": "Invalid operation ${operation}: previous operation is not ReadableObjectMode" - }, - { - "ruleId": 12, - "ruleDescription": "Readable operation must always be followed by a Writable operation", - "messageSuccess": "Validated operation ${operation}: a Readable operation must always be followed by a Writable operation", - "messageFailure": "Invalid operation ${operation}: next operation is not Writable" - }, - { - "ruleId": 13, - "ruleDescription": "ReadableObjectMode operation must be followed by a WritableObjectMode operation", - "messageSuccess": "Validated operation ${operation}: a ReadableObjectMode operation must be followed by a WritableObjectMode operation", - "messageFailure": "Invalid operation ${operation}: next operation is not WritableObjectMode" - }, - { - "ruleId": 14, - "ruleDescription": "Operation defined in manifest can be imported", - "messageSuccess": "${filename} can be imported for operation ${op}", - "messageFailure": "Cannot import ${filename} for operation ${op}" - }, - { - "ruleId": 15, - "ruleDescription": "Operation defined in manifest has a corresponding export", - "messageSuccess": "File ${filename} exports '${method}' for operation ${op}", - "messageFailure": "File ${filename} does not export ${method} for operation ${op}" - }, - { - "ruleId": 51, - "ruleDescription": "Pipeline should have the same type if its first stream is Writable(ObjectMode)", - "messageSuccess": "The pipeline type for ${pipeline} matches first stream", - "messageFailure": "The pipeline ${pipeline} must be of type Writable or WritableObjectMode" - }, - { - "ruleId": 61, - "ruleDescription": "Pipeline should have the same type if its last stream is Readable(ObjectMode)", - "messageSuccess": "The pipeline type for ${pipeline} matches last stream", - "messageFailure": "The pipeline ${pipeline} must be of type Readable or ReadableObjectMode" - }, - { - "ruleId": 100, - "ruleDescription": "Previous operation should have metadata", - "messageSuccess": "Validation can be performed for operation ${operation}: previous operation has metadata", 
- "messageFailure": "Cannot validate operation ${operation}: previous operation does not have metadata" - }, - { - "ruleId": 1337, - "ruleDescription": "Each codelink is described by code.implementedBy/code.link", - "messageSuccess": "Defined with both code.implementedBy & code.link", - "messageFailure": "Missing code.implementedBy/code.link" - }, - { - "ruleId": 2668, - "ruleDescription": "Each dependency must be installed", - "messageSuccess": "Found ${dependencyType} ${library} successfully", - "messageFailure": "Missing ${dependencyType} ${library}\n The following operations cannot be validated:\n * \"${operations}\"" - } -] diff --git a/packages/validation/test/helpers.js b/packages/validation/test/helpers.js index e7c61cf4..dd61f9b1 100644 --- a/packages/validation/test/helpers.js +++ b/packages/validation/test/helpers.js @@ -1,14 +1,13 @@ import { Readable } from 'stream' import deepEqual from 'deep-equal' import formats from '@rdfjs/formats-common' -import rdf from '@zazuko/env' -import fromStream from 'rdf-dataset-ext/fromStream.js' +import rdf from '@zazuko/env-node' const parser = formats.parsers.get('text/turtle') export async function turtleToCF(str) { const stream = Readable.from([str]) const quadStream = parser.import(stream) - return rdf.clownface({ dataset: await fromStream(rdf.dataset(), quadStream) }) + return rdf.clownface({ dataset: await rdf.dataset().import(quadStream) }) } export function checkArrayContainsField(array, field, value) { diff --git a/test/e2e/forEach.e2e.test.js b/test/e2e/forEach.e2e.test.js index f12c645c..48b842e9 100644 --- a/test/e2e/forEach.e2e.test.js +++ b/test/e2e/forEach.e2e.test.js @@ -3,13 +3,14 @@ import { resolve } from 'path' import { createPipeline } from 'barnard59-core' import getStream from 'get-stream' import { pipelineDefinitionLoader } from 'barnard59-test-support/loadPipelineDefinition.js' +import env from 'barnard59-env' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, 'definitions') describe('forEach', () => { it('should execute the example correctly', async () => { const ptr = await loadPipelineDefinition('foreach/csv-duplicate') - const pipeline = createPipeline(ptr, { basePath: resolve('.') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('.') }) const out = JSON.parse(await getStream(pipeline.stream)) @@ -22,7 +23,7 @@ describe('forEach', () => { * */ it('should preserve variables set during forEach execution', async () => { const ptr = await loadPipelineDefinition('foreach/with-handler') - const pipeline = createPipeline(ptr, { basePath: resolve('.') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('.') }) const out = await getStream.array(pipeline.stream) @@ -32,7 +33,7 @@ describe('forEach', () => { it('should be able to access variables from higher scopes', async () => { const ptr = await loadPipelineDefinition('foreach/with-variable') - const pipeline = createPipeline(ptr, { basePath: resolve('.') }) + const pipeline = createPipeline(ptr, { env, basePath: resolve('.') }) const out = await getStream.array(pipeline.stream) diff --git a/test/e2e/package.json b/test/e2e/package.json index 98f33a8a..4758f0bb 100644 --- a/test/e2e/package.json +++ b/test/e2e/package.json @@ -11,6 +11,7 @@ "@zazuko/env": "^1.0.1", "barnard59-base": "^2.0.0", "barnard59-core": "^4.0.0", + "barnard59-env": "^0.0.0", "barnard59-formats": "^2.0.0", "barnard59-http": "^2.0.0", "chai": "^4.3.7", diff --git a/test/e2e/pipeline.e2e.test.js b/test/e2e/pipeline.e2e.test.js index 
8f49c424..bf2fba42 100644 --- a/test/e2e/pipeline.e2e.test.js +++ b/test/e2e/pipeline.e2e.test.js @@ -10,6 +10,7 @@ import toCanonical from 'rdf-dataset-ext/toCanonical.js' import fromStream from 'rdf-dataset-ext/fromStream.js' import rdf from '@zazuko/env' import fromFile from 'rdf-utils-fs/fromFile.js' +import env from 'barnard59-env' import { promisedEcmaScriptLoader, promisedUrlLoader } from './asyncLoaders.js' const loadPipelineDefinition = pipelineDefinitionLoader(import.meta.url, 'definitions') @@ -35,7 +36,7 @@ describe('Pipeline', () => { it('should load code using node: scheme', async () => { const ptr = await loadPipelineDefinition('world-clock/node') - const pipeline = await createPipeline(ptr) + const pipeline = await createPipeline(ptr, { env }) const out = await getStream(pipeline.stream) @@ -44,7 +45,7 @@ describe('Pipeline', () => { it('should load code using file: scheme', async () => { const ptr = await loadPipelineDefinition('world-clock/file') - const pipeline = await createPipeline(ptr, { basePath: process.cwd() }) + const pipeline = await createPipeline(ptr, { env, basePath: process.cwd() }) const out = await getStream(pipeline.stream) @@ -53,12 +54,12 @@ describe('Pipeline', () => { it('should load code using async loaders', async () => { const ptr = await loadPipelineDefinition('world-clock/async') - const loaderRegistry = defaultLoaderRegistry() + const loaderRegistry = defaultLoaderRegistry(env) promisedEcmaScriptLoader.register(loaderRegistry) promisedUrlLoader.register(loaderRegistry) - const pipeline = await createPipeline(ptr, { loaderRegistry }) + const pipeline = await createPipeline(ptr, { env, loaderRegistry }) const out = await getStream(pipeline.stream) @@ -68,9 +69,8 @@ describe('Pipeline', () => { it('should load file contents using loader', async () => { // given const ptr = await loadPipelineDefinition('file-loader') - const loaderRegistry = defaultLoaderRegistry() const pipeline = await createPipeline(ptr, { - loaderRegistry, + env, basePath: process.cwd(), })