Commit
Get rid of as many node deps as possible and prefix all with node: to make it clearer where we use Node.js
pozylon committed Dec 19, 2024
1 parent 88818ac commit 3c95dc7
Showing 44 changed files with 95 additions and 124 deletions.
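
The commit applies two patterns throughout the diff below: Node.js built-ins are imported with the explicit node: scheme, and JSON files are loaded through ESM import attributes instead of hand-rolled fs/path helpers. A minimal TypeScript sketch of both patterns, assuming a recent Node.js (20.11+ or 22.x) where import.meta.dirname and JSON import attributes are available; the file and variable names here are illustrative, not taken from the diff:

// Built-in modules carry the explicit node: scheme, making runtime usage obvious.
import { hostname } from 'node:os';

// JSON is imported directly with an import attribute, replacing
// readFileSync + JSON.parse helpers such as the removed loadJSON() below.
const { default: packageJson } = await import(
  `${import.meta.dirname}/../package.json`,
  { with: { type: 'json' } },
);

console.log(`${hostname()} runs ${packageJson.name}@${packageJson.version}`);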
2 changes: 1 addition & 1 deletion .github/workflows/codeql-analysis.yml
@@ -2,7 +2,7 @@ name: "CodeQL"

on:
push:
branches: ['master', 'v2.x']
branches: ['master', 'v2.x', 'v3.x']
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
2 changes: 1 addition & 1 deletion examples/kitchensink/load_env.js
@@ -1,5 +1,5 @@
import dotenv from 'dotenv-extended';
import fs from 'fs';
import fs from 'node:fs';

dotenv.load({
silent: Boolean(process.env.SUPPRESS_ENV_ERRORS),
2 changes: 1 addition & 1 deletion examples/kitchensink/src/boot.ts
@@ -1,5 +1,5 @@
import express from 'express';
import http from 'http';
import http from 'node:http';
import { useExecutionCancellation } from 'graphql-yoga';
import { useResponseCache } from '@graphql-yoga/plugin-response-cache';
import { startPlatform, setAccessToken } from '@unchainedshop/platform';
24 changes: 4 additions & 20 deletions packages/api/src/context.ts
@@ -1,5 +1,3 @@
import fs from 'fs';
import path from 'path';
import { UnchainedCore } from '@unchainedshop/core';
import instantiateLoaders, { UnchainedLoaders } from './loaders/index.js';
import { getLocaleContext, UnchainedLocaleContext } from './locale-context.js';
@@ -58,24 +56,10 @@ export type UnchainedContextResolver = (
},
) => Promise<Context>;

export const loadJSON = (filename) => {
try {
const base = typeof __filename !== 'undefined' && __filename;
if (!base)
return {
version: process.env.npm_package_version,
};
const absolutePath = path.resolve(path.dirname(base), filename);
const data = JSON.parse(fs.readFileSync(absolutePath, 'utf-8'));
return data;
} catch {
return null;
}
};

const packageJson = loadJSON('../package.json');

const { UNCHAINED_API_VERSION = packageJson?.version || '2.x' } = process.env;
const { default: packageJson } = await import(`${import.meta.dirname}/../package.json`, {
with: { type: 'json' },
});
const { UNCHAINED_API_VERSION = packageJson?.version || '3.x' } = process.env;

export const createContextResolver =
(
3 changes: 1 addition & 2 deletions packages/api/src/express/createBulkImportMiddleware.ts
@@ -1,7 +1,6 @@
import { createLogger } from '@unchainedshop/logger';
import { checkAction } from '../acl.js';
import { actions } from '../roles/index.js';
import { IncomingMessage } from 'http';
import { Context } from '../context.js';

const logger = createLogger('unchained:bulk-import');
@@ -19,7 +18,7 @@ const methodWrongHandler = (res) => () => {
};

export default async function bulkImportMiddleware(
req: IncomingMessage & { query?: any; unchainedContext: Context },
req: Request & { query?: any; unchainedContext: Context },
res,
) {
try {
2 changes: 1 addition & 1 deletion packages/api/src/express/createERCMetadataMiddleware.ts
@@ -1,4 +1,4 @@
import path from 'path';
import path from 'node:path';
import { createLogger } from '@unchainedshop/logger';
import { Context } from '../context.js';
import { Request, RequestHandler } from 'express';
3 changes: 1 addition & 2 deletions packages/api/src/express/index.ts
@@ -11,9 +11,8 @@ import { UnchainedCore } from '@unchainedshop/core';
import { emit } from '@unchainedshop/events';
import { API_EVENTS } from '../events.js';
import { User } from '@unchainedshop/core-users';
import { IncomingMessage } from 'http';

const resolveUserRemoteAddress = (req: IncomingMessage) => {
const resolveUserRemoteAddress = (req: e.Request) => {
const remoteAddress =
(req.headers['x-real-ip'] as string) ||
(req.headers['x-forwarded-for'] as string) ||
5 changes: 2 additions & 3 deletions packages/api/src/fastify/index.ts
@@ -8,12 +8,11 @@ import { UnchainedCore } from '@unchainedshop/core';
import { emit } from '@unchainedshop/events';
import { API_EVENTS } from '../events.js';
import { User } from '@unchainedshop/core-users';
import { IncomingMessage } from 'http';
import fastifySession from '@fastify/session';
import fastifyCookie from '@fastify/cookie';
import { FastifyInstance } from 'fastify';
import { FastifyInstance, FastifyRequest } from 'fastify';

const resolveUserRemoteAddress = (req: IncomingMessage) => {
const resolveUserRemoteAddress = (req: FastifyRequest) => {
const remoteAddress =
(req.headers['x-real-ip'] as string) ||
(req.headers['x-forwarded-for'] as string) ||
1 change: 1 addition & 0 deletions packages/core-orders/src/module/configureOrdersModule.ts
@@ -36,6 +36,7 @@ export type OrdersModule = OrderQueries &
setPaymentProvider: (orderId: string, paymentProviderId: string) => Promise<Order>;
};

// @kontsedal/locco uses a deprecated way of importing files in ESM (node16 behavior)
const require = createRequire(import.meta.url);
const { Locker, MongoAdapter } = require('@kontsedal/locco');

2 changes: 1 addition & 1 deletion packages/core-worker/src/module/configureWorkerModule.ts
@@ -1,4 +1,4 @@
import os from 'os';
import os from 'node:os';
import { createLogger } from '@unchainedshop/logger';
import {
generateDbFilterById,
2 changes: 1 addition & 1 deletion packages/file-upload/src/director/FileAdapter.ts
@@ -1,4 +1,4 @@
import { Readable } from 'stream';
import type { Readable } from 'node:stream';
import { log, LogLevel } from '@unchainedshop/logger';
import { IBaseAdapter } from '@unchainedshop/utils';
import { UploadedFile, UploadFileData } from '../types.js';
4 changes: 2 additions & 2 deletions packages/mongodb/src/initDb.ts
@@ -1,4 +1,3 @@
import { mkdirSync } from 'fs';
import { Db, MongoClient } from 'mongodb';

let mongod;
@@ -7,9 +6,10 @@ export const startDb = async () => {
try {
// eslint-disable-next-line
// @ts-ignore
const { mkdir } = await import('node:fs/promises');
const { MongoMemoryServer } = await import('mongodb-memory-server');
try {
mkdirSync(`${process.cwd()}/.db`);
await mkdir(`${process.cwd()}/.db`);
} catch {
//
}
3 changes: 2 additions & 1 deletion packages/platform/package.json
@@ -42,7 +42,8 @@
"@unchainedshop/mongodb": "^3.0.0-rc6",
"@unchainedshop/plugins": "^3.0.0-rc6",
"@unchainedshop/roles": "^3.0.0-rc6",
"@unchainedshop/utils": "^3.0.0-rc6"
"@unchainedshop/utils": "^3.0.0-rc6",
"safe-stable-stringify": "^2.5.0"
},
"devDependencies": {
"@types/node": "^22.10.2",
4 changes: 2 additions & 2 deletions packages/platform/src/templates/resolveErrorReportTemplate.ts
@@ -1,5 +1,5 @@
import { TemplateResolver } from '@unchainedshop/core';
import util from 'util';
import { stringify } from 'safe-stable-stringify';

const {
EMAIL_FROM = '[email protected]',
@@ -11,7 +11,7 @@ const {
const formatWorkItems = (workItems) => {
return workItems
.map(({ _id, type, started, error }) => {
const stringifiedErrors = util.inspect(error, false, 10, false);
const stringifiedErrors = stringify(error, null, 2);
return `${new Date(started).toLocaleString()} ${type} (${_id}): ${stringifiedErrors}`;
})
.join('\n');
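
The template above now serializes work item errors with safe-stable-stringify instead of util.inspect. The library follows the JSON.stringify(value, replacer, space) signature but, as I read the intent of the swap, also tolerates circular references and emits keys in a deterministic order; a short TypeScript sketch of that assumed behavior with an illustrative error object:

import { stringify } from 'safe-stable-stringify';

// A work item error with a circular reference; plain JSON.stringify would throw here.
const error: Record<string, unknown> = { name: 'WorkerError', message: 'boom' };
error.self = error;

// safe-stable-stringify serializes the cycle as "[Circular]" and sorts keys stably,
// which keeps the generated error report readable.
console.log(stringify(error, null, 2));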
2 changes: 1 addition & 1 deletion packages/plugins/src/events/node-event-emitter.ts
@@ -1,4 +1,4 @@
import { EventEmitter } from 'events';
import { EventEmitter } from 'node:events';
import { setEmitAdapter, EmitAdapter } from '@unchainedshop/events';

const NodeEventEmitter = (): EmitAdapter => {
21 changes: 14 additions & 7 deletions packages/plugins/src/files/gridfs/gridfs-adapter.ts
@@ -1,6 +1,5 @@
import { URL } from 'url';
import { Readable, PassThrough } from 'stream';
import { pipeline } from 'stream/promises';
import { Readable, PassThrough } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import mimeType from 'mime-types';
import {
FileAdapter,
@@ -12,6 +11,8 @@ import {
import { UploadFileData } from '@unchainedshop/file-upload';
import sign from './sign.js';
import { filesSettings } from '@unchainedshop/core-files';
import { UnchainedCore } from '@unchainedshop/core';
import { GridFSFileUploadsModule } from './index.js';

const { ROOT_URL } = process.env;

@@ -23,7 +24,13 @@ const bufferToStream = (buffer: any) => {
return stream;
};

export const GridFSAdapter: IFileAdapter = {
export const GridFSAdapter: IFileAdapter<
UnchainedCore & {
modules: {
gridfsFileUploads: GridFSFileUploadsModule;
};
}
> = {
key: 'shop.unchained.file-upload-plugin.gridfs',
label: 'Uploads files to Database using GridFS',
version: '1.0.0',
@@ -64,8 +71,8 @@
} as UploadFileData & { putURL: string };
},

async uploadFileFromStream(directoryName: string, rawFile: any, { modules }: any) {
let stream;
async uploadFileFromStream(directoryName: string, rawFile: any, { modules }) {
let stream: Readable;
let fileName;
if (rawFile instanceof Promise) {
const { filename: f, createReadStream } = await rawFile;
@@ -104,7 +111,7 @@
async uploadFileFromURL(
directoryName: string,
{ fileLink, fileName: fname, fileId, headers }: any,
{ modules }: any,
{ modules },
) {
const { href } = new URL(fileLink);
const fileName = decodeURIComponent(fname || href.split('/').pop());
4 changes: 2 additions & 2 deletions packages/plugins/src/files/gridfs/gridfs-webhook.ts
@@ -1,5 +1,5 @@
import { pipeline, finished } from 'stream/promises';
import { PassThrough } from 'stream';
import { pipeline, finished } from 'node:stream/promises';
import { PassThrough } from 'node:stream';
import { buildHashedFilename } from '@unchainedshop/file-upload';
import express from 'express';
import sign from './sign.js';
2 changes: 2 additions & 0 deletions packages/plugins/src/files/gridfs/index.ts
@@ -27,3 +27,5 @@ export const configureGridFSFileUploadModule = ({ db }) => {
},
};
};

export type GridFSFileUploadsModule = ReturnType<typeof configureGridFSFileUploadModule>;
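
The new GridFSFileUploadsModule type is derived from the configure function via ReturnType, which is what lets GridFSAdapter above require modules.gridfsFileUploads through the IFileAdapter<...> generic. A TypeScript sketch of the pattern with hypothetical names:

// Deriving a module type from its configure function keeps the type and the
// implementation in sync without maintaining a separate interface by hand.
const configureExampleModule = ({ db }: { db: unknown }) => ({
  // db stands in for the injected database handle, as in configureGridFSFileUploadModule({ db }).
  async countThings(): Promise<number> {
    return 0;
  },
});

export type ExampleModule = ReturnType<typeof configureExampleModule>;

// Consumers can then demand the module on their context:
type ContextWithExample = { modules: { example: ExampleModule } };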
33 changes: 7 additions & 26 deletions packages/plugins/src/files/minio/minio-adapter.ts
@@ -1,7 +1,5 @@
import https from 'https';
import http, { OutgoingHttpHeaders } from 'http';
import { Readable } from 'stream';
import { URL } from 'url';
import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';
import { UploadFileData } from '@unchainedshop/file-upload';
import {
FileAdapter,
@@ -85,24 +83,6 @@ connectToMinio().then(function setClient(c) {
client = c;
});

const createHttpDownloadStream = async (
fileUrl: string,
headers: OutgoingHttpHeaders,
): Promise<http.IncomingMessage> => {
const { href, protocol } = new URL(fileUrl);
return new Promise((resolve, reject) => {
try {
if (protocol === 'http:') {
http.get(href, { headers }, resolve);
} else {
https.get(href, { headers }, resolve);
}
} catch (e) {
reject(e);
}
});
};

const getObjectStats = async (fileName: string) => {
if (!client) throw new Error('Minio not connected, check env variables');

@@ -211,17 +191,18 @@ export const MinioAdapter: IFileAdapter = {
const fileName = fname || href.split('/').pop();
const hashedFilename = await buildHashedFilename(directoryName, fileName, new Date());

const stream = await createHttpDownloadStream(fileLink, headers);
const type = mimeType.lookup(fileName) || stream.headers['content-type'];

const url = new URL(fileLink);
const response = await fetch(url, { headers });
const type = mimeType.lookup(fileName) || response.headers['content-type'];
const readable = Readable.fromWeb(response.body as ReadableStream<Uint8Array<ArrayBufferLike>>);
const metaData = {
'Content-Type': type,
};

await client.putObject(
MINIO_BUCKET_NAME,
generateMinioPath(directoryName, hashedFilename),
stream,
readable,
undefined,
metaData,
);
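
In the MinIO adapter above, the hand-written http/https download helper is replaced by the global fetch plus Readable.fromWeb, which bridges the web stream returned by fetch to the Node.js streams the storage client expects. A minimal TypeScript sketch of that pattern with an illustrative URL and consumer (Node.js 18+):

import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';

// Download with the built-in fetch and convert the web stream into a Node.js Readable.
const response = await fetch('https://example.com/file.png');
if (!response.ok || !response.body) throw new Error(`Download failed: ${response.status}`);

const readable = Readable.fromWeb(response.body as ReadableStream<Uint8Array>);
for await (const chunk of readable) {
  // Each chunk is a Buffer; pipe it wherever a Node.js Readable is accepted, e.g. putObject.
  console.log(`received ${chunk.length} bytes`);
}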
19 changes: 8 additions & 11 deletions packages/plugins/src/payment/datatrans-v2/api/makeFetcher.ts
@@ -1,11 +1,5 @@
import fs from 'fs';
import util from 'util';
import { resolve } from 'path';
import { createLogger } from '@unchainedshop/logger';

const readFile = util.promisify(fs.readFile);

const { DATATRANS_API_MOCKS_PATH } = process.env;
const { MOCK_APIS } = process.env;

const logger = createLogger('unchained:datatrans');

@@ -14,12 +8,15 @@ export default (
merchantId: string,
secret: string,
): ((path: string, body: unknown) => Promise<Response>) => {
if (DATATRANS_API_MOCKS_PATH) {
if (MOCK_APIS) {
return async (path): Promise<Response> => {
try {
const filePath = resolve(process.env.PWD, DATATRANS_API_MOCKS_PATH, `.${path}.json`);
const content = await readFile(filePath);
const json = JSON.parse(content.toString());
const { default: json } = await import(
`${import.meta.dirname}/../../../../tests/mock/datatrans/${path}.json`,
{
with: { type: 'json' },
}
);
return {
json: async () => json,
status: json?.error ? 500 : 204,
23 changes: 10 additions & 13 deletions packages/plugins/src/payment/payrexx/api/makeFetcher.ts
@@ -1,11 +1,6 @@
import fs from 'fs';
import util from 'util';
import { resolve } from 'path';
import { createLogger } from '@unchainedshop/logger';

const readFile = util.promisify(fs.readFile);

const { PAYREXX_API_MOCKS_PATH } = process.env;
const { MOCK_APIS } = process.env;

const logger = createLogger('unchained:payrexx');

@@ -14,17 +9,19 @@ export default (
instance: string = null,
secret: string = null,
): ((path: string, method: 'GET' | 'DELETE' | 'PUT' | 'POST', data?: any) => Promise<Response>) => {
if (PAYREXX_API_MOCKS_PATH) {
if (MOCK_APIS) {
return async (path): Promise<Response> => {
try {
const filePath = resolve(process.env.PWD, PAYREXX_API_MOCKS_PATH, `${path}.json`);
const content = await readFile(filePath);
const textData = content.toString();
const jsonData = JSON.parse(textData);
const { default: jsonData } = await import(
`${import.meta.dirname}/../../../../tests/mock/payrexx/${path}.json`,
{
with: { type: 'json' },
}
);
return {
json: async () => jsonData,
text: async () => textData,
ok: !jsonData?.error,
text: async () => JSON.stringify(jsonData?.error),
ok: true,
status: jsonData?.error ? 500 : 204,
} as any;
} catch (error) {