From 9a28e467bac6b2d9be1d390dba154d03eb2e2b21 Mon Sep 17 00:00:00 2001 From: Iain Sproat <68657+iainsproat@users.noreply.github.com> Date: Wed, 4 Sep 2024 13:06:23 +0100 Subject: [PATCH] refactor(preview-service): conforms to design patterns, adds tests, and is typed (#2429) Co-authored-by: Kristaps Fabians Geikins --- .circleci/config.yml | 71 ++++++- .gitguardian.yml | 17 ++ .../workflows/preview-service-acceptance.yml | 94 +++++++++ packages/preview-service/Dockerfile | 19 +- packages/preview-service/Makefile | 19 -- packages/preview-service/app.js | 43 ----- packages/preview-service/bg_service/index.js | 161 ---------------- .../bg_service/prometheusMetrics.js | 147 -------------- packages/preview-service/bin/www | 90 +-------- packages/preview-service/eslint.config.mjs | 74 ++++++- packages/preview-service/jsconfig.json | 5 - packages/preview-service/knex.js | 18 -- .../observability/expressLogging.js | 7 - .../preview-service/observability/logging.js | 16 -- packages/preview-service/package.json | 38 +++- packages/preview-service/readme.md | 34 +++- packages/preview-service/renderPage/README.md | 3 + .../{render_page => renderPage}/src/app.js | 7 +- .../src/example.html | 0 .../src/favicon.ico | Bin packages/preview-service/routes/api.js | 53 ----- packages/preview-service/routes/index.js | 10 - packages/preview-service/routes/objects.js | 83 -------- packages/preview-service/routes/preview.js | 181 ------------------ .../routes/services/objects_utils.js | 61 ------ packages/preview-service/src/aliasLoader.ts | 8 + packages/preview-service/src/bin.ts | 8 + packages/preview-service/src/bootstrap.ts | 2 + .../src/clients/execHealthcheck.ts | 6 + packages/preview-service/src/clients/knex.ts | 23 +++ .../src/clients/previewService.ts | 22 +++ .../preview-service/src/clients/puppeteer.ts | 74 +++++++ .../src/domain/backgroundWorker.ts | 7 + packages/preview-service/src/domain/const.ts | 1 + packages/preview-service/src/domain/domain.ts | 14 ++ 
.../src/observability/expressLogging.ts | 34 ++++ .../src/observability/logging.ts | 13 ++ .../src/observability/metricsApp.ts | 28 +++ .../src/observability/metricsRoute.ts | 15 ++ .../src/observability/prometheusMetrics.ts | 170 ++++++++++++++++ .../src/repositories/objectPreview.ts | 68 +++++++ .../src/repositories/objects.ts | 93 +++++++++ .../src/repositories/previews.ts | 16 ++ packages/preview-service/src/root.ts | 21 ++ .../src/scripts/puppeteerDriver.js | 47 +++++ packages/preview-service/src/server/app.ts | 37 ++++ .../preview-service/src/server/background.ts | 60 ++++++ .../preview-service/src/server/routes/api.ts | 66 +++++++ .../src/server/routes/index.ts | 13 ++ .../src/server/routes/objects.ts | 94 +++++++++ .../src/server/routes/preview.ts | 71 +++++++ packages/preview-service/src/server/server.ts | 109 +++++++++++ .../src/services/360preview.ts | 61 ++++++ .../src/services/pollForPreview.ts | 73 +++++++ .../src/services/screenshot.ts | 87 +++++++++ .../src/services/taskManager.ts | 41 ++++ packages/preview-service/src/utils/brand.ts | 9 + packages/preview-service/src/utils/env.ts | 28 +++ .../preview-service/src/utils/errorHandler.ts | 25 +++ packages/preview-service/src/utils/headers.ts | 7 + packages/preview-service/src/utils/runtime.ts | 9 + .../utils/speckleObjectsStream.ts} | 25 ++- .../tests/acceptance/README.md | 3 + .../tests/acceptance/acceptance.spec.ts | 113 +++++++++++ .../tests/e2e/roundtrip.spec.ts | 34 ++++ .../preview-service/tests/helpers/helpers.ts | 49 +++++ .../tests/helpers/testExtensions.ts | 95 +++++++++ .../tests/helpers/testKnexClient.ts | 20 ++ .../tests/hooks/globalSetup.ts | 64 +++++++ .../repositories/objectPreview.spec.ts | 38 ++++ .../tests/migrations/README.md | 7 + .../tests/migrations/migrations.ts | 106 ++++++++++ .../unit/services/pollForPreview.spec.ts | 48 +++++ .../tests/unit/services/screenshot.spec.ts | 47 +++++ packages/preview-service/tsconfig.build.json | 5 + packages/preview-service/tsconfig.json | 109 
+++++++++++ packages/preview-service/vitest.config.ts | 20 ++ ..._page.js => webpack.config.renderPage.cjs} | 10 +- setup/db/10-docker_postgres_init.sql | 16 +- .../templates/preview_service/deployment.yml | 12 +- utils/helm/speckle-server/values.schema.json | 15 ++ utils/helm/speckle-server/values.yaml | 6 + yarn.lock | 177 ++++++++++++++++- 83 files changed, 2774 insertions(+), 956 deletions(-) create mode 100644 .github/workflows/preview-service-acceptance.yml delete mode 100644 packages/preview-service/Makefile delete mode 100644 packages/preview-service/app.js delete mode 100644 packages/preview-service/bg_service/index.js delete mode 100644 packages/preview-service/bg_service/prometheusMetrics.js delete mode 100644 packages/preview-service/jsconfig.json delete mode 100644 packages/preview-service/knex.js delete mode 100644 packages/preview-service/observability/expressLogging.js delete mode 100644 packages/preview-service/observability/logging.js create mode 100644 packages/preview-service/renderPage/README.md rename packages/preview-service/{render_page => renderPage}/src/app.js (68%) rename packages/preview-service/{render_page => renderPage}/src/example.html (100%) rename packages/preview-service/{render_page => renderPage}/src/favicon.ico (100%) delete mode 100644 packages/preview-service/routes/api.js delete mode 100644 packages/preview-service/routes/index.js delete mode 100644 packages/preview-service/routes/objects.js delete mode 100644 packages/preview-service/routes/preview.js delete mode 100644 packages/preview-service/routes/services/objects_utils.js create mode 100644 packages/preview-service/src/aliasLoader.ts create mode 100644 packages/preview-service/src/bin.ts create mode 100644 packages/preview-service/src/bootstrap.ts create mode 100644 packages/preview-service/src/clients/execHealthcheck.ts create mode 100644 packages/preview-service/src/clients/knex.ts create mode 100644 packages/preview-service/src/clients/previewService.ts create mode 
100644 packages/preview-service/src/clients/puppeteer.ts create mode 100644 packages/preview-service/src/domain/backgroundWorker.ts create mode 100644 packages/preview-service/src/domain/const.ts create mode 100644 packages/preview-service/src/domain/domain.ts create mode 100644 packages/preview-service/src/observability/expressLogging.ts create mode 100644 packages/preview-service/src/observability/logging.ts create mode 100644 packages/preview-service/src/observability/metricsApp.ts create mode 100644 packages/preview-service/src/observability/metricsRoute.ts create mode 100644 packages/preview-service/src/observability/prometheusMetrics.ts create mode 100644 packages/preview-service/src/repositories/objectPreview.ts create mode 100644 packages/preview-service/src/repositories/objects.ts create mode 100644 packages/preview-service/src/repositories/previews.ts create mode 100644 packages/preview-service/src/root.ts create mode 100644 packages/preview-service/src/scripts/puppeteerDriver.js create mode 100644 packages/preview-service/src/server/app.ts create mode 100644 packages/preview-service/src/server/background.ts create mode 100644 packages/preview-service/src/server/routes/api.ts create mode 100644 packages/preview-service/src/server/routes/index.ts create mode 100644 packages/preview-service/src/server/routes/objects.ts create mode 100644 packages/preview-service/src/server/routes/preview.ts create mode 100644 packages/preview-service/src/server/server.ts create mode 100644 packages/preview-service/src/services/360preview.ts create mode 100644 packages/preview-service/src/services/pollForPreview.ts create mode 100644 packages/preview-service/src/services/screenshot.ts create mode 100644 packages/preview-service/src/services/taskManager.ts create mode 100644 packages/preview-service/src/utils/brand.ts create mode 100644 packages/preview-service/src/utils/env.ts create mode 100644 packages/preview-service/src/utils/errorHandler.ts create mode 100644 
packages/preview-service/src/utils/headers.ts create mode 100644 packages/preview-service/src/utils/runtime.ts rename packages/preview-service/{routes/speckleObjectsStream.js => src/utils/speckleObjectsStream.ts} (58%) create mode 100644 packages/preview-service/tests/acceptance/README.md create mode 100644 packages/preview-service/tests/acceptance/acceptance.spec.ts create mode 100644 packages/preview-service/tests/e2e/roundtrip.spec.ts create mode 100644 packages/preview-service/tests/helpers/helpers.ts create mode 100644 packages/preview-service/tests/helpers/testExtensions.ts create mode 100644 packages/preview-service/tests/helpers/testKnexClient.ts create mode 100644 packages/preview-service/tests/hooks/globalSetup.ts create mode 100644 packages/preview-service/tests/integration/repositories/objectPreview.spec.ts create mode 100644 packages/preview-service/tests/migrations/README.md create mode 100644 packages/preview-service/tests/migrations/migrations.ts create mode 100644 packages/preview-service/tests/unit/services/pollForPreview.spec.ts create mode 100644 packages/preview-service/tests/unit/services/screenshot.spec.ts create mode 100644 packages/preview-service/tsconfig.build.json create mode 100644 packages/preview-service/tsconfig.json create mode 100644 packages/preview-service/vitest.config.ts rename packages/preview-service/{webpack.config.render_page.js => webpack.config.renderPage.cjs} (83%) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4c05d74022..e0e39997e6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,6 +36,9 @@ workflows: - test-objectsender: filters: *filters-allow-all + - test-preview-service: + filters: *filters-allow-all + - test-ui-components: filters: *filters-allow-all @@ -176,6 +179,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-frontend: context: *docker-hub-context @@ -190,6 +194,7 @@ workflows: - test-objectsender - test-server - 
test-server-no-ff + - test-preview-service - docker-publish-frontend-2: context: *docker-hub-context @@ -204,6 +209,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-webhooks: context: *docker-hub-context @@ -218,6 +224,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-file-imports: context: *docker-hub-context @@ -232,6 +239,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-previews: context: *docker-hub-context @@ -246,6 +254,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-test-container: context: *docker-hub-context @@ -260,6 +269,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-monitor-container: context: *docker-hub-context @@ -274,6 +284,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - docker-publish-docker-compose-ingress: context: *docker-hub-context @@ -288,6 +299,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-preview-service - publish-helm-chart: filters: &filters-publish @@ -331,6 +343,7 @@ workflows: - test-frontend-2 - test-viewer - test-objectsender + - test-preview-service - publish-viewer-sandbox-cloudflare-pages: filters: *filters-publish @@ -579,8 +592,7 @@ jobs: working_directory: 'packages/frontend-2' test-viewer: - docker: &docker-node-browsers-image - - image: cimg/node:18.19.0-browsers + docker: *docker-node-browsers-image resource_class: large steps: - checkout @@ -617,9 +629,60 @@ jobs: command: yarn test working_directory: 'packages/viewer' + test-preview-service: + docker: + - image: cimg/node:18.19.0 + - image: cimg/postgres:14.11 + environment: + POSTGRES_DB: preview_service_test + POSTGRES_PASSWORD: preview_service_test + POSTGRES_USER: 
preview_service_test + resource_class: large + environment: {} + steps: + - checkout + - restore_cache: + name: Restore Yarn Package Cache + keys: + - yarn-packages-server-{{ checksum "yarn.lock" }} + - run: + name: Install Dependencies + command: yarn + + - run: + name: Install Dependencies v2 (.node files missing bug) + command: yarn + + - save_cache: + name: Save Yarn Package Cache + key: yarn-packages-server-{{ checksum "yarn.lock" }} + paths: + - .yarn/cache + - .yarn/unplugged + + - run: + name: Build public packages + command: yarn build:public + + - run: + name: Lint everything + command: yarn lint:ci + working_directory: 'packages/preview-service' + + - run: + name: Copy .env.example to .env + command: | + #!/usr/bin/env bash + cp packages/preview-service/.env.example packages/preview-service/.env + sed -i~ '/^PG_CONNECTION_STRING=/s/=.*/="postgres:\/\/preview_service_test:preview_service_test@127.0.0.1:5432\/preview_service_test"/' packages/preview-service/.env + + - run: + name: Run tests + command: yarn test + working_directory: 'packages/preview-service' + test-objectsender: - docker: &docker-node-browsers-image - - image: cimg/node:18.19.0-browsers + docker: *docker-node-browsers-image resource_class: large steps: - checkout diff --git a/.gitguardian.yml b/.gitguardian.yml index 8de8c5ef28..d8f198267a 100644 --- a/.gitguardian.yml +++ b/.gitguardian.yml @@ -12,4 +12,21 @@ secret: name: local test license - match: 7a4ab6f7bfbcc0a37aa3a0fb00fd5b6edd1d524f393a6054e242eb28f5c06be5 name: 'packages/server/modules/core/tests/graph.spec.js - test secret' + - match: be603148062b367f828a58bdd695149d24f55f7c7f2e2c0bc31abd147cd07e86 + name: packages/server/modules/webhooks/tests/cleanup.spec.ts - test password + - match: d1c44da2d7d52afaf219ff9789df7c04a79be80977336d7c87652db736b07538 + name: packages/server/.env-example - test password for keycloak + - match: 05b116fa36d25a831d96d5b4ecd45b962ebf9345dcf81ac0950c4adb49e10183 + name: 
packages/server/modules/serverinvites/tests/invites.spec.ts - test password + - match: 22ef4aa9beab564872bb1f15ff7592894ad445a68d6b03364f890cc5c3866b5d + name: packages/server/modules/core/tests/users.spec.js - test password + - match: 05b116fa36d25a831d96d5b4ecd45b962ebf9345dcf81ac0950c4adb49e10183 + name: packages/server/modules/core/tests/users.spec.js - test password + - match: d1c44da2d7d52afaf219ff9789df7c04a79be80977336d7c87652db736b07538 + name: setup/keycloak/speckle-realm.json - secret for dev keycloak + - match: b92d3b9844a823512dd1831c1eea5d9810c154027e07a36f007232fc26e9f70c + name: setup/keycloak/speckle-realm.json - secret for dev keycloak + - match: 2e1b3675a4049cd39fe6db081735f747730969071528270800f00fa98720d198 + name: setup/keycloak/speckle-realm.json - algorithm name + version: 2 diff --git a/.github/workflows/preview-service-acceptance.yml b/.github/workflows/preview-service-acceptance.yml new file mode 100644 index 0000000000..ff76baa1e1 --- /dev/null +++ b/.github/workflows/preview-service-acceptance.yml @@ -0,0 +1,94 @@ +name: Preview service acceptance test + +on: + workflow_dispatch: + pull_request: # Pushing a new commit to the HEAD ref of a pull request will trigger the β€œsynchronize” event + paths: + - .yarnrc.yml . + - .yarn + - package.json + - packages/frontend-2/type-augmentations/stubs + - packages/preview-service + - packages/viewer + - packages/objectloader + - packages/shared + +jobs: + preview-service-acceptance: + name: Preview Service Acceptance test + runs-on: ubuntu-latest + services: + postgres: + # Docker Hub image + image: postgres:14 + env: + POSTGRES_DB: preview_service_test + POSTGRES_PASSWORD: preview_service_test + POSTGRES_USER: preview_service_test + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + permissions: + contents: write # to update the screenshot saved in the branch. 
This is a HACK as GitHub API does not yet support uploading attachments to a comment. + pull-requests: write # to write a comment on the PR + + steps: + - uses: actions/checkout@v4 + - uses: pnpm/action-setup@v4 + name: Install pnpm + with: + version: 9 + run_install: false + - name: Install Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'yarn' + - name: Install dependencies + working-directory: utils/preview-service-acceptance + run: yarn install + + #TODO load the docker image from a previous job + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Build and load preview-service Docker image + uses: docker/build-push-action@v6 + with: + context: . + file: ./packages/preview-service/Dockerfile + load: true + push: false + tags: speckle/preview-service:local + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Run the acceptance test + working-directory: packages/preview-service + run: yarn test:acceptance + env: + PREVIEW_SERVICE_IMAGE: speckle/preview-service:local + OUTPUT_FILE_PATH: /tmp/preview-service-output.png + NODE_ENV: test + PG_CONNECTION_STRING: postgres://preview_service_test:preview_service_test@localhost:5432/preview_service_test + + - uses: actions/upload-artifact@v4 + name: Upload the output from the preview-service + id: upload-preview-service-output + with: + name: preview-service-output + path: /tmp/preview-service-output.png + - uses: actions/github-script@v7 + with: + script: | + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: 'πŸ“Έ Preview service has generated the following image:

' + }) diff --git a/packages/preview-service/Dockerfile b/packages/preview-service/Dockerfile index 1d27c3951e..17756a5b87 100644 --- a/packages/preview-service/Dockerfile +++ b/packages/preview-service/Dockerfile @@ -1,7 +1,7 @@ # NOTE: Docker context should be set to git root directory, to include the viewer ARG NODE_ENV=production -FROM node:18-bookworm-slim@sha256:408f8cbbb7b33a5bb94bdb8862795a94d2b64c2d516856824fd86c4a5594a443 as build-stage +FROM node:18-bookworm-slim@sha256:408f8cbbb7b33a5bb94bdb8862795a94d2b64c2d516856824fd86c4a5594a443 AS build-stage ARG NODE_ENV ENV NODE_ENV=${NODE_ENV} @@ -36,7 +36,9 @@ COPY packages/preview-service ./packages/preview-service/ # This way the foreach only builds the frontend and its deps RUN yarn workspaces foreach -W run build -FROM node:18-bookworm-slim@sha256:408f8cbbb7b33a5bb94bdb8862795a94d2b64c2d516856824fd86c4a5594a443 as node +# google-chrome-stable is only available for amd64 so we have to fix the platform +# hadolint ignore=DL3029 +FROM --platform=linux/amd64 node:18-bookworm-slim@sha256:408f8cbbb7b33a5bb94bdb8862795a94d2b64c2d516856824fd86c4a5594a443 AS node SHELL ["/bin/bash", "-o", "pipefail", "-c"] # hadolint ignore=DL3008,DL3015 @@ -47,13 +49,16 @@ RUN apt-get update && \ # wget has different versions for different architectures wget \ gnupg=2.2.40-1.1 && \ + # Clean up + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor -o /usr/share/keyrings/googlechrome-linux-keyring.gpg && \ - sh -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/googlechrome-linux-keyring.gpg] https://dl-ssl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' && \ + sh -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/googlechrome-linux-keyring.gpg] https://dl-ssl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' # Install Chrome and fonts - apt-get update && \ +RUN apt-get 
update && \ DEBIAN_FRONTEND=noninteractive apt-get install -y \ --no-install-recommends \ - google-chrome-stable \ + google-chrome-stable=128.0.6613.119-1 \ fonts-ipafont-gothic=00303-23 \ fonts-wqy-zenhei=0.9.45-8 \ fonts-thai-tlwg=1:0.7.3-1 \ @@ -74,7 +79,7 @@ COPY .yarnrc.yml . COPY .yarn ./.yarn COPY package.json yarn.lock ./ -# Onyl copy in the relevant package.json files for the dependencies +# Only copy in the relevant package.json files for the dependencies COPY packages/frontend-2/type-augmentations/stubs ./packages/frontend-2/type-augmentations/stubs/ COPY packages/preview-service/package.json ./packages/preview-service/ @@ -99,4 +104,4 @@ RUN groupadd -g 30000 -r pptruser && \ # Run everything after as non-privileged user. USER pptruser -ENTRYPOINT [ "tini", "--", "node", "bin/www" ] +ENTRYPOINT [ "tini", "--", "node", "--loader=./dist/src/aliasLoader.js", "bin/www" ] diff --git a/packages/preview-service/Makefile b/packages/preview-service/Makefile deleted file mode 100644 index 686650ad86..0000000000 --- a/packages/preview-service/Makefile +++ /dev/null @@ -1,19 +0,0 @@ - - -build: - docker build -t preview-service ../.. -f Dockerfile - -run: - docker run -it --rm --net=host \ - -e PG_CONNECTION_STRING="postgres://speckle:speckle@127.0.0.1/speckle" \ - preview-service - -run-release: - docker run -it --rm --net=host \ - -e PG_CONNECTION_STRING="postgres://speckle:speckle@127.0.0.1/speckle" \ - speckle/speckle-preview-service:v2.3.3 - - -small: - docker build -t small-preview-service ../.. 
-f Dockerfile.small - docker run -it --rm --net=host -e PG_CONNECTION_STRING="postgres://speckle:speckle@127.0.0.1/speckle" small-preview-service bash diff --git a/packages/preview-service/app.js b/packages/preview-service/app.js deleted file mode 100644 index a0be11bf64..0000000000 --- a/packages/preview-service/app.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict' - -const createError = require('http-errors') -const express = require('express') -const path = require('path') -const cookieParser = require('cookie-parser') - -const indexRouter = require('./routes/index') -const previewRouter = require('./routes/preview') -const objectsRouter = require('./routes/objects') -const apiRouter = require('./routes/api') -const { LoggingExpressMiddleware } = require('./observability/expressLogging') - -const app = express() - -app.use(LoggingExpressMiddleware) - -app.use(express.json({ limit: '100mb' })) -app.use(express.urlencoded({ limit: '100mb', extended: false })) -app.use(cookieParser()) -app.use(express.static(path.join(__dirname, 'public'))) - -app.use('/', indexRouter) -app.use('/preview', previewRouter) -app.use('/objects', objectsRouter) -app.use('/api', apiRouter) - -// catch 404 and forward to error handler -app.use(function (req, res, next) { - next(createError(404, `Not Found: ${req.url}`)) -}) - -// error handler -app.use(function (err, req, res) { - let errorText = err.message - if (req.app.get('env') === 'development') { - errorText = `
${err.message}: ${err.status}\n${err.stack}
` - } - res.status(err.status || 500) - res.send(errorText) -}) - -module.exports = app diff --git a/packages/preview-service/bg_service/index.js b/packages/preview-service/bg_service/index.js deleted file mode 100644 index c4d97ba75b..0000000000 --- a/packages/preview-service/bg_service/index.js +++ /dev/null @@ -1,161 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const knex = require('../knex') -const fetch = require('node-fetch') -const fs = require('fs') -const metrics = require('./prometheusMetrics') -const joinImages = require('join-images') -const { logger } = require('../observability/logging') - -let shouldExit = false - -const HEALTHCHECK_FILE_PATH = '/tmp/last_successful_query' - -async function startTask() { - const { rows } = await knex.raw(` - UPDATE object_preview - SET - "previewStatus" = 1, - "lastUpdate" = NOW() - FROM ( - SELECT "streamId", "objectId" FROM object_preview - WHERE "previewStatus" = 0 OR ("previewStatus" = 1 AND "lastUpdate" < NOW() - INTERVAL '1 WEEK') - ORDER BY "priority" ASC, "lastUpdate" ASC - LIMIT 1 - ) as task - WHERE object_preview."streamId" = task."streamId" AND object_preview."objectId" = task."objectId" - RETURNING object_preview."streamId", object_preview."objectId" - `) - return rows[0] -} - -async function doTask(task) { - const previewUrl = `http://127.0.0.1:3001/preview/${task.streamId}/${task.objectId}` - - try { - let res = await fetch(previewUrl) - res = await res.json() - // let imgBuffer = await res.buffer() // this gets the binary response body - - const metadata = {} - const allImgsArr = [] - let i = 0 - for (const angle in res) { - const imgBuffer = new Buffer.from( - res[angle].replace(/^data:image\/\w+;base64,/, ''), - 'base64' - ) - const previewId = crypto.createHash('md5').update(imgBuffer).digest('hex') - - // Save first preview image - if (i++ === 0) { - await knex.raw( - 'INSERT INTO "previews" (id, data) VALUES (?, ?) 
ON CONFLICT DO NOTHING', - [previewId, imgBuffer] - ) - metadata[angle] = previewId - } - - allImgsArr.push(imgBuffer) - } - - // stitch 360 image - const fullImg = await joinImages.joinImages(allImgsArr, { - direction: 'horizontal', - offset: 700, - margin: '0 700 0 700', - color: { alpha: 0, r: 0, g: 0, b: 0 } - }) - const png = await fullImg.png({ quality: 95 }) - const buff = await png.toBuffer() - const fullImgId = crypto.createHash('md5').update(buff).digest('hex') - - await knex.raw( - 'INSERT INTO "previews" (id, data) VALUES (?, ?) ON CONFLICT DO NOTHING', - [fullImgId, buff] - ) - metadata['all'] = fullImgId - - // Update preview metadata - await knex.raw( - ` - UPDATE object_preview - SET - "previewStatus" = 2, - "lastUpdate" = NOW(), - "preview" = ? - WHERE "streamId" = ? AND "objectId" = ? - `, - [metadata, task.streamId, task.objectId] - ) - - await knex.raw( - `NOTIFY preview_generation_update, 'finished:${task.streamId}:${task.objectId}'` - ) - } catch (err) { - // Update preview metadata - await knex.raw( - ` - UPDATE object_preview - SET - "previewStatus" = 3, - "lastUpdate" = NOW(), - "preview" = ? - WHERE "streamId" = ? AND "objectId" = ? 
- `, - [{ err: err.toString() }, task.streamId, task.objectId] - ) - metrics.metricOperationErrors.labels('preview').inc() - } -} - -async function tick() { - if (shouldExit) { - process.exit(0) - } - - try { - const task = await startTask() - - fs.writeFile(HEALTHCHECK_FILE_PATH, '' + Date.now(), () => {}) - - if (!task) { - setTimeout(tick, 1000) - return - } - - const metricDurationEnd = metrics.metricDuration.startTimer() - - await doTask(task) - - metricDurationEnd({ op: 'preview' }) - - // Check for another task very soon - setTimeout(tick, 10) - } catch (err) { - metrics.metricOperationErrors.labels('main_loop').inc() - logger.error(err, 'Error executing task') - setTimeout(tick, 5000) - } -} - -async function startPreviewService() { - logger.info('πŸ“Έ Started Preview Service') - - process.on('SIGTERM', () => { - shouldExit = true - logger.info('Shutting down...') - }) - - process.on('SIGINT', () => { - shouldExit = true - logger.info('Shutting down...') - }) - - metrics.initPrometheusMetrics() - - tick() -} - -module.exports = { startPreviewService } diff --git a/packages/preview-service/bg_service/prometheusMetrics.js b/packages/preview-service/bg_service/prometheusMetrics.js deleted file mode 100644 index 922c9ef72c..0000000000 --- a/packages/preview-service/bg_service/prometheusMetrics.js +++ /dev/null @@ -1,147 +0,0 @@ -/* eslint-disable no-unused-vars */ -'use strict' - -const http = require('http') -const prometheusClient = require('prom-client') -const knex = require('../knex') - -let metricFree = null -let metricUsed = null -let metricPendingAquires = null -let metricPendingCreates = null -let metricPendingValidations = null -let metricRemainingCapacity = null -let metricQueryDuration = null -let metricQueryErrors = null - -const queryStartTime = {} -prometheusClient.register.clear() -prometheusClient.register.setDefaultLabels({ - project: 'speckle-server', - app: 'preview-service' -}) -prometheusClient.collectDefaultMetrics() - -let 
prometheusInitialized = false - -function initKnexPrometheusMetrics() { - metricFree = new prometheusClient.Gauge({ - name: 'speckle_server_knex_free', - help: 'Number of free DB connections', - collect() { - this.set(knex.client.pool.numFree()) - } - }) - - metricUsed = new prometheusClient.Gauge({ - name: 'speckle_server_knex_used', - help: 'Number of used DB connections', - collect() { - this.set(knex.client.pool.numUsed()) - } - }) - - metricPendingAquires = new prometheusClient.Gauge({ - name: 'speckle_server_knex_pending', - help: 'Number of pending DB connection aquires', - collect() { - this.set(knex.client.pool.numPendingAcquires()) - } - }) - - metricPendingCreates = new prometheusClient.Gauge({ - name: 'speckle_server_knex_pending_creates', - help: 'Number of pending DB connection creates', - collect() { - this.set(knex.client.pool.numPendingCreates()) - } - }) - - metricPendingValidations = new prometheusClient.Gauge({ - name: 'speckle_server_knex_pending_validations', - help: 'Number of pending DB connection validations. 
This is a state between pending acquisition and acquiring a connection.', - collect() { - this.set(knex.client.pool.numPendingValidations()) - } - }) - - metricRemainingCapacity = new prometheusClient.Gauge({ - name: 'speckle_server_knex_remaining_capacity', - help: 'Remaining capacity of the DB connection pool', - collect() { - const postgresMaxConnections = - parseInt(process.env.POSTGRES_MAX_CONNECTIONS_PREVIEW_SERVICE) || 2 - const demand = - knex.client.pool.numUsed() + - knex.client.pool.numPendingCreates() + - knex.client.pool.numPendingValidations() + - knex.client.pool.numPendingAcquires() - - this.set(Math.max(postgresMaxConnections - demand, 0)) - } - }) - - metricQueryDuration = new prometheusClient.Summary({ - name: 'speckle_server_knex_query_duration', - help: 'Summary of the DB query durations in seconds' - }) - - metricQueryErrors = new prometheusClient.Counter({ - name: 'speckle_server_knex_query_errors', - help: 'Number of DB queries with errors' - }) - - knex.on('query', (data) => { - const queryId = data.__knexQueryUid + '' - queryStartTime[queryId] = Date.now() - }) - - knex.on('query-response', (data, obj, builder) => { - const queryId = obj.__knexQueryUid + '' - const durationSec = (Date.now() - queryStartTime[queryId]) / 1000 - delete queryStartTime[queryId] - if (!isNaN(durationSec)) metricQueryDuration.observe(durationSec) - }) - - knex.on('query-error', (err, querySpec) => { - const queryId = querySpec.__knexQueryUid + '' - const durationSec = (Date.now() - queryStartTime[queryId]) / 1000 - delete queryStartTime[queryId] - - if (!isNaN(durationSec)) metricQueryDuration.observe(durationSec) - metricQueryErrors.inc() - }) -} - -module.exports = { - initPrometheusMetrics() { - if (prometheusInitialized) return - prometheusInitialized = true - - initKnexPrometheusMetrics() - - // Define the HTTP server - const server = http.createServer(async (req, res) => { - if (req.url === '/metrics') { - res.setHeader('Content-Type', 
prometheusClient.register.contentType) - res.end(await prometheusClient.register.metrics()) - } else { - res.end('Speckle Preview Service - prometheus metrics') - } - }) - server.listen(Number(process.env.PROMETHEUS_METRICS_PORT) || 9094) - }, - - metricDuration: new prometheusClient.Histogram({ - name: 'speckle_server_operation_duration', - help: 'Summary of the operation durations in seconds', - buckets: [0.5, 1, 5, 10, 30, 60, 300, 600, 1200, 1800], - labelNames: ['op'] - }), - - metricOperationErrors: new prometheusClient.Counter({ - name: 'speckle_server_operation_errors', - help: 'Number of operations with errors', - labelNames: ['op'] - }) -} diff --git a/packages/preview-service/bin/www b/packages/preview-service/bin/www index aff48a6394..5bbe596274 100755 --- a/packages/preview-service/bin/www +++ b/packages/preview-service/bin/www @@ -1,90 +1,2 @@ #!/usr/bin/env node - -/** - * Module dependencies. - */ - -const app = require('../app') -const http = require('http') -const { startPreviewService } = require('../bg_service') -const { serverLogger, logger } = require('../observability/logging') - -/** - * Get port from environment and store in Express. - */ - -const port = normalizePort(process.env.PORT || '3001') -app.set('port', port) - -/** - * Create HTTP server. - */ - -const server = http.createServer(app) - -/** - * Listen on provided port, on all network interfaces. - */ - -const host = process.env.HOST || '127.0.0.1' -server.listen(port, host) -server.on('error', onError) -server.on('listening', onListening) - -/** - * Normalize a port into a number, string, or false. - */ - -function normalizePort(val) { - const port = parseInt(val, 10) - - if (isNaN(port)) { - // named pipe - return val - } - - if (port >= 0) { - // port number - return port - } - - return false -} - -/** - * Event listener for HTTP server "error" event. 
- */ - -function onError(error) { - if (error.syscall !== 'listen') { - throw error - } - - const bind = typeof port === 'string' ? 'Pipe ' + port : 'Port ' + port - - // handle specific listen errors with friendly messages - switch (error.code) { - case 'EACCES': - logger.error(error, bind + ' requires elevated privileges') - process.exit(1) - break - case 'EADDRINUSE': - logger.error(error, bind + ' is already in use') - process.exit(1) - break - default: - throw error - } -} - -/** - * Event listener for HTTP server "listening" event. - */ - -function onListening() { - const addr = server.address() - const bind = typeof addr === 'string' ? 'pipe ' + addr : 'port ' + addr?.port - serverLogger.info('Listening on ' + bind) - - startPreviewService() -} +import '../dist/src/bin.js' diff --git a/packages/preview-service/eslint.config.mjs b/packages/preview-service/eslint.config.mjs index 5e825c4d58..4fba16907f 100644 --- a/packages/preview-service/eslint.config.mjs +++ b/packages/preview-service/eslint.config.mjs @@ -1,32 +1,86 @@ -import { baseConfigs, globals } from '../../eslint.config.mjs' +import tseslint from 'typescript-eslint' +import { + baseConfigs, + getESMDirname, + globals, + prettierConfig +} from '../../eslint.config.mjs' -/** - * @type {Array} - */ const configs = [ ...baseConfigs, { - ignores: ['public', 'docs'] + ignores: ['dist', 'public', 'docs'] }, { - files: ['**/*.js', 'bin/www'], - ignores: ['render_page', '**/*.mjs'], + files: ['webpack.config.renderPage.cjs'], languageOptions: { - sourceType: 'commonjs', globals: { ...globals.node } } }, { - files: ['render_page/**/*.js'], + files: ['**/*.js'], + ignores: ['renderPage', '**/*.mjs', 'src/scripts/puppeteerDriver.js'], languageOptions: { sourceType: 'module', + globals: { + ...globals.node + } + } + }, + { + files: ['bin/www'], + languageOptions: { + sourceType: 'module', + globals: { + ...globals.node + } + } + }, + { + files: ['renderPage/**/*.js'], + languageOptions: { + sourceType: 'module', 
+ globals: { + ...globals.browser + } + } + }, + { + files: ['src/scripts/puppeteerDriver.js'], + languageOptions: { globals: { ...globals.browser } } - } + }, + ...tseslint.configs.recommendedTypeChecked.map((c) => ({ + ...c, + files: [...(c.files || []), '**/*.ts', '**/*.d.ts'] + })), + { + files: ['**/*.ts', '**/*.d.ts'], + languageOptions: { + parserOptions: { + tsconfigRootDir: getESMDirname(import.meta.url), + project: './tsconfig.json' + } + }, + rules: { + '@typescript-eslint/no-explicit-any': 'error', + '@typescript-eslint/no-unsafe-return': 'error' + } + }, + { + files: ['**/*.spec.{js,ts}'], + languageOptions: { + globals: { + ...globals.node + } + } + }, + prettierConfig ] export default configs diff --git a/packages/preview-service/jsconfig.json b/packages/preview-service/jsconfig.json deleted file mode 100644 index 259b91eb83..0000000000 --- a/packages/preview-service/jsconfig.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "extends": "../../jsconfig.base.json", - "compilerOptions": {}, - "include": ["*.js", "bg_service", "bin/www", "render_page", "routes"] -} diff --git a/packages/preview-service/knex.js b/packages/preview-service/knex.js deleted file mode 100644 index 8a79ffea5d..0000000000 --- a/packages/preview-service/knex.js +++ /dev/null @@ -1,18 +0,0 @@ -/* eslint-disable camelcase */ -'use strict' - -module.exports = require('knex')({ - client: 'pg', - connection: { - application_name: 'speckle_preview_service', - connectionString: - process.env.PG_CONNECTION_STRING || 'postgres://speckle:speckle@127.0.0.1/speckle' - }, - pool: { - min: 0, - max: parseInt(process.env.POSTGRES_MAX_CONNECTIONS_PREVIEW_SERVICE) || 2, - acquireTimeoutMillis: 16000, //allows for 3x creation attempts plus idle time between attempts - createTimeoutMillis: 5000 - } - // migrations are in managed in the server package -}) diff --git a/packages/preview-service/observability/expressLogging.js b/packages/preview-service/observability/expressLogging.js deleted file mode 100644 
index 49cd5fac13..0000000000 --- a/packages/preview-service/observability/expressLogging.js +++ /dev/null @@ -1,7 +0,0 @@ -const { logger } = require('./logging') -const HttpLogger = require('pino-http') - -module.exports.LoggingExpressMiddleware = HttpLogger({ - logger, - autoLogging: false -}) diff --git a/packages/preview-service/observability/logging.js b/packages/preview-service/observability/logging.js deleted file mode 100644 index b3ad843944..0000000000 --- a/packages/preview-service/observability/logging.js +++ /dev/null @@ -1,16 +0,0 @@ -const Observability = require('@speckle/shared/dist/commonjs/observability/index.js') - -// loggers for specific components within normal operation -const logger = Observability.extendLoggerComponent( - Observability.getLogger( - process.env.LOG_LEVEL || 'info', - process.env.LOG_PRETTY === 'true' - ), - 'preview-service' -) -const serverLogger = Observability.extendLoggerComponent(logger, 'server') - -module.exports = { - logger, - serverLogger -} diff --git a/packages/preview-service/package.json b/packages/preview-service/package.json index cc2bc47e00..8b19d46e9a 100644 --- a/packages/preview-service/package.json +++ b/packages/preview-service/package.json @@ -3,21 +3,33 @@ "private": true, "version": "2.5.4", "description": "Generate PNG previews of Speckle objects by using a headless viewer", - "main": "index.js", + "main": "bin/www", "homepage": "https://speckle.systems", "repository": { "type": "git", "url": "https://github.com/specklesystems/speckle-server.git", "directory": "packages/preview-service" }, + "type": "module", "engines": { "node": "^18.19.0" }, "scripts": { - "dev": "LOG_PRETTY=true nodemon --trace-deprecation ./bin/www", + "build:tsc:watch": "tsc -p ./tsconfig.build.json --watch", + "build:webpack:watch": "webpack --env build --config webpack.config.renderPage.cjs --watch", + "run:watch": "NODE_ENV=development LOG_PRETTY=true LOG_LEVEL=debug nodemon --exec \"yarn start\" --trace-deprecation --watch 
./bin/www --watch ./dist", + "dev": "concurrently \"npm:build:tsc:watch\" \"npm:build:webpack:watch\" \"npm:run:watch\"", "dev:headed": "PREVIEWS_HEADED=true yarn dev", - "build": "webpack --env dev --config webpack.config.render_page.js && webpack --env build --config webpack.config.render_page.js", - "lint": "eslint ." + "build:tsc": "rimraf ./dist/src && tsc -p ./tsconfig.build.json", + "build:webpack": "webpack --env build --config webpack.config.renderPage.cjs", + "build": "yarn build:tsc && yarn build:webpack", + "lint": "yarn lint:tsc && yarn lint:eslint", + "lint:ci": "yarn lint:tsc", + "lint:tsc": "tsc --noEmit", + "lint:eslint": "eslint .", + "start": "node --loader=./dist/src/aliasLoader.js ./bin/www", + "test": "NODE_ENV=test LOG_LEVEL=silent LOG_PRETTY=true vitest run --sequence.shuffle --exclude 'tests/acceptance/**/*.spec.ts'", + "test:acceptance": "NODE_ENV=test LOG_LEVEL=debug LOG_PRETTY=true vitest run 'tests/acceptance/acceptance.spec.ts' --sequence.shuffle --hookTimeout 60000 --testNamePattern 'Acceptance'" }, "dependencies": { "@speckle/objectloader": "workspace:^", @@ -25,12 +37,15 @@ "@speckle/viewer": "workspace:^", "cookie-parser": "~1.4.4", "crypto": "^1.0.1", + "dotenv": "^16.4.5", + "esm-module-alias": "^2.2.0", "express": "^4.19.2", "file-type": "^16.5.4", "http-errors": "~1.6.3", "join-images": "^1.1.3", "knex": "^2.4.1", "lodash": "^4.17.21", + "lodash-es": "^4.17.21", "node-fetch": "^2.6.1", "pg": "^8.7.3", "pg-query-stream": "^4.2.3", @@ -40,18 +55,31 @@ "prom-client": "^14.0.1", "puppeteer": "^22.11.1", "sharp": "^0.32.6", + "tarn": "^3.0.2", "yargs": "^17.3.0", - "zlib": "^1.0.5" + "zlib": "^1.0.5", + "zod": "^3.23.8" }, "devDependencies": { "@babel/core": "^7.17.5", + "@types/express": "^4.17.13", + "@types/lodash-es": "^4.17.6", + "@types/node": "^18.19.38", + "@vitest/coverage-istanbul": "^1.6.0", "babel-loader": "^8.2.2", "clean-webpack-plugin": "^4.0.0-alpha.0", + "concurrently": "^8.2.2", + "crypto-random-string": "^5.0.0", 
"eslint": "^9.4.0", "eslint-config-prettier": "^9.1.0", + "eslint-plugin-vitest": "^0.5.4", "html-webpack-plugin": "^5.3.1", "nodemon": "^2.0.20", "prettier": "^2.5.1", + "rimraf": "^5.0.7", + "typescript": "^4.6.4", + "typescript-eslint": "^7.12.0", + "vitest": "^1.6.0", "webpack": "^5.76.0", "webpack-cli": "^4.6.0", "webpack-dev-server": "^4.6.0" diff --git a/packages/preview-service/readme.md b/packages/preview-service/readme.md index 7f9ac892fc..cf936abe41 100644 --- a/packages/preview-service/readme.md +++ b/packages/preview-service/readme.md @@ -10,17 +10,31 @@ This is an overview of this service: ## Run locally -With an updated viewer installed in the current directory, you should first build the frontend-part of the preview service: The simple webpage with the viewer that will be accessed with Puppeteer to generate the preview: +To run the preview service locally, you need to have a running database and server service. You can use the docker-compose file in the root of this repository to start the database. Please follow instructions in the packages/server README to start the server service. The server is required to provide the database migrations. +Firstly, copy the `.env.example` file to `.env` and fill in the required values. + +```bash +cp .env.example .env ``` + +The install the dependencies with: + +```bash +yarn install +``` + +Then build the service: + +```bash yarn build ``` -This should be rerun whenever you make changes to the viewer (if you make local viewer changes, don't forget to build the viewer module before running this) +This builds both typescript and webpack (for the page that is deployed to chromium to create the views). 
It should be rerun whenever you make changes to the viewer (if you make local viewer changes, don't forget to build the viewer module before running this) -After the viewer web page is up to date, run the preview service with: +Finally, you can run the preview service with: -``` +```bash yarn dev ``` @@ -28,10 +42,16 @@ This will use the default dev DB connection of `postgres://speckle:speckle@127.0 ### In a docker image -Once you build the previewservice Dockerfile, you can run it like so: +Navigate to the root of this git repository and build the preview service Dockerfile: +```bash +docker build -f packages/preview-service/Dockerfile -t speckle-preview-service:local . ``` -docker run --rm -p 3001:3001 -e PG_CONNECTION_STRING=postgres://speckle:speckle@host.docker.internal/speckle {IMAGEID} + +Once you have built the preview service Dockerfile, you can run it like so: + +```bash +docker run --rm -p 3001:3001 -e PORT=3001 -e PG_CONNECTION_STRING=postgres://speckle:speckle@host.docker.internal/speckle speckle-preview-service:local ``` ## Deployment notes @@ -42,4 +62,4 @@ You must limit the PreviewService container memory to a value that you want to a To limit the container memory when running with `docker run`, you can use the `-m` flag. -With docker compose, you must use a docker compose file at version 2 (not 3) and use the `mem_limit` option. +To limit the memory used in the v3 docker compose file, you can use the `mem_limit` key in the `preview-service` service definition. diff --git a/packages/preview-service/renderPage/README.md b/packages/preview-service/renderPage/README.md new file mode 100644 index 0000000000..0deda6ab50 --- /dev/null +++ b/packages/preview-service/renderPage/README.md @@ -0,0 +1,3 @@ +# Render Page + +This is the page that is rendered by Puppeteer within Chromium. It is packaged by Webpack to run in the browser context. 
diff --git a/packages/preview-service/render_page/src/app.js b/packages/preview-service/renderPage/src/app.js similarity index 68% rename from packages/preview-service/render_page/src/app.js rename to packages/preview-service/renderPage/src/app.js index 609eedc68f..8cb24d3542 100644 --- a/packages/preview-service/render_page/src/app.js +++ b/packages/preview-service/renderPage/src/app.js @@ -1,6 +1,5 @@ -import { LegacyViewer } from '@speckle/viewer' -import { DefaultViewerParams } from '@speckle/viewer' -// import { logger } from '../../observability/logging' +import { DefaultViewerParams, LegacyViewer } from '@speckle/viewer' + console.log('Initialising Viewer') const v = new LegacyViewer(document.getElementById('renderer'), DefaultViewerParams) window.v = v @@ -13,7 +12,7 @@ window.LoadData = async function LoadData(url) { } window.onload = () => { - const testUrl = window.location.hash.substr(1) + const testUrl = window.location.hash.substring(1) if (testUrl) { window.LoadData(testUrl) } diff --git a/packages/preview-service/render_page/src/example.html b/packages/preview-service/renderPage/src/example.html similarity index 100% rename from packages/preview-service/render_page/src/example.html rename to packages/preview-service/renderPage/src/example.html diff --git a/packages/preview-service/render_page/src/favicon.ico b/packages/preview-service/renderPage/src/favicon.ico similarity index 100% rename from packages/preview-service/render_page/src/favicon.ico rename to packages/preview-service/renderPage/src/favicon.ico diff --git a/packages/preview-service/routes/api.js b/packages/preview-service/routes/api.js deleted file mode 100644 index 50413a7fea..0000000000 --- a/packages/preview-service/routes/api.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const zlib = require('zlib') -const express = require('express') -const { getObjectsStream } = require('./services/objects_utils') -const { SpeckleObjectsStream } = require('./speckleObjectsStream') -const { 
pipeline, PassThrough } = require('stream') -const { logger } = require('../observability/logging') - -const router = express.Router() - -// This method was copy-pasted from the server method, without authentication/authorization (this web service is an internal one) -router.post('/getobjects/:streamId', async (req, res) => { - const boundLogger = logger.child({ - streamId: req.params.streamId - }) - const childrenList = JSON.parse(req.body.objects) - - const simpleText = req.headers.accept === 'text/plain' - - res.writeHead(200, { - 'Content-Encoding': 'gzip', - 'Content-Type': simpleText ? 'text/plain' : 'application/json' - }) - - const dbStream = await getObjectsStream({ - streamId: req.params.streamId, - objectIds: childrenList - }) - const speckleObjStream = new SpeckleObjectsStream(simpleText) - const gzipStream = zlib.createGzip() - - pipeline( - dbStream, - speckleObjStream, - gzipStream, - new PassThrough({ highWaterMark: 16384 * 31 }), - res, - (err) => { - if (err) { - boundLogger.error(err, `Error streaming objects.`) - } else { - boundLogger.info( - `Streamed ${childrenList.length} objects (size: ${ - gzipStream.bytesWritten / 1000000 - } MB)` - ) - } - } - ) -}) - -module.exports = router diff --git a/packages/preview-service/routes/index.js b/packages/preview-service/routes/index.js deleted file mode 100644 index 177321176c..0000000000 --- a/packages/preview-service/routes/index.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' - -const express = require('express') -const router = express.Router() - -router.get('/', function (req, res) { - res.send('Speckle Object Preview Service') -}) - -module.exports = router diff --git a/packages/preview-service/routes/objects.js b/packages/preview-service/routes/objects.js deleted file mode 100644 index 47266b83ac..0000000000 --- a/packages/preview-service/routes/objects.js +++ /dev/null @@ -1,83 +0,0 @@ -'use strict' - -const zlib = require('zlib') -const express = require('express') -const { getObject, 
getObjectChildrenStream } = require('./services/objects_utils') -const { SpeckleObjectsStream } = require('./speckleObjectsStream') -const { pipeline, PassThrough } = require('stream') -const { logger } = require('../observability/logging') - -const router = express.Router() - -// This method was copy-pasted from the server method, without authentication/authorization (this web service is an internal one) -router.get('/:streamId/:objectId', async function (req, res) { - const boundLogger = logger.child({ - streamId: req.params.streamId, - objectId: req.params.objectId - }) - // Populate first object (the "commit") - const obj = await getObject({ - streamId: req.params.streamId, - objectId: req.params.objectId - }) - - if (!obj) { - return res.status(404).send('Failed to find object.') - } - - const simpleText = req.headers.accept === 'text/plain' - - res.writeHead(200, { - 'Content-Encoding': 'gzip', - 'Content-Type': simpleText ? 'text/plain' : 'application/json' - }) - - const dbStream = await getObjectChildrenStream({ - streamId: req.params.streamId, - objectId: req.params.objectId - }) - const speckleObjStream = new SpeckleObjectsStream(simpleText) - const gzipStream = zlib.createGzip() - - speckleObjStream.write(obj) - - pipeline( - dbStream, - speckleObjStream, - gzipStream, - new PassThrough({ highWaterMark: 16384 * 31 }), - res, - (err) => { - if (err) { - boundLogger.error(err, 'Error downloading object from stream') - } else { - boundLogger.info( - `Downloaded object from stream (size: ${ - gzipStream.bytesWritten / 1000000 - } MB)` - ) - } - } - ) -}) - -router.get('/:streamId/:objectId/single', async (req, res) => { - const boundLogger = logger.child({ - streamId: req.params.streamId, - objectId: req.params.objectId - }) - const obj = await getObject({ - streamId: req.params.streamId, - objectId: req.params.objectId - }) - - if (!obj) { - return res.status(404).send('Failed to find object.') - } - - boundLogger.info('Downloaded single object.') - - 
res.send(obj.data) -}) - -module.exports = router diff --git a/packages/preview-service/routes/preview.js b/packages/preview-service/routes/preview.js deleted file mode 100644 index ddff028515..0000000000 --- a/packages/preview-service/routes/preview.js +++ /dev/null @@ -1,181 +0,0 @@ -/* eslint-disable no-undef */ -'use strict' - -const express = require('express') -const router = express.Router() -const puppeteer = require('puppeteer') -const { logger } = require('../observability/logging') -const { reduce } = require('lodash') - -const shouldBeHeadless = process.env.PREVIEWS_HEADED !== 'true' - -const getChromiumExecutablePath = () => - process.env.CHROMIUM_EXECUTABLE_PATH || '/usr/bin/google-chrome-stable' -const getPuppeteerUserDataDir = () => process.env.USER_DATA_DIR || '/tmp/puppeteer' - -async function pageFunction(objectUrl) { - waitForAnimation = async (ms = 70) => - await new Promise((resolve) => { - setTimeout(resolve, ms) - }) - const ret = { - duration: 0, - mem: 0, - scr: {} - } - - const t0 = Date.now() - - await window.v.init() - - try { - await window.v.loadObjectAsync(objectUrl) - } catch { - // Main call failed. Wait some time for other objects to load inside the viewer and generate the preview anyway - await waitForAnimation(1000) - } - window.v.resize() - window.v.zoom(undefined, 0.95, false) - await waitForAnimation(100) - - for (let i = 0; i < 24; i++) { - window.v.setView({ azimuth: Math.PI / 12, polar: 0 }, false) - window.v.getRenderer().resetPipeline(true) - /** Not sure what the frame time when running pupeteer is, but it's not 16ms. 
- * That's why we're allowing more time between frames than probably needed - * In a future update, we'll have the viewer signal when convergence is complete - * regradless of how many frames/time that takes - */ - /** 22.11.2022 Alex: Commenting this out for now */ - // await waitForAnimation(2500) - await waitForAnimation() - ret.scr[i + ''] = await window.v.screenshot() - } - - ret.duration = (Date.now() - t0) / 1000 - ret.mem = { - total: performance.memory.totalJSHeapSize, - used: performance.memory.usedJSHeapSize - } - ret.userAgent = navigator.userAgent - return ret -} - -async function getScreenshot(objectUrl, boundLogger = logger) { - const launchParams = { - headless: shouldBeHeadless, - userDataDir: getPuppeteerUserDataDir(), - executablePath: getChromiumExecutablePath(), - protocolTimeout: 3600_000, - // we trust the web content that is running, so can disable the sandbox - // disabling the sandbox allows us to run the docker image without linux kernel privileges - args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-dev-shm-usage'] - } - - const browser = await puppeteer.launch(launchParams) - const page = await browser.newPage() - - const wrapperPromise = (async () => { - await page.goto('http://127.0.0.1:3001/render/') - - boundLogger.info('Page loaded') - - // Handle page crash (oom?) 
- page.on('error', (err) => { - throw err - }) - return await page.evaluate(pageFunction, objectUrl) - })() - - let ret = null - try { - ret = await wrapperPromise - } catch (err) { - boundLogger.error(err, 'Error generating preview.') - ret = { - error: err - } - } - - // Don't await for cleanup - browser.close() - - if (ret.error) { - return null - } - - boundLogger.info( - { - durationSeconds: ret.duration, - totalMemoryMB: ret.mem.total / 1000000, - resultingImages: { - count: Object.keys(ret.scr || {}).length, - totalStringSize: reduce(ret.scr || {}, (acc, val) => acc + val.length, 0) - } - }, - `Generated preview.` - ) - return ret.scr - - // return ` - // - //
Generated by: ${ret.userAgent}
- //
Duration in seconds: ${ret.duration}
- //
Memory in MB: ${ret.mem.total / 1000000}
- //
Used Memory in MB: ${ret.mem.used / 1000000}
- //
- //
- //
- //
- //
- // - // ` - - // const imageBuffer = new Buffer.from( - // b64Image.replace(/^data:image\/\w+;base64,/, ''), - // 'base64' - // ) - - // // await page.waitForTimeout(500); - // //var response = await page.screenshot({ - // // type: 'png', - // // clip: {x: 0, y: 0, width: 800, height: 800} - // //}); - - // return imageBuffer -} - -router.get('/:streamId/:objectId', async function (req, res) { - const safeParamRgx = /^[\w]+$/i - const { streamId, objectId } = req.params || {} - const boundLogger = logger.child({ streamId, objectId }) - if (!safeParamRgx.test(streamId) || !safeParamRgx.test(objectId)) { - return res.status(400).json({ error: 'Invalid streamId or objectId!' }) - } - - const objectUrl = `http://127.0.0.1:3001/streams/${req.params.streamId}/objects/${req.params.objectId}` - /* - let authToken = '' - let authorizationHeader = req.header( 'Authorization' ) - if ( authorizationHeader && authorizationHeader.toLowerCase().startsWith( 'bearer ' ) ) { - authToken = authorizationHeader.Substring( 'Bearer '.Length ).Trim() - } - // useful for testing (not the recommended way of passing the auth token) - if ( req.query.authToken ) { - authToken = req.query.authToken - } - */ - - boundLogger.info('Requesting screenshot.') - - const scr = await getScreenshot(objectUrl, boundLogger) - - if (!scr) { - return res.status(500).end() - } - - // res.setHeader( 'content-type', 'image/png' ) - res.send(scr) -}) - -module.exports = router diff --git a/packages/preview-service/routes/services/objects_utils.js b/packages/preview-service/routes/services/objects_utils.js deleted file mode 100644 index 3fc7d59018..0000000000 --- a/packages/preview-service/routes/services/objects_utils.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' - -const knex = require('../../knex') - -const Objects = () => knex('objects') - -module.exports = { - async getObject({ streamId, objectId }) { - const res = await Objects().where({ streamId, id: objectId }).select('*').first() - if (!res) return 
null - res.data.totalChildrenCount = res.totalChildrenCount - delete res.streamId - return res - }, - - // NOTE: Copy pasted from server > modules/core/services/objects.js - async getObjectChildrenStream({ streamId, objectId }) { - const q = knex.with( - 'object_children_closure', - knex.raw( - `SELECT objects.id as parent, d.key as child, d.value as mindepth, ? as "streamId" - FROM objects - JOIN jsonb_each_text(objects.data->'__closure') d ON true - where objects.id = ?`, - [streamId, objectId] - ) - ) - q.select('id') - q.select(knex.raw('data::text as "dataText"')) - q.from('object_children_closure') - - q.rightJoin('objects', function () { - this.on('objects.streamId', '=', 'object_children_closure.streamId').andOn( - 'objects.id', - '=', - 'object_children_closure.child' - ) - }) - .where( - knex.raw('object_children_closure."streamId" = ? AND parent = ?', [ - streamId, - objectId - ]) - ) - .orderBy('objects.id') - return q.stream({ highWaterMark: 500 }) - }, - - async getObjectsStream({ streamId, objectIds }) { - const res = Objects() - .whereIn('id', objectIds) - .andWhere('streamId', streamId) - .orderBy('id') - .select( - knex.raw( - '"id", "speckleType", "totalChildrenCount", "totalChildrenCountByDepth", "createdAt", data::text as "dataText"' - ) - ) - return res.stream({ highWaterMark: 500 }) - } -} diff --git a/packages/preview-service/src/aliasLoader.ts b/packages/preview-service/src/aliasLoader.ts new file mode 100644 index 0000000000..8deeda0895 --- /dev/null +++ b/packages/preview-service/src/aliasLoader.ts @@ -0,0 +1,8 @@ +import generateAliasesResolver from 'esm-module-alias' +import { packageRoot, srcRoot } from './root.js' +import path from 'node:path' + +export const resolve = generateAliasesResolver({ + '@': srcRoot, + '#': path.resolve(packageRoot, './tests') +}) diff --git a/packages/preview-service/src/bin.ts b/packages/preview-service/src/bin.ts new file mode 100644 index 0000000000..e93580a819 --- /dev/null +++ 
b/packages/preview-service/src/bin.ts @@ -0,0 +1,8 @@ +import '@/bootstrap.js' // This has side-effects and has to be imported first + +import { startServer } from '@/server/server.js' +import { startPreviewService } from '@/server/background.js' +import { db } from '@/clients/knex.js' + +startServer({ db }) +startPreviewService({ db }) diff --git a/packages/preview-service/src/bootstrap.ts b/packages/preview-service/src/bootstrap.ts new file mode 100644 index 0000000000..50c8721e6a --- /dev/null +++ b/packages/preview-service/src/bootstrap.ts @@ -0,0 +1,2 @@ +import dotenv from 'dotenv' +dotenv.config() diff --git a/packages/preview-service/src/clients/execHealthcheck.ts b/packages/preview-service/src/clients/execHealthcheck.ts new file mode 100644 index 0000000000..eae84920f5 --- /dev/null +++ b/packages/preview-service/src/clients/execHealthcheck.ts @@ -0,0 +1,6 @@ +import fs from 'fs' + +export type UpdateHealthcheckData = () => void +export const updateHealthcheckDataFactory = + (deps: { healthCheckFilePath: string }) => () => + fs.writeFile(deps.healthCheckFilePath, Date.now().toLocaleString(), () => {}) diff --git a/packages/preview-service/src/clients/knex.ts b/packages/preview-service/src/clients/knex.ts new file mode 100644 index 0000000000..0baab31e62 --- /dev/null +++ b/packages/preview-service/src/clients/knex.ts @@ -0,0 +1,23 @@ +import { getPostgresConnectionString, getPostgresMaxConnections } from '@/utils/env.js' +import * as knex from 'knex' +import { get } from 'lodash-es' + +// CJS interop (types are off) +const knexBuilder = (get(knex, 'default') || + get(knex, 'knex')) as unknown as typeof knex.knex + +export const db = knexBuilder({ + client: 'pg', + connection: { + // eslint-disable-next-line camelcase + application_name: 'speckle_preview_service', + connectionString: getPostgresConnectionString() + }, + pool: { + min: 0, + max: getPostgresMaxConnections(), + acquireTimeoutMillis: 16000, //allows for 3x creation attempts plus idle time 
between attempts + createTimeoutMillis: 5000 + } + // migrations are managed in the server package +}) diff --git a/packages/preview-service/src/clients/previewService.ts b/packages/preview-service/src/clients/previewService.ts new file mode 100644 index 0000000000..9437a83344 --- /dev/null +++ b/packages/preview-service/src/clients/previewService.ts @@ -0,0 +1,22 @@ +import type { Angle, ObjectIdentifier } from '@/domain/domain.js' +import { isCastableToBrand } from '@/utils/brand.js' +import { z } from 'zod' + +const previewResponseSchema = z.record( + z.string().refine((value): value is Angle => isCastableToBrand(value)), + z.string() +) + +export type GeneratePreview = ( + task: ObjectIdentifier +) => Promise> + +export const generatePreviewFactory = + (deps: { serviceOrigin: string }): GeneratePreview => + async (task: ObjectIdentifier) => { + const previewUrl = `${deps.serviceOrigin}/preview/${task.streamId}/${task.objectId}` + const response = await fetch(previewUrl) + const responseBody: unknown = await response.json() + const previewResponse = previewResponseSchema.parse(responseBody) + return previewResponse + } diff --git a/packages/preview-service/src/clients/puppeteer.ts b/packages/preview-service/src/clients/puppeteer.ts new file mode 100644 index 0000000000..da7cd7ee90 --- /dev/null +++ b/packages/preview-service/src/clients/puppeteer.ts @@ -0,0 +1,74 @@ +import { extendLoggerComponent } from '@/observability/logging.js' +import { isDevelopment } from '@/utils/env.js' +import type { Logger } from 'pino' +import puppeteer, { type EvaluateFunc, type PuppeteerLaunchOptions } from 'puppeteer' + +export type LoadPageAndEvaluateScript = (...args: unknown[]) => Promise + +export type PuppeteerClient = { + loadPageAndEvaluateScript: LoadPageAndEvaluateScript + dispose: () => Promise +} + +export const puppeteerClientFactory = async (deps: { + logger: Logger + url: string + script: EvaluateFunc<[unknown[]]> + launchParams?: PuppeteerLaunchOptions + 
timeoutMilliseconds: number +}): Promise => { + const logger = extendLoggerComponent( + deps.logger.child({ renderPageUrl: deps.url }), + 'puppeteer' + ) + const { url, script, launchParams } = deps + const browser = await puppeteer.launch({ ...launchParams, dumpio: isDevelopment() }) + return { + loadPageAndEvaluateScript: async (...args: unknown[]) => { + if (!browser) { + const errorMessage = 'Browser must be initialized using init() before use.' + logger.error(errorMessage) + throw new Error(errorMessage) + } + logger.info('Loading page from {renderPageUrl}') + const page = await browser.newPage() + + page.setDefaultTimeout(deps.timeoutMilliseconds) + + await page.goto(url) + + logger.info('Page loaded from {renderPageUrl}') + + // Handle page crash (oom?) + page + .on('error', (err) => { + logger.error(err, 'Page crashed') + throw err + }) + .on('console', (message) => { + let messageText = message.text() + if (messageText.startsWith('data:image')) + messageText = messageText.substring(0, 200).concat('...') + logger.debug(`${message.type().substring(0, 3).toUpperCase()} ${messageText}`) + }) + .on('pageerror', ({ message }) => { + logger.error(message) + }) + .on('response', (response) => + logger.info(`${response.status()} ${response.url()}`) + ) + .on('requestfailed', (request) => + logger.error(`${request.failure()?.errorText} ${request.url()}`) + ) + + const evaluationResult: unknown = await page.evaluate(script, args) + + logger.info('Page evaluated with Puppeteer script.') + return evaluationResult + }, + dispose: async () => { + if (!browser) return + await browser.close() + } + } +} diff --git a/packages/preview-service/src/domain/backgroundWorker.ts b/packages/preview-service/src/domain/backgroundWorker.ts new file mode 100644 index 0000000000..f3a9da3ced --- /dev/null +++ b/packages/preview-service/src/domain/backgroundWorker.ts @@ -0,0 +1,7 @@ +export enum WorkStatus { + SUCCESS = 'SUCCESS', + NOWORKFOUND = 'NOWORKFOUND', + FAILED = 'FAILED' +} + 
+export type WorkToBeDone = () => Promise diff --git a/packages/preview-service/src/domain/const.ts b/packages/preview-service/src/domain/const.ts new file mode 100644 index 0000000000..898869b284 --- /dev/null +++ b/packages/preview-service/src/domain/const.ts @@ -0,0 +1 @@ +export const REQUEST_ID_HEADER = 'x-request-id' diff --git a/packages/preview-service/src/domain/domain.ts b/packages/preview-service/src/domain/domain.ts new file mode 100644 index 0000000000..3457164d92 --- /dev/null +++ b/packages/preview-service/src/domain/domain.ts @@ -0,0 +1,14 @@ +import { Brand } from '@/utils/brand.js' + +export type ObjectIdentifier = { + streamId: string + objectId: string +} + +export type Preview = { + previewId: string + imgBuffer: Buffer +} + +export type Angle = Brand +export type PreviewId = Brand diff --git a/packages/preview-service/src/observability/expressLogging.ts b/packages/preview-service/src/observability/expressLogging.ts new file mode 100644 index 0000000000..6f56599b99 --- /dev/null +++ b/packages/preview-service/src/observability/expressLogging.ts @@ -0,0 +1,34 @@ +import { REQUEST_ID_HEADER } from '@/domain/const.js' +import { logger } from '@/observability/logging.js' +import { randomUUID } from 'crypto' +import type { IncomingHttpHeaders, IncomingMessage } from 'http' +import { pinoHttp } from 'pino-http' + +function determineRequestId(headers: IncomingHttpHeaders, uuidGenerator = randomUUID) { + const idHeader = headers[REQUEST_ID_HEADER] + if (!idHeader) return uuidGenerator() + if (Array.isArray(idHeader)) return idHeader[0] ?? 
uuidGenerator() + return idHeader +} + +const generateReqId = (req: IncomingMessage) => determineRequestId(req.headers) + +export const loggingExpressMiddleware = pinoHttp({ + genReqId: generateReqId, + logger, + autoLogging: true, + // this is here, to force logging 500 responses as errors in the final log + // and we don't really care about 3xx stuff + // all the user related 4xx responses are treated as info + customLogLevel: (req, res, error) => { + if (res.statusCode >= 400 && res.statusCode < 500) { + return 'info' + } else if (res.statusCode >= 500 || error) { + return 'error' + } else if (res.statusCode >= 300 && res.statusCode < 400) { + return 'silent' + } + + return 'info' //default + } +}) diff --git a/packages/preview-service/src/observability/logging.ts b/packages/preview-service/src/observability/logging.ts new file mode 100644 index 0000000000..eb72c0f560 --- /dev/null +++ b/packages/preview-service/src/observability/logging.ts @@ -0,0 +1,13 @@ +import { getLogLevel, isLogPretty } from '@/utils/env.js' +import { + extendLoggerComponent as elc, + getLogger +} from '@speckle/shared/dist/commonjs/observability/index.js' +export const extendLoggerComponent = elc + +export const logger = extendLoggerComponent( + getLogger(getLogLevel(), isLogPretty()), + 'preview-service' +) +export const serverLogger = extendLoggerComponent(logger, 'server') +export const testLogger = getLogger(getLogLevel(), isLogPretty()) diff --git a/packages/preview-service/src/observability/metricsApp.ts b/packages/preview-service/src/observability/metricsApp.ts new file mode 100644 index 0000000000..aa19f67f83 --- /dev/null +++ b/packages/preview-service/src/observability/metricsApp.ts @@ -0,0 +1,28 @@ +import { loggingExpressMiddleware } from '@/observability/expressLogging.js' +import { metricsRouterFactory } from '@/observability/metricsRoute.js' +import { initPrometheusMetrics } from '@/observability/prometheusMetrics.js' +import { errorHandler } from '@/utils/errorHandler.js' 
+import express from 'express' +import createError from 'http-errors' +import type { Knex } from 'knex' + +export const appFactory = (deps: { db: Knex }) => { + const { db } = deps + initPrometheusMetrics({ db }) + const app = express() + + app.use(loggingExpressMiddleware) + app.use(express.json({ limit: '100mb' })) + app.use(express.urlencoded({ limit: '100mb', extended: false })) + + app.use('/metrics', metricsRouterFactory()) + + // catch 404 and forward to error handler + app.use(function (req, _res, next) { + next(createError(404, `Not Found: ${req.url}`)) + }) + app.set('json spaces', 2) // pretty print json + + app.use(errorHandler) + return app +} diff --git a/packages/preview-service/src/observability/metricsRoute.ts b/packages/preview-service/src/observability/metricsRoute.ts new file mode 100644 index 0000000000..af386ea0bd --- /dev/null +++ b/packages/preview-service/src/observability/metricsRoute.ts @@ -0,0 +1,15 @@ +import express, { RequestHandler } from 'express' +import prometheusClient from 'prom-client' + +export const metricsRouterFactory = () => { + const metricsRouter = express.Router() + + metricsRouter.get( + '/metrics', + (async (_req, res) => { + res.setHeader('Content-Type', prometheusClient.register.contentType) + res.end(await prometheusClient.register.metrics()) + }) as RequestHandler //FIXME: this works around a type error with async, which is resolved in express 5 + ) + return metricsRouter +} diff --git a/packages/preview-service/src/observability/prometheusMetrics.ts b/packages/preview-service/src/observability/prometheusMetrics.ts new file mode 100644 index 0000000000..619f2e9a02 --- /dev/null +++ b/packages/preview-service/src/observability/prometheusMetrics.ts @@ -0,0 +1,170 @@ +import { logger } from '@/observability/logging.js' +import { getPostgresMaxConnections } from '@/utils/env.js' +import type { Knex } from 'knex' +import { isObject } from 'lodash-es' +import type { Counter, Histogram, Summary } from 'prom-client' 
+import prometheusClient from 'prom-client' +import { Pool } from 'tarn' + +// let metricFree: Gauge | null = null +// let metricUsed: Gauge = null +// let metricPendingAquires: Gauge | null = null +let metricQueryDuration: Summary | null = null +let metricQueryErrors: Counter | null = null +export let metricDuration: Histogram | null = null +export let metricOperationErrors: Counter | null = null + +let prometheusInitialized = false + +function isPrometheusInitialized() { + return prometheusInitialized +} + +function initKnexPrometheusMetrics(params: { db: Knex }) { + const queryStartTime: Record = {} + const { db } = params + if (!('pool' in db.client)) { + throw new Error( + 'DB client does not have a pool. Skipping knex metrics initialization.' + ) + } + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const dbConnectionPool = db.client.pool as Pool + //metricFree = + new prometheusClient.Gauge({ + name: 'speckle_server_knex_free', + help: 'Number of free DB connections', + collect() { + this.set(dbConnectionPool.numFree()) + } + }) + + //metricUsed = + new prometheusClient.Gauge({ + name: 'speckle_server_knex_used', + help: 'Number of used DB connections', + collect() { + this.set(dbConnectionPool.numUsed()) + } + }) + + //metricPendingAquires = + new prometheusClient.Gauge({ + name: 'speckle_server_knex_pending', + help: 'Number of pending DB connection aquires', + collect() { + this.set(dbConnectionPool.numPendingAcquires()) + } + }) + + //metricPendingCreates = + new prometheusClient.Gauge({ + name: 'speckle_server_knex_pending_creates', + help: 'Number of pending DB connection creates', + collect() { + this.set(dbConnectionPool.numPendingCreates()) + } + }) + + //metricPendingValidations = + new prometheusClient.Gauge({ + name: 'speckle_server_knex_pending_validations', + help: 'Number of pending DB connection validations. 
This is a state between pending acquisition and acquiring a connection.', + collect() { + this.set(dbConnectionPool.numPendingValidations()) + } + }) + + //metricRemainingCapacity = + new prometheusClient.Gauge({ + name: 'speckle_server_knex_remaining_capacity', + help: 'Remaining capacity of the DB connection pool', + collect() { + const postgresMaxConnections = getPostgresMaxConnections() + const demand = + dbConnectionPool.numUsed() + + dbConnectionPool.numPendingCreates() + + dbConnectionPool.numPendingValidations() + + dbConnectionPool.numPendingAcquires() + + this.set(Math.max(postgresMaxConnections - demand, 0)) + } + }) + + metricQueryDuration = new prometheusClient.Summary({ + name: 'speckle_server_knex_query_duration', + help: 'Summary of the DB query durations in seconds' + }) + + metricQueryErrors = new prometheusClient.Counter({ + name: 'speckle_server_knex_query_errors', + help: 'Number of DB queries with errors' + }) + + db.on('query', (data) => { + if (isObject(data) && '__knexQueryUid' in data) { + const queryId = String(data.__knexQueryUid) + queryStartTime[queryId] = Date.now() + } + }) + + db.on('query-response', (_data, obj) => { + if (isObject(obj) && '__knexQueryUid' in obj) { + const queryId = String(obj.__knexQueryUid) + const durationSec = (Date.now() - queryStartTime[queryId]) / 1000 + delete queryStartTime[queryId] + if (metricQueryDuration && !isNaN(durationSec)) + metricQueryDuration.observe(durationSec) + } + }) + + db.on('query-error', (_err, querySpec) => { + if (isObject(querySpec) && '__knexQueryUid' in querySpec) { + const queryId = String(querySpec.__knexQueryUid) + const durationSec = (Date.now() - queryStartTime[queryId]) / 1000 + delete queryStartTime[queryId] + + if (metricQueryDuration && !isNaN(durationSec)) + metricQueryDuration.observe(durationSec) + + if (metricQueryErrors) metricQueryErrors.inc() + } + }) +} + +export function initPrometheusMetrics(params: { db: Knex }) { + logger.info('Initializing Prometheus 
metrics...') + if (isPrometheusInitialized()) { + logger.info('Prometheus metrics already initialized') + return + } + + prometheusInitialized = true + + prometheusClient.register.clear() + prometheusClient.register.setDefaultLabels({ + project: 'speckle-server', + app: 'preview-service' + }) + + try { + metricDuration = new prometheusClient.Histogram({ + name: 'speckle_server_operation_duration', + help: 'Summary of the operation durations in seconds', + buckets: [0.5, 1, 5, 10, 30, 60, 300, 600, 1200, 1800], + labelNames: ['op'] + }) + + metricOperationErrors = new prometheusClient.Counter({ + name: 'speckle_server_operation_errors', + help: 'Number of operations with errors', + labelNames: ['op'] + }) + + initKnexPrometheusMetrics(params) + prometheusClient.collectDefaultMetrics() + } catch (e) { + logger.error(e, 'Failed to initialize Prometheus metrics.') + prometheusInitialized = false + } +} diff --git a/packages/preview-service/src/repositories/objectPreview.ts b/packages/preview-service/src/repositories/objectPreview.ts new file mode 100644 index 0000000000..8950273214 --- /dev/null +++ b/packages/preview-service/src/repositories/objectPreview.ts @@ -0,0 +1,68 @@ +import type { Angle, ObjectIdentifier, PreviewId } from '@/domain/domain.js' +import type { Knex } from 'knex' + +export type ObjectPreviewRow = ObjectIdentifier & { + preview: Record + previewStatus: number + lastUpdate: number +} +export const ObjectPreview = (deps: { db: Knex }) => + deps.db('object_preview') + +export type GetNextUnstartedObjectPreview = () => Promise +export const getNextUnstartedObjectPreviewFactory = + (deps: { db: Knex }): GetNextUnstartedObjectPreview => + async () => { + const { db } = deps + const { + rows: [maybeRow] + } = await db.raw<{ rows: ObjectIdentifier[] }>(` + UPDATE object_preview + SET + "previewStatus" = 1, + "lastUpdate" = NOW() + FROM ( + SELECT "streamId", "objectId" FROM object_preview + WHERE "previewStatus" = 0 OR ("previewStatus" = 1 AND 
"lastUpdate" < NOW() - INTERVAL '1 WEEK') + ORDER BY "priority" ASC, "lastUpdate" ASC + LIMIT 1 + ) as task + WHERE object_preview."streamId" = task."streamId" AND object_preview."objectId" = task."objectId" + RETURNING object_preview."streamId", object_preview."objectId" + `) + return maybeRow + } + +export type UpdatePreviewMetadataParams = ObjectIdentifier & { + metadata: Record +} +export type UpdatePreviewMetadata = ( + params: UpdatePreviewMetadataParams +) => Promise +export const updatePreviewMetadataFactory = + (deps: { db: Knex }): UpdatePreviewMetadata => + async (params) => { + const { db } = deps + // Update preview metadata + await db.raw( + ` + UPDATE object_preview + SET + "previewStatus" = 2, + "lastUpdate" = NOW(), + "preview" = ? + WHERE "streamId" = ? AND "objectId" = ? + `, + [params.metadata, params.streamId, params.objectId] + ) + } + +export type NotifyUpdate = (params: ObjectIdentifier) => Promise +export const notifyUpdateFactory = + (deps: { db: Knex }): NotifyUpdate => + async (params) => { + const { db } = deps + await db.raw( + `NOTIFY preview_generation_update, 'finished:${params.streamId}:${params.objectId}'` + ) + } diff --git a/packages/preview-service/src/repositories/objects.ts b/packages/preview-service/src/repositories/objects.ts new file mode 100644 index 0000000000..0bab649494 --- /dev/null +++ b/packages/preview-service/src/repositories/objects.ts @@ -0,0 +1,93 @@ +import type { ObjectIdentifier } from '@/domain/domain.js' +import type { Knex } from 'knex' + +export const Objects = (deps: { db: Knex }) => deps.db('objects') + +type DbObject = { + id: string + streamId: string + data: object + totalChildrenCount: number +} + +type ReturnedObject = { + id: string + data: { totalChildrenCount: number } & Record +} + +export type GetObject = (params: ObjectIdentifier) => Promise +export const getObjectFactory = + (deps: { db: Knex }): GetObject => + async ({ streamId, objectId }) => { + const { db } = deps + const res = await 
Objects({ db }) + .where({ streamId, id: objectId }) + .select('*') + .first() + if (!res) return null + const returned: ReturnedObject = { + id: res.id, + data: { totalChildrenCount: res.totalChildrenCount, ...res.data } + } + return returned + } + +export type GetObjectChildrenStream = ( + params: ObjectIdentifier +) => Promise +export const getObjectChildrenStreamFactory = + (deps: { db: Knex }): GetObjectChildrenStream => + async ({ streamId, objectId }) => { + const { db } = deps + const q = db.with( + 'object_children_closure', + db.raw( + `SELECT objects.id as parent, d.key as child, d.value as mindepth, ? as "streamId" + FROM objects + JOIN jsonb_each_text(objects.data->'__closure') d ON true + where objects.id = ?`, + [streamId, objectId] + ) + ) + await q.select('id') + await q.select(db.raw('data::text as "dataText"')) + await q.from('object_children_closure') + + await q + .rightJoin('objects', function () { + this.on('objects.streamId', '=', 'object_children_closure.streamId').andOn( + 'objects.id', + '=', + 'object_children_closure.child' + ) + }) + .where( + db.raw('object_children_closure."streamId" = ? 
AND parent = ?', [ + streamId, + objectId + ]) + ) + .orderBy('objects.id') + return q.stream({ highWaterMark: 500 }) + } + +type BatchObjectIdentifier = { + streamId: string + objectIds: string[] +} +export type GetObjectsStream = (params: BatchObjectIdentifier) => NodeJS.ReadableStream +export const getObjectsStreamFactory = + (deps: { db: Knex }): GetObjectsStream => + ({ streamId, objectIds }) => { + const { db } = deps + const res = Objects({ db }) + .whereIn('id', objectIds) + .andWhere('streamId', streamId) + .orderBy('id') + .select( + db.raw( + '"id", "speckleType", "totalChildrenCount", "totalChildrenCountByDepth", "createdAt", data::text as "dataText"' + ) + ) + return res.stream({ highWaterMark: 500 }) + } diff --git a/packages/preview-service/src/repositories/previews.ts b/packages/preview-service/src/repositories/previews.ts new file mode 100644 index 0000000000..f2bbcf677a --- /dev/null +++ b/packages/preview-service/src/repositories/previews.ts @@ -0,0 +1,16 @@ +import type { Preview } from '@/domain/domain.js' +import type { Knex } from 'knex' + +export type PreviewRow = { id: string; data: Buffer } +export const Previews = (deps: { db: Knex }) => deps.db('previews') + +export type InsertPreview = (params: Preview) => Promise +export const insertPreviewFactory = + (deps: { db: Knex }): InsertPreview => + async (params) => { + const { db } = deps + await db.raw( + 'INSERT INTO "previews" (id, data) VALUES (?, ?) 
ON CONFLICT DO NOTHING', + [params.previewId, params.imgBuffer] + ) + } diff --git a/packages/preview-service/src/root.ts b/packages/preview-service/src/root.ts new file mode 100644 index 0000000000..13b51d800f --- /dev/null +++ b/packages/preview-service/src/root.ts @@ -0,0 +1,21 @@ +import path from 'node:path' +import fs from 'node:fs' +import { fileURLToPath } from 'url' + +/** + * Singleton module for src root and package root directory resolution + */ + +const __filename = fileURLToPath(import.meta.url) +const srcRoot = path.dirname(__filename) + +// Recursively walk back from __dirname till we find our package.json +let packageRoot = srcRoot +while (packageRoot !== '/') { + if (fs.readdirSync(packageRoot).includes('package.json')) { + break + } + packageRoot = path.resolve(packageRoot, '..') +} + +export { srcRoot, packageRoot } diff --git a/packages/preview-service/src/scripts/puppeteerDriver.js b/packages/preview-service/src/scripts/puppeteerDriver.js new file mode 100644 index 0000000000..a13adb14c1 --- /dev/null +++ b/packages/preview-service/src/scripts/puppeteerDriver.js @@ -0,0 +1,47 @@ +export const puppeteerDriver = async (objectUrl) => { + const waitForAnimation = async (ms = 70) => + await new Promise((resolve) => { + setTimeout(resolve, ms) + }) + const ret = { + duration: 0, + mem: 0, + scr: {} + } + + const t0 = Date.now() + + await window.v.init() + + try { + await window.v.loadObjectAsync(objectUrl) + } catch { + // Main call failed. Wait some time for other objects to load inside the viewer and generate the preview anyway + await waitForAnimation(1000) + } + window.v.resize() + window.v.zoom(undefined, 0.95, false) + await waitForAnimation(100) + + for (let i = 0; i < 24; i++) { + window.v.setView({ azimuth: Math.PI / 12, polar: 0 }, false) + window.v.getRenderer().resetPipeline(true) + /** Not sure what the frame time when running pupeteer is, but it's not 16ms. 
+ * That's why we're allowing more time between frames than probably needed + * In a future update, we'll have the viewer signal when convergence is complete + * regradless of how many frames/time that takes + */ + /** 22.11.2022 Alex: Commenting this out for now */ + // await waitForAnimation(2500) + await waitForAnimation() + ret.scr[i + ''] = await window.v.screenshot() + } + + ret.duration = (Date.now() - t0) / 1000 + ret.mem = { + total: performance.memory.totalJSHeapSize, + used: performance.memory.usedJSHeapSize + } + ret.userAgent = navigator.userAgent + return ret +} diff --git a/packages/preview-service/src/server/app.ts b/packages/preview-service/src/server/app.ts new file mode 100644 index 0000000000..42a8a684b9 --- /dev/null +++ b/packages/preview-service/src/server/app.ts @@ -0,0 +1,37 @@ +import { loggingExpressMiddleware } from '@/observability/expressLogging.js' +import { srcRoot } from '@/root.js' +import apiRouterFactory from '@/server/routes/api.js' +import indexRouterFactory from '@/server/routes/index.js' +import objectsRouterFactory from '@/server/routes/objects.js' +import previewRouterFactory from '@/server/routes/preview.js' +import { errorHandler } from '@/utils/errorHandler.js' +import express from 'express' +import createError from 'http-errors' +import type { Knex } from 'knex' +import path from 'path' + +export const appFactory = (deps: { db: Knex }) => { + const { db } = deps + const app = express() + + app.use(loggingExpressMiddleware) + + app.use(express.json({ limit: '100mb' })) + app.use(express.urlencoded({ limit: '100mb', extended: false })) + //webpack will build the renderPage and save it to the packages/preview-service/dist/public directory + app.use(express.static(path.join(srcRoot, '../public'))) + + app.use('/', indexRouterFactory()) + app.use('/preview', previewRouterFactory()) + app.use('/objects', objectsRouterFactory({ db })) + app.use('/api', apiRouterFactory({ db })) + + // catch 404 and forward to error handler + 
app.use(function (req, res, next) { + next(createError(404, `Not Found: ${req.url}`)) + }) + + app.set('json spaces', 2) // pretty print json + app.use(errorHandler) + return app +} diff --git a/packages/preview-service/src/server/background.ts b/packages/preview-service/src/server/background.ts new file mode 100644 index 0000000000..5247ce8cec --- /dev/null +++ b/packages/preview-service/src/server/background.ts @@ -0,0 +1,60 @@ +/** + * @fileoverview Background service for preview service. This service is responsible for generating 360 previews for objects. + */ +//FIXME this doesn't quite fit in the /server directory, but it's not a service either. It's a background worker. +import { updateHealthcheckDataFactory } from '@/clients/execHealthcheck.js' +import { generatePreviewFactory } from '@/clients/previewService.js' +import { extendLoggerComponent, logger } from '@/observability/logging.js' +import { initPrometheusMetrics } from '@/observability/prometheusMetrics.js' +import { + getNextUnstartedObjectPreviewFactory, + notifyUpdateFactory, + updatePreviewMetadataFactory +} from '@/repositories/objectPreview.js' +import { insertPreviewFactory } from '@/repositories/previews.js' +import { generateAndStore360PreviewFactory } from '@/services/360preview.js' +import { pollForAndCreatePreviewFactory } from '@/services/pollForPreview.js' +import { forceExit, repeatedlyDoSomeWorkFactory } from '@/services/taskManager.js' +import { getHealthCheckFilePath, serviceOrigin } from '@/utils/env.js' +import type { Knex } from 'knex' + +export function startPreviewService(params: { db: Knex }) { + const { db } = params + const backgroundLogger = extendLoggerComponent(logger, 'backgroundWorker') + backgroundLogger.info('πŸ“Έ Starting Preview Service background worker') + + process.on('SIGTERM', () => { + forceExit() + backgroundLogger.info('Shutting down...') + }) + + process.on('SIGINT', () => { + forceExit() + backgroundLogger.info('Shutting down...') + }) + + 
initPrometheusMetrics({ db }) + repeatedlyDoSomeWorkFactory({ + doSomeWork: pollForAndCreatePreviewFactory({ + updateHealthcheckData: updateHealthcheckDataFactory({ + healthCheckFilePath: getHealthCheckFilePath() + }), + getNextUnstartedObjectPreview: getNextUnstartedObjectPreviewFactory({ db }), + generateAndStore360Preview: generateAndStore360PreviewFactory({ + generatePreview: generatePreviewFactory({ serviceOrigin: serviceOrigin() }), + insertPreview: insertPreviewFactory({ db }) + }), + updatePreviewMetadata: updatePreviewMetadataFactory({ db }), + notifyUpdate: notifyUpdateFactory({ db }), + logger: backgroundLogger + }), + onExit: () => { + process.exit(0) + }, + delayPeriods: { + onSuccess: 10, + onNoWorkFound: 1000, + onFailed: 5000 + } + })() +} diff --git a/packages/preview-service/src/server/routes/api.ts b/packages/preview-service/src/server/routes/api.ts new file mode 100644 index 0000000000..dbed859a42 --- /dev/null +++ b/packages/preview-service/src/server/routes/api.ts @@ -0,0 +1,66 @@ +import { getObjectsStreamFactory } from '@/repositories/objects.js' +import { isSimpleTextRequested, simpleTextOrJsonContentType } from '@/utils/headers.js' +import { SpeckleObjectsStream } from '@/utils/speckleObjectsStream.js' +import express from 'express' +import type { Knex } from 'knex' +import { PassThrough, pipeline } from 'stream' +import zlib from 'zlib' +import { z } from 'zod' + +const apiRouterFactory = (deps: { db: Knex }) => { + const { db } = deps + const apiRouter = express.Router() + + const getObjectsRequestBodySchema = z.object({ + objects: z.preprocess((objects) => JSON.parse(String(objects)), z.array(z.string())) + }) + + // This method was copy-pasted from the server method, without authentication/authorization (this web service is an internal one) + apiRouter.post( + '/getobjects/:streamId', + (async (req, res) => { + const boundLogger = req.log.child({ + streamId: req.params.streamId + }) + const getObjectsRequestBody = await 
getObjectsRequestBodySchema.parseAsync( + req.body + ) + + res.writeHead(200, { + 'Content-Encoding': 'gzip', + 'Content-Type': simpleTextOrJsonContentType(req) + }) + + const dbStream = getObjectsStreamFactory({ db })({ + streamId: req.params.streamId, + objectIds: getObjectsRequestBody.objects + }) + const speckleObjStream = new SpeckleObjectsStream(isSimpleTextRequested(req)) + const gzipStream = zlib.createGzip() + + pipeline( + dbStream, + speckleObjStream, + gzipStream, + new PassThrough({ highWaterMark: 16384 * 31 }), + res, + (err) => { + if (err) { + boundLogger.error(err, `Error streaming objects.`) + } else { + boundLogger.info( + { + numberOfStreamedObjects: getObjectsRequestBody.objects.length, + sizeOfStreamedObjectsMB: gzipStream.bytesWritten / 1000000 + }, + 'Streamed {numberOfStreamedObjects} objects (size: {sizeOfStreamedObjectsMB} MB)' + ) + } + } + ) + }) as express.RequestHandler //FIXME: this works around a type error with async, which is resolved in express 5 + ) + return apiRouter +} + +export default apiRouterFactory diff --git a/packages/preview-service/src/server/routes/index.ts b/packages/preview-service/src/server/routes/index.ts new file mode 100644 index 0000000000..7cfad1b423 --- /dev/null +++ b/packages/preview-service/src/server/routes/index.ts @@ -0,0 +1,13 @@ +import express from 'express' + +const indexRouterFactory = () => { + const indexRouter = express.Router() + + indexRouter.get('/', (_req, res) => { + res.send('Speckle Object Preview Service') + }) + + return indexRouter +} + +export default indexRouterFactory diff --git a/packages/preview-service/src/server/routes/objects.ts b/packages/preview-service/src/server/routes/objects.ts new file mode 100644 index 0000000000..5e04599987 --- /dev/null +++ b/packages/preview-service/src/server/routes/objects.ts @@ -0,0 +1,94 @@ +import { + getObjectChildrenStreamFactory, + getObjectFactory +} from '@/repositories/objects.js' +import { isSimpleTextRequested, 
simpleTextOrJsonContentType } from '@/utils/headers.js' +import { SpeckleObjectsStream } from '@/utils/speckleObjectsStream.js' +import express, { RequestHandler } from 'express' +import type { Knex } from 'knex' +import { PassThrough, pipeline } from 'stream' +import zlib from 'zlib' + +const objectsRouterFactory = (deps: { db: Knex }) => { + const { db } = deps + const objectsRouter = express.Router() + + // This method was copy-pasted from the server method, without authentication/authorization (this web service is an internal one) + objectsRouter.get( + '/:streamId/:objectId', + async function (req, res) { + const boundLogger = req.log.child({ + streamId: req.params.streamId, + objectId: req.params.objectId + }) + // Populate first object (the "commit") + const obj = await getObjectFactory({ db })({ + streamId: req.params.streamId, + objectId: req.params.objectId + }) + + if (!obj) { + return res.status(404).send('Failed to find object.') + } + + res.writeHead(200, { + 'Content-Encoding': 'gzip', + 'Content-Type': simpleTextOrJsonContentType(req) + }) + + const dbStream = await getObjectChildrenStreamFactory({ db })({ + streamId: req.params.streamId, + objectId: req.params.objectId + }) + const speckleObjStream = new SpeckleObjectsStream(isSimpleTextRequested(req)) + const gzipStream = zlib.createGzip() + + speckleObjStream.write(obj) + + pipeline( + dbStream, + speckleObjStream, + gzipStream, + new PassThrough({ highWaterMark: 16384 * 31 }), + res, + (err) => { + if (err) { + boundLogger.error(err, 'Error downloading object from stream') + } else { + boundLogger.info( + `Downloaded object from stream (size: ${ + gzipStream.bytesWritten / 1000000 + } MB)` + ) + } + } + ) + } as RequestHandler //FIXME: this works around a type error with async, which is resolved in express 5 + ) + + objectsRouter.get( + '/:streamId/:objectId/single', + (async (req, res) => { + const boundLogger = req.log.child({ + streamId: req.params.streamId, + objectId: req.params.objectId + 
}) + const obj = await getObjectFactory({ db })({ + streamId: req.params.streamId, + objectId: req.params.objectId + }) + + if (!obj) { + return res.status(404).send('Failed to find object.') + } + + boundLogger.info('Downloaded single object.') + + res.send(obj.data) + }) as RequestHandler //FIXME: this works around a type error with async, which is resolved in express 5 + ) + + return objectsRouter +} + +export default objectsRouterFactory diff --git a/packages/preview-service/src/server/routes/preview.ts b/packages/preview-service/src/server/routes/preview.ts new file mode 100644 index 0000000000..c68490a3da --- /dev/null +++ b/packages/preview-service/src/server/routes/preview.ts @@ -0,0 +1,71 @@ +import { puppeteerClientFactory } from '@/clients/puppeteer.js' +import { puppeteerDriver } from '@/scripts/puppeteerDriver.js' +import { getScreenshotFactory } from '@/services/screenshot.js' +import { + getChromiumExecutablePath, + getPreviewTimeout, + getPuppeteerUserDataDir, + serviceOrigin, + shouldBeHeadless +} from '@/utils/env.js' +import express, { RequestHandler } from 'express' + +const previewRouterFactory = () => { + const previewRouter = express.Router() + + previewRouter.get( + '/:streamId/:objectId', + async function (req, res) { + const { streamId, objectId } = req.params || {} + const safeParamRgx = /^[\w]+$/i + if (!safeParamRgx.test(streamId) || !safeParamRgx.test(objectId)) { + return res.status(400).json({ error: 'Invalid streamId or objectId!' }) + } + const boundLogger = req.log.child({ streamId, objectId }) + + boundLogger.info('Requesting screenshot.') + + //FIXME should we be creating a puppeteer client for every request, or per app instance? 
+ const puppeteerClient = await puppeteerClientFactory({ + logger: boundLogger, + url: `${serviceOrigin()}/render/`, + script: puppeteerDriver, + launchParams: { + headless: shouldBeHeadless(), + userDataDir: getPuppeteerUserDataDir(), + executablePath: getChromiumExecutablePath(), + protocolTimeout: getPreviewTimeout(), + // we trust the web content that is running, so can disable the sandbox + // disabling the sandbox allows us to run the docker image without linux kernel privileges + args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-dev-shm-usage'] + }, + timeoutMilliseconds: getPreviewTimeout() + }) + + let screenshot: { [key: string]: string } | null = null + try { + screenshot = await getScreenshotFactory({ + loadPageAndEvaluateScript: puppeteerClient.loadPageAndEvaluateScript, + logger: boundLogger, + serviceOrigin: serviceOrigin() + })({ + objectId, + streamId + }) + } finally { + await puppeteerClient.dispose() + } + + if (!screenshot) { + return res.status(500).end() + } + + res.setHeader('content-type', 'image/png') + res.send(screenshot) + } as RequestHandler //FIXME: this works around a type error with async, which is resolved in express 5 + ) + + return previewRouter +} + +export default previewRouterFactory diff --git a/packages/preview-service/src/server/server.ts b/packages/preview-service/src/server/server.ts new file mode 100644 index 0000000000..d504a853e4 --- /dev/null +++ b/packages/preview-service/src/server/server.ts @@ -0,0 +1,109 @@ +import { serverLogger } from '@/observability/logging.js' +import { appFactory as metricsAppFactory } from '@/observability/metricsApp.js' +import { appFactory } from '@/server/app.js' +import { getAppPort, getHost, getMetricsPort } from '@/utils/env.js' +import http from 'http' +import type { Knex } from 'knex' +import { isNaN, isString, toNumber } from 'lodash-es' + +export const startServer = (params: { db: Knex; serveOnRandomPort?: boolean }) => { + const { db } = params + /** + * Get port from 
environment and store in Express. + */ + const inputPort = params.serveOnRandomPort ? 0 : normalizePort(getAppPort()) + const app = appFactory({ db }) + app.set('port', inputPort) + + // we place the metrics on a separate port as we wish to expose it to external monitoring tools, but do not wish to expose other routes (for now) + const inputMetricsPort = params.serveOnRandomPort + ? 0 + : normalizePort(getMetricsPort()) + const metricsApp = metricsAppFactory({ db }) + metricsApp.set('port', inputMetricsPort) + + /** + * Create HTTP server. + */ + + const server = http.createServer(app) + const metricsServer = http.createServer(metricsApp) + + /** + * Listen on provided port, on all network interfaces. + */ + const host = getHost() + server.on('error', onErrorFactory(inputPort)) + server.on('listening', () => { + serverLogger.info('πŸ“‘ Started Preview Service server') + onListening(server) + }) + server.listen(inputPort, host) + metricsServer.on('error', onErrorFactory(inputPort)) + metricsServer.on('listening', () => { + serverLogger.info('πŸ“Š Started Preview Service metrics server') + onListening(metricsServer) + }) + metricsServer.listen(inputMetricsPort, host) + + return { app, server, metricsServer } +} + +export const stopServer = (params: { server: http.Server }) => { + const { server } = params + server.close() +} + +/** + * Normalize a port into a number, string, or false. + */ +function normalizePort(val: string | number) { + const port = toNumber(val) + if (!isNaN(port) && port >= 0) return port + + throw new Error('Invalid port; port must be a positive integer.') +} + +/** + * Event listener for HTTP server "error" event. + */ + +const onErrorFactory = (port: string | number | false) => (error: Error) => { + if ('syscall' in error && error.syscall !== 'listen') { + throw error + } + + const bind = isString(port) ? 
'Pipe ' + port : 'Port ' + port + + if (!('code' in error)) throw error + + // handle specific listen errors with friendly messages + switch (error.code) { + case 'EACCES': + serverLogger.error(error, bind + ' requires elevated privileges') + process.exit(1) + case 'EADDRINUSE': + serverLogger.error(error, bind + ' is already in use') + process.exit(1) + default: + throw error + } +} + +/** + * Event listener for HTTP server "listening" event. + */ + +function onListening(referenceServer: http.Server) { + const addr = referenceServer.address() + if (!addr) throw new Error('Server address is not defined') + + switch (typeof addr) { + case 'string': + serverLogger.info(`Listening on pipe ${addr}`) + return addr + default: + serverLogger.info(`Listening on port ${addr.port}`) + return addr.port + } +} diff --git a/packages/preview-service/src/services/360preview.ts b/packages/preview-service/src/services/360preview.ts new file mode 100644 index 0000000000..a74c6d13ec --- /dev/null +++ b/packages/preview-service/src/services/360preview.ts @@ -0,0 +1,61 @@ +import type { GeneratePreview } from '@/clients/previewService.js' +import type { Angle, ObjectIdentifier, PreviewId } from '@/domain/domain.js' +import type { InsertPreview } from '@/repositories/previews.js' +import crypto from 'crypto' +import { joinImages } from 'join-images' + +export type GenerateAndStore360Preview = ( + task: ObjectIdentifier +) => Promise<{ metadata: Record }> +export const generateAndStore360PreviewFactory = + (deps: { + generatePreview: GeneratePreview + insertPreview: InsertPreview + }): GenerateAndStore360Preview => + async (task: ObjectIdentifier) => { + const responseBody = await deps.generatePreview(task) + + // metadata is key of angle and value of previewId + const metadata: Record = {} + const allImgsArr: Buffer[] = [] + let i = 0 + for (const aKey in responseBody) { + const angle = aKey as Angle + const value = responseBody[angle] + if (!value) { + continue + } + const imgBuffer = 
Buffer.from( + value.replace(/^data:image\/\w+;base64,/, ''), + 'base64' + ) + const previewId = crypto + .createHash('md5') + .update(imgBuffer) + .digest('hex') as PreviewId + + // Save first preview image + if (i++ === 0) { + await deps.insertPreview({ previewId, imgBuffer }) + metadata[angle] = previewId + } + + allImgsArr.push(imgBuffer) + } + + // stitch 360 image + const fullImg = await joinImages(allImgsArr, { + direction: 'horizontal', + offset: 700, + margin: '0 700 0 700', + color: { alpha: 0, r: 0, g: 0, b: 0 } + }) + const png = fullImg.png({ quality: 95 }) + const buff = await png.toBuffer() + const fullImgId = crypto.createHash('md5').update(buff).digest('hex') as PreviewId + + await deps.insertPreview({ previewId: fullImgId, imgBuffer: buff }) + metadata['all' as Angle] = fullImgId + + return { metadata } + } diff --git a/packages/preview-service/src/services/pollForPreview.ts b/packages/preview-service/src/services/pollForPreview.ts new file mode 100644 index 0000000000..aa819b3cd7 --- /dev/null +++ b/packages/preview-service/src/services/pollForPreview.ts @@ -0,0 +1,73 @@ +import type { UpdateHealthcheckData } from '@/clients/execHealthcheck.js' +import { + metricDuration, + metricOperationErrors +} from '@/observability/prometheusMetrics.js' +import type { + GetNextUnstartedObjectPreview, + NotifyUpdate, + UpdatePreviewMetadata +} from '@/repositories/objectPreview.js' +import type { GenerateAndStore360Preview } from '@/services/360preview.js' +import type { Logger } from 'pino' +import type { LabelValues } from 'prom-client' +import { WorkStatus, type WorkToBeDone } from '@/domain/backgroundWorker.js' + +export const pollForAndCreatePreviewFactory = + (deps: { + updateHealthcheckData: UpdateHealthcheckData + getNextUnstartedObjectPreview: GetNextUnstartedObjectPreview + generateAndStore360Preview: GenerateAndStore360Preview + updatePreviewMetadata: UpdatePreviewMetadata + notifyUpdate: NotifyUpdate + logger: Logger + }): WorkToBeDone => + async 
() => { + try { + const task = await deps.getNextUnstartedObjectPreview() + + // notify the healthcheck that we are still alive + deps.updateHealthcheckData() + + if (!task) { + return WorkStatus.NOWORKFOUND + } + + let metricDurationEnd: + | ((labels?: LabelValues) => number) + | undefined = undefined + if (metricDuration) { + metricDurationEnd = metricDuration.startTimer() + } + + try { + const { metadata } = await deps.generateAndStore360Preview(task) + + await deps.updatePreviewMetadata({ + metadata, + streamId: task.streamId, + objectId: task.objectId + }) + + await deps.notifyUpdate({ streamId: task.streamId, objectId: task.objectId }) + } catch (err) { + await deps.updatePreviewMetadata({ + metadata: { err: err instanceof Error ? err.message : JSON.stringify(err) }, + streamId: task.streamId, + objectId: task.objectId + }) + metricOperationErrors?.labels('preview').inc() + } + if (metricDurationEnd) { + metricDurationEnd({ op: 'preview' }) + } + + return WorkStatus.SUCCESS + } catch (err) { + if (metricOperationErrors) { + metricOperationErrors.labels('main_loop').inc() + } + deps.logger.error(err, 'Error executing task') + return WorkStatus.FAILED + } + } diff --git a/packages/preview-service/src/services/screenshot.ts b/packages/preview-service/src/services/screenshot.ts new file mode 100644 index 0000000000..e1a355df79 --- /dev/null +++ b/packages/preview-service/src/services/screenshot.ts @@ -0,0 +1,87 @@ +import { LoadPageAndEvaluateScript } from '@/clients/puppeteer.js' +import type { ObjectIdentifier } from '@/domain/domain.js' +import { reduce } from 'lodash-es' +import type { Logger } from 'pino' +import { z } from 'zod' + +export type GetScreenshot = ( + params: ObjectIdentifier +) => Promise<{ [key: string]: string } | null> + +export const getScreenshotFactory = + (deps: { + loadPageAndEvaluateScript: LoadPageAndEvaluateScript + logger: Logger + serviceOrigin: string + }): GetScreenshot => + async (params) => { + const objectUrl = 
`${deps.serviceOrigin}/streams/${params.streamId}/objects/${params.objectId}` + + const RenderOutputSchema = z.object({ + duration: z.number(), + mem: z.object({ total: z.number() }), + scr: z.record(z.string()) + }) + type RenderOutput = z.infer + + let renderOutput: RenderOutput + try { + // assume it is of type RenderOutput, and validate later + const rawRenderOutput = await deps.loadPageAndEvaluateScript(objectUrl) + renderOutput = await RenderOutputSchema.parseAsync(rawRenderOutput) + } catch (err) { + if (err instanceof z.ZodError) { + deps.logger.error( + err, + 'Error generating preview. Expected output was not returned.' + ) + } else { + deps.logger.error(err, 'Error generating preview.') + } + return null + } + + deps.logger.info( + { + durationSeconds: renderOutput.duration, + totalMemoryMB: renderOutput.mem.total / 1000000, + resultingImages: { + count: Object.keys(renderOutput.scr || {}).length, + totalStringSize: reduce( + renderOutput.scr || {}, + (acc: number, val: string) => acc + val.length, + 0 + ) + } + }, + `Generated preview.` + ) + return renderOutput.scr + + // return ` + // + //
Generated by: ${ret.userAgent}
+ //
Duration in seconds: ${ret.duration}
+ //
Memory in MB: ${ret.mem.total / 1000000}
+ //
Used Memory in MB: ${ret.mem.used / 1000000}
+ //
+ //
+ //
+ //
+ //
+ // + // ` + + // const imageBuffer = new Buffer.from( + // b64Image.replace(/^data:image\/\w+;base64,/, ''), + // 'base64' + // ) + + // // await page.waitForTimeout(500); + // //var response = await page.screenshot({ + // // type: 'png', + // // clip: {x: 0, y: 0, width: 800, height: 800} + // //}); + + // return imageBuffer + } diff --git a/packages/preview-service/src/services/taskManager.ts b/packages/preview-service/src/services/taskManager.ts new file mode 100644 index 0000000000..428e9a5907 --- /dev/null +++ b/packages/preview-service/src/services/taskManager.ts @@ -0,0 +1,41 @@ +import { WorkStatus, WorkToBeDone } from '@/domain/backgroundWorker.js' +import { throwUncoveredError } from '@speckle/shared/dist/esm/index.js' + +let shouldExit = false + +export function forceExit() { + shouldExit = true +} + +type RepeatedlyDoSomeWork = () => void +export const repeatedlyDoSomeWorkFactory = + (deps: { + doSomeWork: WorkToBeDone + onExit: () => void + delayPeriods: { + onSuccess: number + onNoWorkFound: number + onFailed: number + } + }): RepeatedlyDoSomeWork => + async () => { + if (shouldExit) { + deps.onExit() + return + } + + const status = await deps.doSomeWork() + switch (status) { + case WorkStatus.SUCCESS: + setTimeout(repeatedlyDoSomeWorkFactory(deps), deps.delayPeriods.onSuccess) + break + case WorkStatus.NOWORKFOUND: + setTimeout(repeatedlyDoSomeWorkFactory(deps), deps.delayPeriods.onNoWorkFound) + break + case WorkStatus.FAILED: + setTimeout(repeatedlyDoSomeWorkFactory(deps), deps.delayPeriods.onFailed) + break + default: + throwUncoveredError(status) + } + } diff --git a/packages/preview-service/src/utils/brand.ts b/packages/preview-service/src/utils/brand.ts new file mode 100644 index 0000000000..d541d79444 --- /dev/null +++ b/packages/preview-service/src/utils/brand.ts @@ -0,0 +1,9 @@ +declare const brand: unique symbol + +export type Brand = T & { [brand]: TBrand } + +export const isCastableToBrand = ( + val: string | undefined | null +): val is 
TBrand => { + return !!val +} diff --git a/packages/preview-service/src/utils/env.ts b/packages/preview-service/src/utils/env.ts new file mode 100644 index 0000000000..8d8a69999d --- /dev/null +++ b/packages/preview-service/src/utils/env.ts @@ -0,0 +1,28 @@ +export const getAppPort = () => process.env.PORT || '3001' +export const getChromiumExecutablePath = () => { + if (isDevelopment()) return undefined // use default + return process.env.CHROMIUM_EXECUTABLE_PATH || '/usr/bin/google-chrome-stable' +} +export const getHealthCheckFilePath = () => + process.env.HEALTHCHECK_FILE_PATH || '/tmp/last_successful_query' +export const getHost = () => process.env.HOST || '127.0.0.1' +export const getLogLevel = () => process.env.LOG_LEVEL || 'info' +export const getMetricsPort = () => process.env.PROMETHEUS_METRICS_PORT || '9094' +export const getNodeEnv = () => process.env.NODE_ENV || 'production' +export const getPostgresConnectionString = () => + process.env.PG_CONNECTION_STRING || 'postgres://speckle:speckle@127.0.0.1/speckle' +export const getPostgresMaxConnections = () => + parseInt(process.env.POSTGRES_MAX_CONNECTIONS_PREVIEW_SERVICE || '2') +export const getPreviewTimeout = () => + parseInt(process.env.PREVIEW_TIMEOUT || '3600000') +export const getPuppeteerUserDataDir = () => { + if (isDevelopment()) return undefined // use default + return process.env.USER_DATA_DIR || '/tmp/puppeteer' +} +export const isDevelopment = () => + getNodeEnv() === 'development' || getNodeEnv() === 'dev' +export const isLogPretty = () => process.env.LOG_PRETTY?.toLocaleLowerCase() === 'true' +export const isProduction = () => getNodeEnv() === 'production' +export const isTest = () => getNodeEnv() === 'test' +export const serviceOrigin = () => `http://${getHost()}:${getAppPort()}` +export const shouldBeHeadless = () => process.env.PREVIEWS_HEADED !== 'true' diff --git a/packages/preview-service/src/utils/errorHandler.ts b/packages/preview-service/src/utils/errorHandler.ts new file mode 
100644 index 0000000000..626c7b1106 --- /dev/null +++ b/packages/preview-service/src/utils/errorHandler.ts @@ -0,0 +1,25 @@ +import { ErrorRequestHandler } from 'express' +import { isNaN, isObject, isString } from 'lodash-es' + +export const errorHandler: ErrorRequestHandler = (err, req, res) => { + if ( + isObject(err) && + 'status' in err && + typeof err.status === 'number' && + !isNaN(err.status) + ) { + res.status(err?.status) + } else { + res.status(500) + } + + res.setHeader('Content-Type', 'application/json') + + if (req.app.get('env') === 'development') { + res.send(JSON.stringify(err, undefined, 2)) + } else if (isObject(err) && 'message' in err && isString(err.message)) { + res.send(JSON.stringify({ message: err.message })) + } else { + res.send(JSON.stringify({ message: 'Internal Server Error' })) + } +} diff --git a/packages/preview-service/src/utils/headers.ts b/packages/preview-service/src/utils/headers.ts new file mode 100644 index 0000000000..ed9d6b577f --- /dev/null +++ b/packages/preview-service/src/utils/headers.ts @@ -0,0 +1,7 @@ +import express from 'express' + +export const isSimpleTextRequested = (req: express.Request) => + req.headers.accept === 'text/plain' + +export const simpleTextOrJsonContentType = (req: express.Request) => + isSimpleTextRequested(req) ? 
'text/plain' : 'application/json' diff --git a/packages/preview-service/src/utils/runtime.ts b/packages/preview-service/src/utils/runtime.ts new file mode 100644 index 0000000000..d369f1791f --- /dev/null +++ b/packages/preview-service/src/utils/runtime.ts @@ -0,0 +1,9 @@ +import path from 'path' +import { fileURLToPath } from 'url' + +export const getDirname = (importMetaUrl: string) => { + const __filename = fileURLToPath(importMetaUrl) + const __dirname = path.dirname(__filename) + + return __dirname +} diff --git a/packages/preview-service/routes/speckleObjectsStream.js b/packages/preview-service/src/utils/speckleObjectsStream.ts similarity index 58% rename from packages/preview-service/routes/speckleObjectsStream.js rename to packages/preview-service/src/utils/speckleObjectsStream.ts index f67a0d564d..68ffa54d1b 100644 --- a/packages/preview-service/routes/speckleObjectsStream.js +++ b/packages/preview-service/src/utils/speckleObjectsStream.ts @@ -1,9 +1,12 @@ -const { Transform } = require('stream') +import { Transform, type TransformCallback } from 'stream' // A stream that converts database objects stream to "{id}\t{data_json}\n" stream or a json stream of obj.data fields -class SpeckleObjectsStream extends Transform { - constructor(simpleText) { +export class SpeckleObjectsStream extends Transform { + isFirstObject: boolean + simpleText: boolean + + constructor(simpleText: boolean) { super({ writableObjectMode: true }) this.simpleText = simpleText @@ -11,7 +14,11 @@ class SpeckleObjectsStream extends Transform { this.isFirstObject = true } - _transform(dbObj, encoding, callback) { + _transform( + dbObj: { id: string; dataText: unknown; data: unknown }, + _encoding: BufferEncoding, + callback: TransformCallback + ) { let objData = dbObj.dataText if (objData === undefined) objData = JSON.stringify(dbObj.data) @@ -28,14 +35,16 @@ class SpeckleObjectsStream extends Transform { } callback() } catch (e) { - callback(e) + if (typeof e === 'undefined' || e === 
null || e instanceof Error) { + callback(e) + } else { + callback(new Error(JSON.stringify(e))) + } } } - _flush(callback) { + _flush(callback: TransformCallback) { if (!this.simpleText) this.push(']') callback() } } - -exports.SpeckleObjectsStream = SpeckleObjectsStream diff --git a/packages/preview-service/tests/acceptance/README.md b/packages/preview-service/tests/acceptance/README.md new file mode 100644 index 0000000000..dc4d53c83c --- /dev/null +++ b/packages/preview-service/tests/acceptance/README.md @@ -0,0 +1,3 @@ +# Acceptance tests + +This directory contains acceptance tests for the Preview Service. diff --git a/packages/preview-service/tests/acceptance/acceptance.spec.ts b/packages/preview-service/tests/acceptance/acceptance.spec.ts new file mode 100644 index 0000000000..4ab6e1cfdb --- /dev/null +++ b/packages/preview-service/tests/acceptance/acceptance.spec.ts @@ -0,0 +1,113 @@ +import { acceptanceTest } from '#/helpers/testExtensions.js' +import { ObjectPreview, type ObjectPreviewRow } from '@/repositories/objectPreview.js' +import { Previews } from '@/repositories/previews.js' +import cryptoRandomString from 'crypto-random-string' +import { afterEach, beforeEach, describe, expect, inject } from 'vitest' +import { promises as fs } from 'fs' +import { spawn } from 'child_process' +import { OBJECTS_TABLE_NAME } from '#/migrations/migrations.js' +import type { Angle } from '@/domain/domain.js' + +describe.sequential('Acceptance', () => { + describe.sequential('Run the preview-service image in docker', () => { + beforeEach(() => { + const dbName = inject('dbName') + //purposefully running in the background without waiting + void runProcess('docker', [ + 'run', + '--env', + `PG_CONNECTION_STRING=postgres://preview_service_test:preview_service_test@host.docker.internal:5432/${dbName}`, + '--rm', + '--name', + 'preview-service', + 'speckle/preview-service:local' + ]) + }) + afterEach(async () => { + await runProcess('docker', ['stop', 'preview-service']) + 
}) + + // we use integration test and not e2e test because we don't need the server + acceptanceTest( + 'loads data, runs docker image, extracts rendered image', + { + timeout: 300000 //5 minutes + }, + async ({ context }) => { + const { db } = context + const dbName = inject('dbName') + console.log('Running test in database: %s', dbName) + // load data + const streamId = cryptoRandomString({ length: 10 }) + const objectId = cryptoRandomString({ length: 10 }) + + //TODO load object rows from file or sqlite or similar + const objectRow = { + id: objectId, + streamId, + speckleType: 'Base', + totalChildrenCount: 0, + totalChildrenCountByDepth: {}, + data: {} + } + await db.batchInsert(OBJECTS_TABLE_NAME, [objectRow]) + + const objectPreviewRow = { + streamId, + objectId, + priority: 0, + previewStatus: 0 + } + await ObjectPreview({ db }).insert(objectPreviewRow).onConflict().ignore() + + //poll the database until the preview is ready + let objectPreviewResult: Pick[] = + [] + while ( + objectPreviewResult.length === 0 || + objectPreviewResult[0].previewStatus !== 2 + ) { + objectPreviewResult = await ObjectPreview({ db }) + .select(['preview', 'previewStatus']) + .where('streamId', streamId) + .andWhere('objectId', objectId) + + // wait a second before polling again + await new Promise((resolve) => setTimeout(resolve, 1000)) + } + + const previewData = await Previews({ db }) + .select(['data']) + .where('id', objectPreviewResult[0].preview['all' as Angle]) + .first() + + if (!previewData) { + expect(previewData).toBeDefined() + expect(previewData).not.toBeNull() + return //HACK to appease typescript + } + + //TODO use environment variable + const outputFilePath = + process.env.OUTPUT_FILE_PATH || '/tmp/preview-service-output.png' + await fs.writeFile(outputFilePath, previewData.data) + } + ) + }) +}) + +function runProcess(cmd: string, cmdArgs: string[], extraEnv?: Record) { + return new Promise((resolve, reject) => { + const childProc = spawn(cmd, cmdArgs, { env: { 
...process.env, ...extraEnv } }) + childProc.stdout.pipe(process.stdout) + childProc.stderr.pipe(process.stderr) + + childProc.on('close', (code) => { + if (code === 0) { + resolve('success') + } else { + reject(`Parser exited with code ${code}`) + } + }) + }) +} diff --git a/packages/preview-service/tests/e2e/roundtrip.spec.ts b/packages/preview-service/tests/e2e/roundtrip.spec.ts new file mode 100644 index 0000000000..4daa6eb4e7 --- /dev/null +++ b/packages/preview-service/tests/e2e/roundtrip.spec.ts @@ -0,0 +1,34 @@ +// example tests to confirm the servers are running and the API is working + +import { getServerPort } from '#/helpers/helpers.js' +import { e2eTest } from '#/helpers/testExtensions.js' +import { describe } from 'vitest' + +describe.concurrent('E2E', () => { + describe.concurrent('Example', () => { + e2eTest('should start a server on an unique port', async ({ context }) => { + const port = getServerPort(context.server) + console.log(`port1 : ${port}`) + await Promise.resolve() + }) + e2eTest('should start a server on a different port', async ({ context }) => { + const port = getServerPort(context.server) + console.log(`port2 : ${port}`) + await Promise.resolve() + }) + }) + describe.concurrent('adding a job in the database', () => { + e2eTest('should create a preview', async ({ context }) => { + const port = getServerPort(context.server) + console.log(`port3 : ${port}`) + + //TODO add an object in the object store + //TODO add a job in the database + //wait for the job in the database to be updated + //wait for the job in the database to be completed + //ensure the preview is created + //ensure the preview has all the required angles + await Promise.resolve() + }) + }) +}) diff --git a/packages/preview-service/tests/helpers/helpers.ts b/packages/preview-service/tests/helpers/helpers.ts new file mode 100644 index 0000000000..c97b10855a --- /dev/null +++ b/packages/preview-service/tests/helpers/helpers.ts @@ -0,0 +1,49 @@ +import { startServer } from 
'@/server/server.js' +import type { Knex } from 'knex' +import http from 'http' +import type { AddressInfo } from 'net' +import { getPostgresConnectionString } from '@/utils/env.js' + +export const startAndWaitOnServers = async (deps: { db: Knex }) => { + let serverAddress: string | AddressInfo | null = null + let metricsServerAddress: string | AddressInfo | null = null + + const { db } = deps + const { app, server, metricsServer } = startServer({ db, serveOnRandomPort: true }) + server.on('listening', () => { + serverAddress = server.address() + }) + metricsServer.on('listening', () => { + metricsServerAddress = metricsServer.address() + }) + + //HACK wait until both servers are available + while (!serverAddress || !metricsServerAddress) { + // wait for the servers to start + await new Promise((resolve) => setTimeout(resolve, 100)) + } + + return { app, server, metricsServer } +} + +export const getServerPort = (server: http.Server) => { + const address = server.address() + if (address && typeof address !== 'string') { + return address.port + } + throw new Error('Server port is not available') +} + +export const customizePostgresConnectionString = (databaseName?: string) => { + const originalPostgresConnectionString = getPostgresConnectionString() + if (!databaseName) return originalPostgresConnectionString + + const originalPostgresUrl = new URL(originalPostgresConnectionString) + const protocol = originalPostgresUrl.protocol + const user = originalPostgresUrl.username + const pass = originalPostgresUrl.password + const host = originalPostgresUrl.hostname + const port = originalPostgresUrl.port + const origin = `${protocol}//${user}:${pass}@${host}:${port}` + return new URL(databaseName, origin).toString() +} diff --git a/packages/preview-service/tests/helpers/testExtensions.ts b/packages/preview-service/tests/helpers/testExtensions.ts new file mode 100644 index 0000000000..3c8c48d5e1 --- /dev/null +++ b/packages/preview-service/tests/helpers/testExtensions.ts @@ 
-0,0 +1,95 @@ +import { stopServer } from '@/server/server.js' +import { inject, test } from 'vitest' +import { getTestDb } from '#/helpers/testKnexClient.js' +import { startAndWaitOnServers } from '#/helpers/helpers.js' +import type { Knex } from 'knex' +import { Server } from 'http' + +export interface AcceptanceTestContext { + context: { + db: Knex + } +} + +// vitest reference: https://vitest.dev/guide/test-context#fixture-initialization +export const acceptanceTest = test.extend({ + // this key has to match the top level key in the interface (i.e. `context`). Some vitest typing magic at work here. + context: [ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async ({ task, onTestFinished }, use) => { + const dbName = inject('dbName') + // equivalent of beforeEach + const db = getTestDb(dbName) + + // schedule the cleanup. Runs regardless of test status, and runs after afterEach. + onTestFinished(async () => { + //no-op + }) + + // now run the test + await use({ db }) + }, + { auto: true } // we want to run this for each databaseIntegrationTest, even if the context is not explicitly requested by the test + ] +}) + +export interface DatabaseIntegrationTestContext { + context: { + db: Knex.Transaction + } +} + +// vitest reference: https://vitest.dev/guide/test-context#fixture-initialization +export const databaseIntegrationTest = test.extend({ + // this key has to match the top level key in the interface (i.e. `context`). Some vitest typing magic at work here. + context: [ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async ({ task, onTestFinished }, use) => { + const dbName = inject('dbName') + // equivalent of beforeEach + const db = await getTestDb(dbName).transaction() + + // schedule the cleanup. Runs regardless of test status, and runs after afterEach. 
+ onTestFinished(async () => { + await db.rollback() + }) + + // now run the test + await use({ db }) + }, + { auto: true } // we want to run this for each databaseIntegrationTest, even if the context is not explicitly requested by the test + ] +}) + +export interface E2ETestContext extends DatabaseIntegrationTestContext { + context: { + db: Knex.Transaction + server: Server + metricsServer: Server + } +} + +// vitest reference: https://vitest.dev/guide/test-context#fixture-initialization +export const e2eTest = test.extend({ + // this key has to match the top level key in the interface (i.e. `context`). Some vitest typing magic at work here. + context: [ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async ({ task, onTestFinished }, use) => { + const dbName = inject('dbName') + // equivalent of beforeEach + const db = await getTestDb(dbName).transaction() + const { server, metricsServer } = await startAndWaitOnServers({ db }) + + // schedule the cleanup. Runs regardless of test status, and runs after afterEach. 
+ onTestFinished(async () => { + if (server) stopServer({ server }) + if (metricsServer) stopServer({ server: metricsServer }) + if (db) await db.rollback() + }) + + // now run the test + await use({ db, server, metricsServer }) + }, + { auto: true } // we want to run this for each e2eTest, even if the context is not explicitly requested by the test + ] +}) diff --git a/packages/preview-service/tests/helpers/testKnexClient.ts b/packages/preview-service/tests/helpers/testKnexClient.ts new file mode 100644 index 0000000000..0681655224 --- /dev/null +++ b/packages/preview-service/tests/helpers/testKnexClient.ts @@ -0,0 +1,20 @@ +/* eslint-disable camelcase */ +import { knex } from 'knex' +import { customizePostgresConnectionString } from '#/helpers/helpers.js' + +export const getTestDb = (databaseName?: string) => + knex({ + client: 'pg', + connection: { + application_name: 'speckle_preview_service', + connectionString: customizePostgresConnectionString(databaseName) + }, + pool: { min: 0, max: 2 } + // migrations are managed in the server package for production + // for tests, we are creating a new database for each test run so we can't use this default migration functionality + // migrations: { + // extension: '.ts', + // directory: path.resolve(__dirname, '../migrations'), + // loadExtensions: ['js', 'ts'] + // } + }) diff --git a/packages/preview-service/tests/hooks/globalSetup.ts b/packages/preview-service/tests/hooks/globalSetup.ts new file mode 100644 index 0000000000..205de35e15 --- /dev/null +++ b/packages/preview-service/tests/hooks/globalSetup.ts @@ -0,0 +1,64 @@ +/** + * These hooks are run once, before and after the test suite. + * It is configured via the vitest.config.ts file. 
+ */ +import '@/bootstrap.js' // This has side-effects and has to be imported first +import { getTestDb } from '#/helpers/testKnexClient.js' +import { down, up } from '#/migrations/migrations.js' +import { testLogger as logger } from '@/observability/logging.js' +import cryptoRandomString from 'crypto-random-string' +import type { GlobalSetupContext } from 'vitest/node' + +declare module 'vitest' { + export interface ProvidedContext { + dbName: string + } +} + +const dbName = `preview_service_${cryptoRandomString({ + length: 10, + type: 'alphanumeric' +})}`.toLocaleLowerCase() //postgres will automatically lower case new db names + +/** + * Global setup hook + * This hook is run once before any tests are run + * Defined in vitest.config.ts under test.globalSetup + */ +export async function setup({ provide }: GlobalSetupContext) { + logger.info('πŸƒπŸ»β€β™€οΈβ€βž‘οΈ Running vitest setup global hook') + const superUserDbClient = getTestDb() + await superUserDbClient.raw(`CREATE DATABASE ${dbName} + WITH + OWNER = preview_service_test + ENCODING = 'UTF8' + TABLESPACE = pg_default + CONNECTION LIMIT = -1;`) + await superUserDbClient.destroy() // need to explicitly close the connection in clients to prevent hanging tests + + // this provides the dbName to all tests, and can be accessed via inject('dbName'). NB: The test extensions already implement this, so use a test extension. + provide('dbName', dbName) + + const db = getTestDb(dbName) + await up(db) //we need the migration to occur in our new database, so cannot use knex's built in migration functionality. 
+ await db.destroy() // need to explicitly close the connection in clients to prevent hanging tests + logger.info('πŸ’πŸ½β€β™€οΈ Completed the vitest setup global hook') +} + +/** + * Global teardown hook + * This hook is run once after all tests are run + * Defined in vitest.config.ts under test.globalTeardown + */ +export async function teardown() { + logger.info('πŸƒπŸ»β€β™€οΈ Running vitest teardown global hook') + const db = getTestDb(dbName) + await down(db) //we need the migration to occur in our named database, so cannot use knex's built in migration functionality. + await db.destroy() // need to explicitly close the connection in clients to prevent hanging tests + + //use connection without database to drop the db + const superUserDbClient = getTestDb() + await superUserDbClient.raw(`DROP DATABASE ${dbName};`) + await superUserDbClient.destroy() // need to explicitly close the connection in clients to prevent hanging tests + logger.info('βœ… Completed the vitest teardown global hook') +} diff --git a/packages/preview-service/tests/integration/repositories/objectPreview.spec.ts b/packages/preview-service/tests/integration/repositories/objectPreview.spec.ts new file mode 100644 index 0000000000..cdb509a71e --- /dev/null +++ b/packages/preview-service/tests/integration/repositories/objectPreview.spec.ts @@ -0,0 +1,38 @@ +import { databaseIntegrationTest } from '#/helpers/testExtensions.js' +import { + ObjectPreview, + getNextUnstartedObjectPreviewFactory +} from '@/repositories/objectPreview.js' +import cryptoRandomString from 'crypto-random-string' +import { describe, expect } from 'vitest' + +describe.concurrent('Repositories: ObjectPreview', () => { + describe.concurrent('getNextUnstartedObjectPreview', () => { + databaseIntegrationTest( + 'should return the next unstarted object preview', + async ({ context }) => { + const streamId = cryptoRandomString({ length: 10 }) + const objectId = cryptoRandomString({ length: 10 }) + const insertionObject = 
{ + streamId, + objectId, + priority: 0, + previewStatus: 0 + } + const sqlQuery = ObjectPreview({ db: context.db }) + .insert(insertionObject) + .onConflict() + .ignore() + await context.db.raw(sqlQuery.toQuery()) + + const getNextUnstartedObjectPreview = getNextUnstartedObjectPreviewFactory({ + db: context.db + }) + const result = await getNextUnstartedObjectPreview() + expect(result).toBeDefined() + expect(result.streamId).toEqual(streamId) + expect(result.objectId).toEqual(objectId) + } + ) + }) +}) diff --git a/packages/preview-service/tests/migrations/README.md b/packages/preview-service/tests/migrations/README.md new file mode 100644 index 0000000000..580bce566b --- /dev/null +++ b/packages/preview-service/tests/migrations/README.md @@ -0,0 +1,7 @@ +# Knex Migrations + +This is not your regular knex migrations directory. + +Because the test database is expected to be in a clean state before each test, we need to run migrations rollback and up before each run of tests and additionally rollback after each run. + +Therefore we can just have one single migration file, and don't need to version it. 
diff --git a/packages/preview-service/tests/migrations/migrations.ts b/packages/preview-service/tests/migrations/migrations.ts new file mode 100644 index 0000000000..166db93d50 --- /dev/null +++ b/packages/preview-service/tests/migrations/migrations.ts @@ -0,0 +1,106 @@ +import type { Knex } from 'knex' + +const OBJECT_PREVIEW_TABLE_NAME = 'object_preview' +const PREVIEWS_TABLE_NAME = 'previews' +export const OBJECTS_TABLE_NAME = 'objects' +const DB_NAME_PREFIX = 'preview_service_' + +const getDatabaseName = (deps: { db: Knex }) => { + return deps.db.raw<{ rows: { datname: string }[] }>( + `SELECT current_database() as datname` + ) +} + +const getAllTableNames = (deps: { db: Knex }) => { + return deps.db.raw<{ rows: { tablename: string }[] }>( + `SELECT tablename FROM pg_tables WHERE schemaname='public'` + ) +} + +const throwIfDbNameDoesNotStartWithPrefix = async (deps: { db: Knex }) => { + const { rows: dbNameRows } = await getDatabaseName(deps) + const dbName = dbNameRows[0].datname + if (!dbName.startsWith(DB_NAME_PREFIX)) { + throw new Error( + `Database name does not start with "${DB_NAME_PREFIX}", it is unsafe to migrate to test schema. Aborting.` + ) + } +} + +const hasExpectedTableNames = (params: { tableNames: string[] }) => { + const { tableNames } = params + return ( + tableNames.length === 3 && + [OBJECT_PREVIEW_TABLE_NAME, OBJECTS_TABLE_NAME, PREVIEWS_TABLE_NAME].every((t) => + tableNames.includes(t) + ) + ) +} + +const throwIfNotSafeToMigrateUp = async (deps: { db: Knex }) => { + await throwIfDbNameDoesNotStartWithPrefix(deps) + + const { rows } = await getAllTableNames(deps) + const tableNames = rows.map((x) => x.tablename) + if (tableNames.length > 0 && !hasExpectedTableNames({ tableNames })) { + throw new Error( + `Database has unexpected tables, it is unsafe to migrate to test schema. Aborting. 
Tables found: ${tableNames.join( + ', ' + )}` + ) + } +} + +const throwIfNotSafeToMigrateDown = async (deps: { db: Knex }) => { + await throwIfDbNameDoesNotStartWithPrefix(deps) + + const { rows } = await getAllTableNames(deps) + const tableNames = rows.map((x) => x.tablename) + if (!hasExpectedTableNames({ tableNames })) { + throw new Error( + `Database already has unexpected tables, it is unsafe to migrate to test schema. Aborting. Tables found: ${tableNames.join( + ', ' + )}` + ) + } +} + +export const up = async (db: Knex) => { + await throwIfNotSafeToMigrateUp({ db }) + + await db.schema.createTable(OBJECT_PREVIEW_TABLE_NAME, (table) => { + table.string('streamId', 10) //ignoring fk on streams table for simplicity + table.string('objectId').notNullable() + table.integer('previewStatus').notNullable().defaultTo(0) //TODO should be an enum + table.integer('priority').notNullable().defaultTo(1) + table.timestamp('lastUpdate').notNullable().defaultTo(db.fn.now()) + table.jsonb('preview') + table.primary(['streamId', 'objectId']) + table.index(['previewStatus', 'priority', 'lastUpdate']) + }) + + await db.schema.createTable(PREVIEWS_TABLE_NAME, (table) => { + table.string('id').primary() + table.binary('data') + }) + + await db.schema.createTable(OBJECTS_TABLE_NAME, (table) => { + table.string('id') + table.string('streamId', 10) //ignoring fk on streams table for simplicity + table.string('speckleType', 1024).defaultTo('Base').notNullable() + table.integer('totalChildrenCount') + table.jsonb('totalChildrenCountByDepth') + table.timestamp('createdAt').defaultTo(db.fn.now()) + table.jsonb('data') + table.index('id') + table.index('streamId') + table.primary(['streamId', 'id']) + }) +} + +export const down = async (db: Knex) => { + await throwIfNotSafeToMigrateDown({ db }) + await db.schema.dropTable(OBJECT_PREVIEW_TABLE_NAME) + await db.schema.dropTable(PREVIEWS_TABLE_NAME) + await db.schema.dropTable(OBJECTS_TABLE_NAME) +} diff --git 
a/packages/preview-service/tests/unit/services/pollForPreview.spec.ts b/packages/preview-service/tests/unit/services/pollForPreview.spec.ts new file mode 100644 index 0000000000..e2474c4e2d --- /dev/null +++ b/packages/preview-service/tests/unit/services/pollForPreview.spec.ts @@ -0,0 +1,48 @@ +import { logger } from '@/observability/logging.js' +import { pollForAndCreatePreviewFactory } from '@/services/pollForPreview.js' +import { describe, expect, it } from 'vitest' + +describe.concurrent('Polling for preview', () => { + describe.concurrent('pollForAndCreatePreview', () => { + it('calls all component functions with expected parameters', async () => { + const called: Record<string, number> = {} + const pollForAndCreatePreview = pollForAndCreatePreviewFactory({ + updateHealthcheckData: () => { + called['updateHealthcheckData'] = called['updateHealthcheckData']++ || 1 + }, + getNextUnstartedObjectPreview: async () => + Promise.resolve({ + streamId: 'streamId', + objectId: 'objectId' + }), + generateAndStore360Preview: async (task) => { + called['generateAndStore360Preview'] = + called['generateAndStore360Preview']++ || 1 + expect(task).toEqual({ streamId: 'streamId', objectId: 'objectId' }) + return Promise.resolve({ metadata: { all: 'myJoinedUpPreviewId' } }) + }, + updatePreviewMetadata: async (params) => { + called['updatePreviewMetadata'] = called['updatePreviewMetadata']++ || 1 + expect(params).toEqual({ + metadata: { all: 'myJoinedUpPreviewId' }, + streamId: 'streamId', + objectId: 'objectId' + }) + return Promise.resolve() + }, + notifyUpdate: async (task) => { + called['notifyUpdate'] = called['notifyUpdate']++ || 1 + expect(task).toEqual({ streamId: 'streamId', objectId: 'objectId' }) + return Promise.resolve() + }, + logger + }) + + await pollForAndCreatePreview() + expect(called['updateHealthcheckData']).toBeGreaterThanOrEqual(1) + expect(called['generateAndStore360Preview']).toBeGreaterThanOrEqual(1) + expect(called['updatePreviewMetadata']).toBeGreaterThanOrEqual(1) + 
expect(called['notifyUpdate']).toBeGreaterThanOrEqual(1) + }) + }) +}) diff --git a/packages/preview-service/tests/unit/services/screenshot.spec.ts b/packages/preview-service/tests/unit/services/screenshot.spec.ts new file mode 100644 index 0000000000..94d294f718 --- /dev/null +++ b/packages/preview-service/tests/unit/services/screenshot.spec.ts @@ -0,0 +1,47 @@ +import { LoadPageAndEvaluateScript } from '@/clients/puppeteer.js' +import { logger } from '@/observability/logging.js' +import { getScreenshotFactory } from '@/services/screenshot.js' +import { afterEach, describe, expect, it, vi } from 'vitest' + +describe('Screenshot', () => { + afterEach(() => { + vi.restoreAllMocks() + }) + describe('with Puppeteer returning a valid responses', () => { + const loadPageAndEvaluateScript: LoadPageAndEvaluateScript = ( + urlOfObjectToScreenshot + ) => { + //NOTE if this expectation fails it won't get explicitly captured by vitest. Instead we get null output from getScreenshot. + expect(urlOfObjectToScreenshot).toBe( + 'http://localhost:0000/streams/streamId/objects/objectId' + ) + return Promise.resolve({ + duration: 1000, + mem: { total: 500, used: 400 }, + userAgent: 'Test Testerson', + scr: { + '0': 'data:image/png;base64,foobar', + '1': 'data:image/png;base64,foobar' + } + }) + } + + it('receives the screenshot', async () => { + const getScreenshot = getScreenshotFactory({ + loadPageAndEvaluateScript, + logger, + serviceOrigin: 'http://localhost:0000' + }) + const screenshot = await getScreenshot({ + streamId: 'streamId', + objectId: 'objectId' + }) + if (!screenshot) { + expect(screenshot).not.toBe(null) + return //to avoid TS error + } + expect(screenshot['0']).toBe('data:image/png;base64,foobar') + expect(screenshot['1']).toBe('data:image/png;base64,foobar') + }) + }) +}) diff --git a/packages/preview-service/tsconfig.build.json b/packages/preview-service/tsconfig.build.json new file mode 100644 index 0000000000..f2d7b55d71 --- /dev/null +++ 
b/packages/preview-service/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src/**/*"], + "exclude": ["**/*.spec.js", "**/*.spec.ts", "tests/**/*"] +} diff --git a/packages/preview-service/tsconfig.json b/packages/preview-service/tsconfig.json new file mode 100644 index 0000000000..0eebc69565 --- /dev/null +++ b/packages/preview-service/tsconfig.json @@ -0,0 +1,109 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig.json to read more about this file */ + + /* Projects */ + // "incremental": true, /* Enable incremental compilation */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "ES2022" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 
'React.createElement' or 'h' */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ + // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + + /* Modules */ + "module": "node16" /* Specify what module code is generated. */, + "rootDir": "./" /* Specify the root folder within your source files. */, + "moduleResolution": "node16" /* Specify how TypeScript looks up a file from a given module specifier. */, + "baseUrl": "./" /* Specify the base directory to resolve non-relative module names. */, + "paths": { + "@/*": ["./src/*"], + "#/*": ["./tests/*"] + }, + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "resolveJsonModule": true, /* Enable importing .json files */ + // "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + "allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */, + "checkJs": false /* Enable error reporting in type-checked JavaScript files. 
*/, + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + "sourceMap": true /* Create source map files for emitted JavaScript files. */, + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist" /* Specify an output folder for all emitted files. */, + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. 
*/ + // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + "allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */, + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, + + /* Type Checking */ + "strict": true /* Enable all strict type-checking options. */, + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ + // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. 
*/ + // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "ts-node": { + "swc": true + }, + "include": ["src/**/*", "tests/**/*", "vitest.config.ts"], + "exclude": ["node_modules", "coverage", "reports"] +} diff --git a/packages/preview-service/vitest.config.ts b/packages/preview-service/vitest.config.ts new file mode 100644 index 0000000000..a751fa9b1b --- /dev/null +++ b/packages/preview-service/vitest.config.ts @@ -0,0 +1,20 @@ +import path from 'path' +import { configDefaults, defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + exclude: [...configDefaults.exclude], + globalSetup: ['./tests/hooks/globalSetup.ts'], + // reporters: ['verbose', 'hanging-process'] //uncomment to debug hanging processes etc. + sequence: { + shuffle: true, + concurrent: true + } + }, + resolve: { + alias: { + '@': path.resolve(__dirname, './src'), + '#': path.resolve(__dirname, './tests') + } + } +}) diff --git a/packages/preview-service/webpack.config.render_page.js b/packages/preview-service/webpack.config.renderPage.cjs similarity index 83% rename from packages/preview-service/webpack.config.render_page.js rename to packages/preview-service/webpack.config.renderPage.cjs index 838fca57dd..26daecd85a 100644 --- a/packages/preview-service/webpack.config.render_page.js +++ b/packages/preview-service/webpack.config.renderPage.cjs @@ -21,11 +21,11 @@ if (env === 'build') { */ const config = { mode, - entry: path.resolve(__dirname + '/render_page/src/app.js'), + entry: path.resolve(path.join(__dirname, 'renderPage', 'src', 'app.js')), target: 'web', devtool: 'source-map', output: { - path: path.resolve(__dirname + '/public/render'), + path: path.resolve(path.join(__dirname, 'dist', 'public', 'render')), filename: outputFile }, module: { @@ -47,16 +47,16 @@ const config = { new CleanWebpackPlugin({ cleanStaleWebpackAssets: false }), new HtmlWebpackPlugin({ title: 'Speckle Viewer Example', - template: 'render_page/src/example.html', + template: 'renderPage/src/example.html', filename: 'index.html', - favicon: 
'render_page/src/favicon.ico' + favicon: 'renderPage/src/favicon.ico' }) ], resolve: { modules: [ path.resolve('../../node_modules'), path.resolve('./node_modules'), - path.resolve('.render_page/src') + path.resolve('.renderPage/src') ], extensions: ['.json', '.js'] }, diff --git a/setup/db/10-docker_postgres_init.sql b/setup/db/10-docker_postgres_init.sql index a13eb2069e..fbe3923ec9 100644 --- a/setup/db/10-docker_postgres_init.sql +++ b/setup/db/10-docker_postgres_init.sql @@ -1,8 +1,20 @@ CREATE DATABASE speckle2_test - WITH + WITH OWNER = speckle ENCODING = 'UTF8' LC_COLLATE = 'en_US.utf8' LC_CTYPE = 'en_US.utf8' TABLESPACE = pg_default - CONNECTION LIMIT = -1; \ No newline at end of file + CONNECTION LIMIT = -1; +CREATE USER preview_service_test WITH PASSWORD 'preview_service_test'; +CREATE DATABASE preview_service_test + WITH + OWNER = preview_service_test + ENCODING = 'UTF8' + LC_COLLATE = 'en_US.utf8' + LC_CTYPE = 'en_US.utf8' + TABLESPACE = pg_default + CONNECTION LIMIT = -1; +GRANT CREATE ON TABLESPACE pg_default TO preview_service_test; -- required to create databases +ALTER USER preview_service_test CREATEDB; -- Allow user to create databases +GRANT pg_write_all_data TO preview_service_test; diff --git a/utils/helm/speckle-server/templates/preview_service/deployment.yml b/utils/helm/speckle-server/templates/preview_service/deployment.yml index 04dbda5e0c..76adcf4c26 100644 --- a/utils/helm/speckle-server/templates/preview_service/deployment.yml +++ b/utils/helm/speckle-server/templates/preview_service/deployment.yml @@ -35,7 +35,7 @@ spec: command: - node - -e - - process.exit(Date.now() - require('fs').readFileSync('/tmp/last_successful_query', 'utf8') > 3600 * 1000) + - {{ printf "process.exit(Date.now() - require('fs').readFileSync('/tmp/last_successful_query', 'utf8') > %d)" .Values.preview_service.puppeteer.timeoutMilliseconds }} resources: requests: @@ -84,6 +84,16 @@ spec: value: "/postgres-certificate/ca-certificate.crt" {{- end }} + {{- if 
.Values.preview_service.puppeteer.userDataDirectory }} + - name: USER_DATA_DIR + value: {{ .Values.preview_service.puppeteer.userDataDirectory | quote }} + {{- end }} + + {{- if .Values.preview_service.puppeteer.timeoutMilliseconds }} + - name: PREVIEW_TIMEOUT + value: {{ .Values.preview_service.puppeteer.timeoutMilliseconds | quote }} + {{- end }} + {{- if .Values.preview_service.affinity }} affinity: {{- include "speckle.renderTpl" (dict "value" .Values.preview_service.affinity "context" $) | nindent 8 }} {{- end }} diff --git a/utils/helm/speckle-server/values.schema.json b/utils/helm/speckle-server/values.schema.json index ebe8df9337..d365141069 100644 --- a/utils/helm/speckle-server/values.schema.json +++ b/utils/helm/speckle-server/values.schema.json @@ -1666,6 +1666,21 @@ } } }, + "puppeteer": { + "type": "object", + "properties": { + "userDataDirectory": { + "type": "string", + "description": "The path to the user data directory. If not set, defaults to '/tmp/puppeteer'. This is mounted in the deployment as a volume with read-write access.", + "default": "" + }, + "timeoutMilliseconds": { + "type": "string", + "description": "The timeout in milliseconds for the Puppeteer service.", + "default": "3600000" + } + } + }, "requests": { "type": "object", "properties": { diff --git a/utils/helm/speckle-server/values.yaml b/utils/helm/speckle-server/values.yaml index 77192342c1..8763ec7873 100644 --- a/utils/helm/speckle-server/values.yaml +++ b/utils/helm/speckle-server/values.yaml @@ -1051,6 +1051,12 @@ preview_service: ## @param preview_service.monitoring.metricsPort The port on which the metrics server will be exposed. metricsPort: '9094' + puppeteer: + ## @param preview_service.puppeteer.userDataDirectory The path to the user data directory. If not set, defaults to '/tmp/puppeteer'. This is mounted in the deployment as a volume with read-write access. 
+ userDataDirectory: '' + ## @param preview_service.puppeteer.timeoutMilliseconds The timeout in milliseconds for the Puppeteer service. + timeoutMilliseconds: '3600000' + requests: ## @param preview_service.requests.cpu The CPU that should be available on a node when scheduling this pod. ## ref: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ diff --git a/yarn.lock b/yarn.lock index 2b5943ce0e..8d51828b4b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -15296,12 +15296,21 @@ __metadata: "@speckle/objectloader": "workspace:^" "@speckle/shared": "workspace:^" "@speckle/viewer": "workspace:^" + "@types/express": "npm:^4.17.13" + "@types/lodash-es": "npm:^4.17.6" + "@types/node": "npm:^18.19.38" + "@vitest/coverage-istanbul": "npm:^1.6.0" babel-loader: "npm:^8.2.2" clean-webpack-plugin: "npm:^4.0.0-alpha.0" + concurrently: "npm:^8.2.2" cookie-parser: "npm:~1.4.4" crypto: "npm:^1.0.1" + crypto-random-string: "npm:^5.0.0" + dotenv: "npm:^16.4.5" eslint: "npm:^9.4.0" eslint-config-prettier: "npm:^9.1.0" + eslint-plugin-vitest: "npm:^0.5.4" + esm-module-alias: "npm:^2.2.0" express: "npm:^4.19.2" file-type: "npm:^16.5.4" html-webpack-plugin: "npm:^5.3.1" @@ -15309,6 +15318,7 @@ __metadata: join-images: "npm:^1.1.3" knex: "npm:^2.4.1" lodash: "npm:^4.17.21" + lodash-es: "npm:^4.17.21" node-fetch: "npm:^2.6.1" nodemon: "npm:^2.0.20" pg: "npm:^8.7.3" @@ -15319,12 +15329,18 @@ __metadata: prettier: "npm:^2.5.1" prom-client: "npm:^14.0.1" puppeteer: "npm:^22.11.1" + rimraf: "npm:^5.0.7" sharp: "npm:^0.32.6" + tarn: "npm:^3.0.2" + typescript: "npm:^4.6.4" + typescript-eslint: "npm:^7.12.0" + vitest: "npm:^1.6.0" webpack: "npm:^5.76.0" webpack-cli: "npm:^4.6.0" webpack-dev-server: "npm:^4.6.0" yargs: "npm:^17.3.0" zlib: "npm:^1.0.5" + zod: "npm:^3.23.8" languageName: unknown linkType: soft @@ -18638,6 +18654,15 @@ __metadata: languageName: node linkType: hard +"@types/node@npm:^18.19.38": + version: 18.19.39 + resolution: "@types/node@npm:18.19.39" + 
dependencies: + undici-types: "npm:~5.26.4" + checksum: 10/d2fe84adf087a4184217b666f675e99678060d15f84882a4a1c3e49c3dca521a7e99a201a3c073c2b60b00419f1f4c3b357d8f7397f65e400dc3b77b0145a1da + languageName: node + linkType: hard + "@types/nodemailer@npm:^6.4.5": version: 6.4.5 resolution: "@types/nodemailer@npm:6.4.5" @@ -19302,6 +19327,16 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/scope-manager@npm:7.13.1": + version: 7.13.1 + resolution: "@typescript-eslint/scope-manager@npm:7.13.1" + dependencies: + "@typescript-eslint/types": "npm:7.13.1" + "@typescript-eslint/visitor-keys": "npm:7.13.1" + checksum: 10/fea9ab8f72ace1dd55d835037efe038c70021275581855820cdb7fc4b01e8afb51723856537adff1fdb0ea3899c1f8b593fd75c34b5087ca2ef2f7c72e610050 + languageName: node + linkType: hard + "@typescript-eslint/type-utils@npm:7.12.0": version: 7.12.0 resolution: "@typescript-eslint/type-utils@npm:7.12.0" @@ -19333,6 +19368,13 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/types@npm:7.13.1": + version: 7.13.1 + resolution: "@typescript-eslint/types@npm:7.13.1" + checksum: 10/006a5518608184c1d017b27fb4f66ce28bc75f89e2380ac42969ebdf0dc726af1cfcdf4ba36ce2858e9f6907d6f4295d3453859d7e9a35bc7855d4ebc900955d + languageName: node + linkType: hard + "@typescript-eslint/typescript-estree@npm:5.62.0": version: 5.62.0 resolution: "@typescript-eslint/typescript-estree@npm:5.62.0" @@ -19370,6 +19412,25 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/typescript-estree@npm:7.13.1": + version: 7.13.1 + resolution: "@typescript-eslint/typescript-estree@npm:7.13.1" + dependencies: + "@typescript-eslint/types": "npm:7.13.1" + "@typescript-eslint/visitor-keys": "npm:7.13.1" + debug: "npm:^4.3.4" + globby: "npm:^11.1.0" + is-glob: "npm:^4.0.3" + minimatch: "npm:^9.0.4" + semver: "npm:^7.6.0" + ts-api-utils: "npm:^1.3.0" + peerDependenciesMeta: + typescript: + optional: true + checksum: 
10/5c68b5faa962e5f984067aa91770486af817858d2fa35b54a44fa4d5c0c612ba23b52b191d8051d9e4439e5425251e32861c81239e9400a29de057f8360537fb + languageName: node + linkType: hard + "@typescript-eslint/utils@npm:7.12.0, @typescript-eslint/utils@npm:^7.4.0, @typescript-eslint/utils@npm:^7.8.0, @typescript-eslint/utils@npm:^7.9.0": version: 7.12.0 resolution: "@typescript-eslint/utils@npm:7.12.0" @@ -19402,6 +19463,20 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/utils@npm:^7.7.1": + version: 7.13.1 + resolution: "@typescript-eslint/utils@npm:7.13.1" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.4.0" + "@typescript-eslint/scope-manager": "npm:7.13.1" + "@typescript-eslint/types": "npm:7.13.1" + "@typescript-eslint/typescript-estree": "npm:7.13.1" + peerDependencies: + eslint: ^8.56.0 + checksum: 10/e1bc916dcb567c6b35819f635a84561e015f40b28d650b987f74c79b013ec43fb4f5b61199d4039fcdf9480281f945f622650cba2e68739600822da05808a706 + languageName: node + linkType: hard + "@typescript-eslint/visitor-keys@npm:5.62.0": version: 5.62.0 resolution: "@typescript-eslint/visitor-keys@npm:5.62.0" @@ -19422,6 +19497,16 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/visitor-keys@npm:7.13.1": + version: 7.13.1 + resolution: "@typescript-eslint/visitor-keys@npm:7.13.1" + dependencies: + "@typescript-eslint/types": "npm:7.13.1" + eslint-visitor-keys: "npm:^3.4.3" + checksum: 10/811e9642851359b5197d45a9878143c4c608aaef887a20c26f57f8b012ce9e316d232b82a311bdd52a2af0c8b8da5d4bd9401ce565fc7bdb43cd44556e76d225 + languageName: node + linkType: hard + "@ungap/structured-clone@npm:^1.0.0": version: 1.2.0 resolution: "@ungap/structured-clone@npm:1.2.0" @@ -19683,6 +19768,25 @@ __metadata: languageName: node linkType: hard +"@vitest/coverage-istanbul@npm:^1.6.0": + version: 1.6.0 + resolution: "@vitest/coverage-istanbul@npm:1.6.0" + dependencies: + debug: "npm:^4.3.4" + istanbul-lib-coverage: "npm:^3.2.2" + istanbul-lib-instrument: "npm:^6.0.1" + 
istanbul-lib-report: "npm:^3.0.1" + istanbul-lib-source-maps: "npm:^5.0.4" + istanbul-reports: "npm:^3.1.6" + magicast: "npm:^0.3.3" + picocolors: "npm:^1.0.0" + test-exclude: "npm:^6.0.0" + peerDependencies: + vitest: 1.6.0 + checksum: 10/a77796e01f5b9c280a40dde7d9581aadeebeeb1d2fdc1598cbdaf736e14c72716bb46c9311fe7ee6776621b05f7ef52aa1ec6c750a04f0a29085f8971991fc54 + languageName: node + linkType: hard + "@vitest/coverage-v8@npm:^1.6.0": version: 1.6.0 resolution: "@vitest/coverage-v8@npm:1.6.0" @@ -25771,6 +25875,26 @@ __metadata: languageName: node linkType: hard +"concurrently@npm:^8.2.2": + version: 8.2.2 + resolution: "concurrently@npm:8.2.2" + dependencies: + chalk: "npm:^4.1.2" + date-fns: "npm:^2.30.0" + lodash: "npm:^4.17.21" + rxjs: "npm:^7.8.1" + shell-quote: "npm:^1.8.1" + spawn-command: "npm:0.0.2" + supports-color: "npm:^8.1.1" + tree-kill: "npm:^1.2.2" + yargs: "npm:^17.7.2" + bin: + conc: dist/bin/concurrently.js + concurrently: dist/bin/concurrently.js + checksum: 10/dcb1aa69d9c611a7bda9d4fc0fe1e388f971d1744acec7e0d52dffa2ef55743f1266ec9292f414c5789b9f61734b3fce772bd005d4de9564a949fb121b97bae1 + languageName: node + linkType: hard + "confbox@npm:^0.1.3": version: 0.1.3 resolution: "confbox@npm:0.1.3" @@ -26392,6 +26516,15 @@ __metadata: languageName: node linkType: hard +"crypto-random-string@npm:^5.0.0": + version: 5.0.0 + resolution: "crypto-random-string@npm:5.0.0" + dependencies: + type-fest: "npm:^2.12.2" + checksum: 10/bb1b918649e3e4d0e5bc3ee8d141829e7baed938c31a607943103bb32b04167575fe618e2937899df0fde0e3f7d8e2859ae7af7af2167cbeab372dbd13f5ce7e + languageName: node + linkType: hard + "crypto@npm:^1.0.1": version: 1.0.1 resolution: "crypto@npm:1.0.1" @@ -26971,7 +27104,7 @@ __metadata: languageName: node linkType: hard -"date-fns@npm:^2.29.3": +"date-fns@npm:^2.29.3, date-fns@npm:^2.30.0": version: 2.30.0 resolution: "date-fns@npm:2.30.0" dependencies: @@ -29247,6 +29380,23 @@ __metadata: languageName: node linkType: hard 
+"eslint-plugin-vitest@npm:^0.5.4": + version: 0.5.4 + resolution: "eslint-plugin-vitest@npm:0.5.4" + dependencies: + "@typescript-eslint/utils": "npm:^7.7.1" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + vitest: "*" + peerDependenciesMeta: + "@typescript-eslint/eslint-plugin": + optional: true + vitest: + optional: true + checksum: 10/a81eda0b6fff5f05afa9e4e2deb114562e8a53e224293a0dd3f524c01a240a1f8b6c7284d15862c5b740adc6816a2f23e5b96fc65d95c0abd24a5ef171215589 + languageName: node + linkType: hard + "eslint-plugin-vue@npm:^9.26.0": version: 9.26.0 resolution: "eslint-plugin-vue@npm:9.26.0" @@ -29393,6 +29543,13 @@ __metadata: languageName: node linkType: hard +"esm-module-alias@npm:^2.2.0": + version: 2.2.0 + resolution: "esm-module-alias@npm:2.2.0" + checksum: 10/baf90732dd45442f6ef1ad34f1d864f772d45366ea43d3f5f4d3337d7badf75e2f48ef611fa14bd09a4180699f385eee18ce8561026f677d6a988385094098ce + languageName: node + linkType: hard + "esm-resolve@npm:^1.0.8": version: 1.0.9 resolution: "esm-resolve@npm:1.0.9" @@ -33986,7 +34143,7 @@ __metadata: languageName: node linkType: hard -"istanbul-lib-instrument@npm:^6.0.0": +"istanbul-lib-instrument@npm:^6.0.0, istanbul-lib-instrument@npm:^6.0.1": version: 6.0.2 resolution: "istanbul-lib-instrument@npm:6.0.2" dependencies: @@ -46187,6 +46344,13 @@ __metadata: languageName: node linkType: hard +"spawn-command@npm:0.0.2, spawn-command@npm:^0.0.2-1": + version: 0.0.2 + resolution: "spawn-command@npm:0.0.2" + checksum: 10/f13e8c3c63abd4a0b52fb567eba5f7940d480c5ed3ec61781d38a1850f179b1196c39e6efa2bbd301f82c1bf1cd7807abc8fbd8fc8e44bcaa3975a124c0d1657 + languageName: node + linkType: hard + "spawn-command@npm:0.0.2-1": version: 0.0.2-1 resolution: "spawn-command@npm:0.0.2-1" @@ -46194,13 +46358,6 @@ __metadata: languageName: node linkType: hard -"spawn-command@npm:^0.0.2-1": - version: 0.0.2 - resolution: "spawn-command@npm:0.0.2" - checksum: 
10/f13e8c3c63abd4a0b52fb567eba5f7940d480c5ed3ec61781d38a1850f179b1196c39e6efa2bbd301f82c1bf1cd7807abc8fbd8fc8e44bcaa3975a124c0d1657 - languageName: node - linkType: hard - "spawn-wrap@npm:^2.0.0": version: 2.0.0 resolution: "spawn-wrap@npm:2.0.0" @@ -52409,7 +52566,7 @@ __metadata: languageName: node linkType: hard -"zod@npm:3.23.8": +"zod@npm:3.23.8, zod@npm:^3.23.8": version: 3.23.8 resolution: "zod@npm:3.23.8" checksum: 10/846fd73e1af0def79c19d510ea9e4a795544a67d5b34b7e1c4d0425bf6bfd1c719446d94cdfa1721c1987d891321d61f779e8236fde517dc0e524aa851a6eff1