From a596c3c0ea0d2c9f3a2a578e6ecb4586dd36a137 Mon Sep 17 00:00:00 2001 From: Amina <10723626+0xawaz@users.noreply.github.com> Date: Mon, 25 Nov 2024 15:22:22 +0100 Subject: [PATCH] build: add fhevm db migration image (#136) * build: add fhevm-db-migration image * build: add db migration to docker-compose * fix: insert test tenant binary keys instead of paths * fix: use COPY for large binary files * build: add db-migration docker image * build: fix hadolint warn * build: enable push --- .github/workflows/fhevm-db-migration.yml | 46 ++++++++++++++ .hadolint.yaml | 3 + fhevm-engine/coprocessor/Makefile | 4 ++ fhevm-engine/coprocessor/docker-compose.yml | 57 ++++++++++++----- fhevm-engine/fhevm-db/Dockerfile | 19 ++++++ fhevm-engine/fhevm-db/initialize_db.sh | 31 ++++++++++ .../migrations/20240722111257_coprocessor.sql | 62 +++++++++++++++++++ 7 files changed, 208 insertions(+), 14 deletions(-) create mode 100644 .github/workflows/fhevm-db-migration.yml create mode 100644 .hadolint.yaml create mode 100644 fhevm-engine/fhevm-db/Dockerfile create mode 100644 fhevm-engine/fhevm-db/initialize_db.sh create mode 100644 fhevm-engine/fhevm-db/migrations/20240722111257_coprocessor.sql diff --git a/.github/workflows/fhevm-db-migration.yml b/.github/workflows/fhevm-db-migration.yml new file mode 100644 index 00000000..a4bef461 --- /dev/null +++ b/.github/workflows/fhevm-db-migration.yml @@ -0,0 +1,46 @@ +name: "fhEVM DB migration Docker Image" + +on: + push: + branches: + - main + - ci/db-migration + paths: + - .github/workflows/fhevm-db-migration.yml + - .github/workflows/common-docker.yml + - fhevm-engine/fhevm-db/** + release: + types: + - published + +concurrency: + group: fhevm-db-migration-${{ github.ref_name }} + cancel-in-progress: false + +jobs: + docker-coprocessor: + uses: ./.github/workflows/common-docker.yml + permissions: + contents: "read" + id-token: "write" + packages: "write" + with: + working-directory: "." 
+ push_image: true + image-name: "fhevm-db-migration" + generate-dev-image: false + docker-file: "fhevm-engine/fhevm-db/Dockerfile" + arm-build: true + + secrets: + BLOCKCHAIN_ACTIONS_TOKEN: ${{ secrets.BLOCKCHAIN_ACTIONS_TOKEN }} + GRAVITON_BUILDER_SSH_PRIVATE_KEY: ${{ secrets.GRAVITON_BUILDER_SSH_PRIVATE_KEY }} + + done: + runs-on: ubuntu-latest + name: Pipeline Done + steps: + - name: Success + run: echo Pipeline Done + needs: + - docker-coprocessor diff --git a/.hadolint.yaml b/.hadolint.yaml new file mode 100644 index 00000000..6454064c --- /dev/null +++ b/.hadolint.yaml @@ -0,0 +1,3 @@ +ignored: + - DL3018 + - DL3008 \ No newline at end of file diff --git a/fhevm-engine/coprocessor/Makefile b/fhevm-engine/coprocessor/Makefile index 7b1d62fb..004515bc 100644 --- a/fhevm-engine/coprocessor/Makefile +++ b/fhevm-engine/coprocessor/Makefile @@ -17,6 +17,10 @@ init_db: $(DB_URL) sqlx migrate run $(DB_URL) cargo test setup_test_user -- --nocapture --ignored +.PHONY: run +run: + docker compose up -d + .PHONY: recreate_db recreate_db: $(MAKE) cleanup diff --git a/fhevm-engine/coprocessor/docker-compose.yml b/fhevm-engine/coprocessor/docker-compose.yml index ffaed7ac..aed104ef 100644 --- a/fhevm-engine/coprocessor/docker-compose.yml +++ b/fhevm-engine/coprocessor/docker-compose.yml @@ -1,37 +1,66 @@ -version: '3.8' +name: fhevm + services: db: + container_name: db image: postgres:15.7 restart: always environment: - - POSTGRES_USER=postgres - - POSTGRES_PASSWORD=postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres ports: - '5432:5432' - volumes: + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 3 + volumes: - db:/var/lib/postgresql/data - coproc: - image: ghcr.io/zama-ai/fhevm-coprocessor:v7 + + migration: + container_name: migration + image: ghcr.io/zama-ai/fhevm-db-migration:v1.1 + environment: + DATABASE_URL: postgresql://postgres:postgres@db:5432/coprocessor + depends_on: + db: + condition: 
service_healthy + + coprocessor: + container_name: coprocessor + image: ghcr.io/zama-ai/fhevm-coprocessor:v0.1.0-3 environment: - - DATABASE_URL=postgresql://postgres:postgres@db:5432/coprocessor + DATABASE_URL: postgresql://postgres:postgres@db:5432/coprocessor ports: - '50051:50051' volumes: - - ${PWD}/coprocessor.key:/usr/share/coprocessor.key + - ./coprocessor.key:/usr/share/coprocessor.key command: - --run-bg-worker - --run-server - --server-addr=0.0.0.0:50051 - --coprocessor-private-key=/usr/share/coprocessor.key + depends_on: + migration: + condition: service_completed_successfully + geth: - image: ghcr.io/zama-ai/geth-coprocessor-devnode:v6 + container_name: geth + image: ghcr.io/zama-ai/geth-coprocessor-devnode:v8 environment: - - FHEVM_COPROCESSOR_API_KEY=a1503fb6-d79b-4e9e-826d-44cf262f3e05 - - FHEVM_COPROCESSOR_URL=coproc:50051 - - COPROCESSOR_CONTRACT_ADDRESS=0x6819e3aDc437fAf9D533490eD3a7552493fCE3B1 - - ACL_CONTRACT_ADDRESS=0x339EcE85B9E11a3A3AA557582784a15d7F82AAf2 + FHEVM_COPROCESSOR_API_KEY: a1503fb6-d79b-4e9e-826d-44cf262f3e05 + FHEVM_COPROCESSOR_URL: coprocessor:50051 + COPROCESSOR_CONTRACT_ADDRESS: 0x6819e3aDc437fAf9D533490eD3a7552493fCE3B1 + ACL_CONTRACT_ADDRESS: 0x339EcE85B9E11a3A3AA557582784a15d7F82AAf2 + volumes: + - ./geth:/geth ports: - '8745:8545' + depends_on: + coprocessor: + condition: service_started + volumes: db: - driver: local + driver: local \ No newline at end of file diff --git a/fhevm-engine/fhevm-db/Dockerfile b/fhevm-engine/fhevm-db/Dockerfile new file mode 100644 index 00000000..9b4343db --- /dev/null +++ b/fhevm-engine/fhevm-db/Dockerfile @@ -0,0 +1,19 @@ +# Use the Rust image as the base +FROM rust:1.74 + +# Install dependencies and tools +RUN apt-get update && \ + apt-get install -y --no-install-recommends libpq-dev postgresql-client xxd && \ + cargo install sqlx-cli --no-default-features --features postgres --locked && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +# Copy migrations and initialization script +COPY 
fhevm-engine/fhevm-db/initialize_db.sh /initialize_db.sh +COPY fhevm-engine/fhevm-db/migrations /migrations +COPY fhevm-engine/fhevm-keys /fhevm-keys + +# Make the script executable +RUN chmod +x /initialize_db.sh + +# Run the initialization script as the entrypoint +ENTRYPOINT ["/bin/bash", "/initialize_db.sh"] diff --git a/fhevm-engine/fhevm-db/initialize_db.sh b/fhevm-engine/fhevm-db/initialize_db.sh new file mode 100644 index 00000000..20400c40 --- /dev/null +++ b/fhevm-engine/fhevm-db/initialize_db.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# 1: Create Database +echo "Creating database..." +sqlx database create || { echo "Failed to create database."; exit 1; } + +# 2: Run sqlx migrations +echo "Running migrations..." +sqlx migrate run --source /migrations || { echo "Failed to run migrations."; exit 1; } + +# 3. Insert test tenant with keys +echo "Start preparing tenant query..." +TENANT_API_KEY=a1503fb6-d79b-4e9e-826d-44cf262f3e05 +CHAIN_ID=12345 +ACL_CONTRACT_ADDRESS=0x339EcE85B9E11a3A3AA557582784a15d7F82AAf2 +INPUT_VERIFIER_ADDRESS=0x69dE3158643e738a0724418b21a35FAA20CBb1c5 +PKS_FILE="/fhevm-keys/pks" +SKS_FILE="/fhevm-keys/sks" +PUBLIC_PARAMS_FILE="/fhevm-keys/pp" + +TMP_CSV="/tmp/tenant_data.csv" +echo "tenant_api_key,chain_id,acl_contract_address,verifying_contract_address,pks_key,sks_key,public_params" > $TMP_CSV + +echo "$TENANT_API_KEY,$CHAIN_ID,$ACL_CONTRACT_ADDRESS,$INPUT_VERIFIER_ADDRESS,\"\\x$(cat $PKS_FILE | xxd -p | tr -d '\n')\",\"\\x$(cat $SKS_FILE | xxd -p | tr -d '\n')\",\"\\x$(cat $PUBLIC_PARAMS_FILE | xxd -p | tr -d '\n')\"" >> $TMP_CSV + +echo "Inserting tenant data using \COPY..." +psql $DATABASE_URL -c "\COPY tenants (tenant_api_key, chain_id, acl_contract_address, verifying_contract_address, pks_key, sks_key, public_params) FROM '$TMP_CSV' CSV HEADER;" + +rm -f $TMP_CSV + +echo "Database initialization complete." 
\ No newline at end of file diff --git a/fhevm-engine/fhevm-db/migrations/20240722111257_coprocessor.sql b/fhevm-engine/fhevm-db/migrations/20240722111257_coprocessor.sql new file mode 100644 index 00000000..25b2b94b --- /dev/null +++ b/fhevm-engine/fhevm-db/migrations/20240722111257_coprocessor.sql @@ -0,0 +1,62 @@ + +CREATE TABLE IF NOT EXISTS computations ( + tenant_id INT NOT NULL, + output_handle BYTEA NOT NULL, + output_type SMALLINT NOT NULL, + -- can be handle or scalar, depends on is_scalar field + -- only second dependency can ever be scalar + dependencies BYTEA[] NOT NULL, + fhe_operation SMALLINT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + completed_at TIMESTAMP, + is_scalar BOOLEAN NOT NULL, + is_completed BOOLEAN NOT NULL DEFAULT 'f', + is_error BOOLEAN NOT NULL DEFAULT 'f', + error_message TEXT, + PRIMARY KEY (tenant_id, output_handle) +); + +CREATE TABLE IF NOT EXISTS ciphertexts ( + tenant_id INT NOT NULL, + handle BYTEA NOT NULL, + ciphertext BYTEA NOT NULL, + ciphertext_version SMALLINT NOT NULL, + ciphertext_type SMALLINT NOT NULL, + -- if ciphertext came from blob we have its reference + input_blob_hash BYTEA, + input_blob_index INT NOT NULL DEFAULT 0, + created_at TIMESTAMP DEFAULT NOW(), + PRIMARY KEY (tenant_id, handle, ciphertext_version) +); + +-- store for audits and historical reference +CREATE TABLE IF NOT EXISTS input_blobs ( + tenant_id INT NOT NULL, + blob_hash BYTEA NOT NULL, + blob_data BYTEA NOT NULL, + blob_ciphertext_count INT NOT NULL, + created_at TIMESTAMP DEFAULT NOW(), + PRIMARY KEY (tenant_id, blob_hash) +); + +CREATE TABLE IF NOT EXISTS tenants ( + tenant_id SERIAL PRIMARY KEY, + tenant_api_key UUID NOT NULL DEFAULT gen_random_uuid(), + -- for EIP712 signatures + chain_id INT NOT NULL, + -- for EIP712 signatures + verifying_contract_address TEXT NOT NULL, + acl_contract_address TEXT NOT NULL, + pks_key BYTEA NOT NULL, + sks_key BYTEA NOT NULL, + public_params BYTEA NOT NULL, + -- for debugging, can be null 
+ cks_key BYTEA, + -- admin api key is allowed to create more tenants with their keys + is_admin BOOLEAN DEFAULT 'f' +); + +CREATE INDEX IF NOT EXISTS computations_dependencies_index ON computations USING GIN (dependencies); +CREATE INDEX IF NOT EXISTS computations_completed_index ON computations (is_completed); +CREATE INDEX IF NOT EXISTS computations_errors_index ON computations (is_error); +CREATE UNIQUE INDEX IF NOT EXISTS tenants_by_api_key ON tenants (tenant_api_key); \ No newline at end of file