diff --git a/.github/workflows/dockerhub-release-aio.yml b/.github/workflows/dockerhub-release-aio.yml
index e5a5a8251..ce0efb597 100644
--- a/.github/workflows/dockerhub-release-aio.yml
+++ b/.github/workflows/dockerhub-release-aio.yml
@@ -74,6 +74,7 @@ jobs:
           push: true
           build-args: |
             postgres_version=${{ needs.settings.outputs.base_docker_version }}
+            envoy_lds=lds.supabase.yaml
             ${{ needs.settings.outputs.build_args }}
           target: production
           tags: ${{ needs.settings.outputs.image_tag }}_${{ matrix.arch }}
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1457eb100..d22b16f16 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -17,6 +17,10 @@ jobs:
         uses: actions/checkout@v4

       - uses: DeterminateSystems/nix-installer-action@main
+        with:
+          extra-conf: |
+            substituters = https://cache.nixos.org https://nix-postgres-artifacts.s3.amazonaws.com
+            trusted-public-keys = nix-postgres-artifacts:dGZlQOvKcNEjvT7QEAJbcV6b6uk7VF/hWMjhYleiaLI=% cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=

       - name: Set PostgreSQL versions
         id: set-versions
@@ -30,9 +34,9 @@ jobs:
       matrix:
         postgres_version: ${{ fromJson(needs.prepare.outputs.postgres_versions) }}
         include:
-          - runner: [self-hosted, X64]
+          - runner: ubuntu-22.04
             arch: amd64
-          - runner: arm-runner
+          - runner: ubuntu-22.04
             arch: arm64
     runs-on: ${{ matrix.runner }}
     timeout-minutes: 180
@@ -40,9 +44,14 @@ jobs:
     env:
       POSTGRES_PORT: 5478
       POSTGRES_PASSWORD: password
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - uses: DeterminateSystems/nix-installer-action@main
+        with:
+          extra-conf: |
+            substituters = https://cache.nixos.org https://nix-postgres-artifacts.s3.amazonaws.com
+            trusted-public-keys = nix-postgres-artifacts:dGZlQOvKcNEjvT7QEAJbcV6b6uk7VF/hWMjhYleiaLI=% cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
+
       - name: Set PostgreSQL version environment variable
         run: echo "POSTGRES_MAJOR_VERSION=${{ matrix.postgres_version }}" >> $GITHUB_ENV
@@ -54,7 +63,7 @@ jobs:
       - name: Generate common-nix.vars.pkr.hcl
         run: |
-          PG_VERSION=$(sudo nix run nixpkgs#yq -- '.postgres_release["postgres'${{ matrix.postgres_version }}'"]' ansible/vars.yml)
+          PG_VERSION=$(nix run nixpkgs#yq -- '.postgres_release["postgres'${{ matrix.postgres_version }}'"]' ansible/vars.yml)
           PG_VERSION=$(echo $PG_VERSION | tr -d '"') # Remove any surrounding quotes
           echo 'postgres-version = "'$PG_VERSION'"' > common-nix.vars.pkr.hcl
           # Ensure there's a newline at the end of the file
@@ -67,96 +76,16 @@ jobs:
       - name: Generate args
         id: args
         run: |
-          ARGS=$(sudo nix run nixpkgs#yq -- 'to_entries | map(select(.value|type == "!!str")) | map(.key + "=" + .value) | join("\n")' ansible/vars.yml)
+          ARGS=$(nix run nixpkgs#yq -- 'to_entries | map(select(.value|type == "!!str")) | map(.key + "=" + .value) | join("\n")' ansible/vars.yml)
           echo "result<<EOF" >> $GITHUB_OUTPUT
           echo "$ARGS" >> $GITHUB_OUTPUT
           echo "EOF" >> $GITHUB_OUTPUT

-      - run: docker context create builders
-
-      - uses: docker/setup-buildx-action@v3
-        with:
-          endpoint: builders
-
-      - uses: docker/build-push-action@v5
-        with:
-          load: true
-          context: .
- file: Dockerfile-${{ env.PGMAJOR }} - target: production - build-args: | - ${{ steps.args.outputs.result }} - tags: supabase/postgres:${{ steps.settings.outputs.postgres-version }},supabase_postgres - cache-from: | - type=gha,scope=${{ github.ref_name }}-${{ steps.settings.outputs.postgres-version }}-${{ matrix.arch }} - type=gha,scope=${{ github.base_ref }}-${{ steps.settings.outputs.postgres-version }}-${{ matrix.arch }} - cache-to: type=gha,mode=max,scope=${{ github.ref_name }}-${{ steps.settings.outputs.postgres-version }}-${{ matrix.arch }} - - - name: Start Postgres - run: | - docker run --rm --pull=never \ - -e POSTGRES_PASSWORD=${{ env.POSTGRES_PASSWORD }} \ - -p ${{ env.POSTGRES_PORT }}:5432 \ - --name supabase_postgres \ - -d supabase/postgres:${{ steps.settings.outputs.postgres-version }} - - - name: Install psql - run: | - sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - - sudo apt update - sudo apt install -y --no-install-recommends postgresql-client-${{ env.PGMAJOR }} - - - name: Install pg_prove - run: sudo cpan -T TAP::Parser::SourceHandler::pgTAP - env: - SHELL: /bin/bash - - - name: Wait for healthy database - run: | - count=0 - until [ "$(docker inspect -f '{{.State.Health.Status}}' "$container")" == "healthy" ]; do - exit=$? - count=$((count + 1)) - if [ $count -ge "$retries" ]; then - echo "Retry $count/$retries exited $exit, no more retries left." - docker stop -t 2 "$container" - return $exit - fi - sleep 1; - done; - echo "$container container is healthy" - env: - retries: 20 - container: supabase_postgres - - - name: Run tests - run: pg_prove migrations/tests/test.sql - env: - PGHOST: localhost - PGPORT: ${{ env.POSTGRES_PORT }} - PGDATABASE: postgres - PGUSER: supabase_admin - PGPASSWORD: ${{ env.POSTGRES_PASSWORD }} - - - name: Check migrations are idempotent - run: | - for sql in ./migrations/db/migrations/*.sql; do - echo "$0: running $sql" - psql -v ON_ERROR_STOP=1 --no-password --no-psqlrc -f "$sql" - done - env: - PGHOST: localhost - PGPORT: ${{ env.POSTGRES_PORT }} - PGDATABASE: postgres - PGUSER: supabase_admin - PGPASSWORD: ${{ env.POSTGRES_PASSWORD }} - - - name: Update Dockerfile.dbmate version - run: | - sed -i 's/%VERSION%/${{ env.PGMAJOR }}/g' migrations/Dockerfile.dbmate - + #TODO PR Convert to develop branch flakeurl - name: verify schema.sql is committed run: | - docker compose -f migrations/docker-compose.yaml up db dbmate --abort-on-container-exit + GIT_SHA=${{github.sha}} + nix run github:supabase/postgres/${GIT_SHA}#dbmate-tool -- --version ${{ env.PGMAJOR }} if ! 
git diff --exit-code --quiet migrations/schema-${{ env.PGMAJOR }}.sql; then echo "Detected changes in schema.sql:" git diff migrations/schema-${{ env.PGMAJOR }}.sql diff --git a/.gitignore b/.gitignore index a6950d2ff..ae375b82a 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,5 @@ result* #IDE .idea/ .vscode/ + +db \ No newline at end of file diff --git a/ansible/files/envoy_config/lds.supabase.yaml b/ansible/files/envoy_config/lds.supabase.yaml new file mode 100644 index 000000000..2fc7cae13 --- /dev/null +++ b/ansible/files/envoy_config/lds.supabase.yaml @@ -0,0 +1,436 @@ +resources: + - '@type': type.googleapis.com/envoy.config.listener.v3.Listener + name: http_listener + address: + socket_address: + address: '::' + port_value: 80 + ipv4_compat: true + filter_chains: + - filters: &ref_1 + - name: envoy.filters.network.http_connection_manager + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + access_log: + - name: envoy.access_loggers.stdout + filter: + status_code_filter: + comparison: + op: GE + value: + default_value: 400 + runtime_key: unused + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog + generate_request_id: false + http_filters: + - name: envoy.filters.http.cors + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.cors.v3.Cors + - name: envoy.filters.http.rbac + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.rbac.v3.RBAC + rules: + action: DENY + policies: + api_key_missing: + permissions: + - any: true + principals: + - not_id: + or_ids: + ids: + - header: + name: apikey + present_match: true + - header: + name: ':path' + string_match: + contains: apikey= + api_key_not_valid: + permissions: + - any: true + principals: + - not_id: + or_ids: + ids: + - header: + name: apikey + string_match: + exact: anon_key + - header: + name: apikey + string_match: + exact: service_key + - header: + name: apikey + string_match: + exact: supabase_admin_key + - header: + name: ':path' + string_match: + contains: apikey=anon_key + - header: + name: ':path' + string_match: + contains: apikey=service_key + - header: + name: ':path' + string_match: + contains: apikey=supabase_admin_key + - name: envoy.filters.http.lua + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua + source_codes: + remove_apikey_and_empty_key_query_parameters: + inline_string: |- + function envoy_on_request(request_handle) + local path = request_handle:headers():get(":path") + request_handle + :headers() + :replace(":path", path:gsub("&=[^&]*", ""):gsub("?=[^&]*$", ""):gsub("?=[^&]*&", "?"):gsub("&apikey=[^&]*", ""):gsub("?apikey=[^&]*$", ""):gsub("?apikey=[^&]*&", "?")) + end + remove_empty_key_query_parameters: + inline_string: |- + function envoy_on_request(request_handle) + local path = request_handle:headers():get(":path") + request_handle + :headers() + :replace(":path", path:gsub("&=[^&]*", ""):gsub("?=[^&]*$", ""):gsub("?=[^&]*&", "?")) + end + - name: envoy.filters.http.compressor.brotli + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.compressor.v3.Compressor + response_direction_config: + common_config: + min_content_length: 100 + content_type: + - application/vnd.pgrst.object+json + - application/vnd.pgrst.array+json + - application/openapi+json + - application/geo+json + - text/csv + - application/vnd.pgrst.plan + - 
application/vnd.pgrst.object + - application/vnd.pgrst.array + - application/javascript + - application/json + - application/xhtml+xml + - image/svg+xml + - text/css + - text/html + - text/plain + - text/xml + disable_on_etag_header: true + request_direction_config: + common_config: + enabled: + default_value: false + runtime_key: request_compressor_enabled + compressor_library: + name: text_optimized + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.compression.brotli.compressor.v3.Brotli + - name: envoy.filters.http.compressor.gzip + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.compressor.v3.Compressor + response_direction_config: + common_config: + min_content_length: 100 + content_type: + - application/vnd.pgrst.object+json + - application/vnd.pgrst.array+json + - application/openapi+json + - application/geo+json + - text/csv + - application/vnd.pgrst.plan + - application/vnd.pgrst.object + - application/vnd.pgrst.array + - application/javascript + - application/json + - application/xhtml+xml + - image/svg+xml + - text/css + - text/html + - text/plain + - text/xml + disable_on_etag_header: true + request_direction_config: + common_config: + enabled: + default_value: false + runtime_key: request_compressor_enabled + compressor_library: + name: text_optimized + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.compression.gzip.compressor.v3.Gzip + - name: envoy.filters.http.router + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + dynamic_stats: false + local_reply_config: + mappers: + - filter: + and_filter: + filters: + - status_code_filter: + comparison: + value: + default_value: 403 + runtime_key: unused + - header_filter: + header: + name: ':path' + string_match: + prefix: /customer/v1/privileged/ + status_code: 401 + body: + inline_string: Unauthorized + headers_to_add: + - header: + key: WWW-Authenticate + value: Basic realm="Unknown" + - filter: + and_filter: + filters: + - status_code_filter: + comparison: + value: + default_value: 403 + runtime_key: unused + - header_filter: + header: + name: ':path' + string_match: + prefix: /metrics/aggregated + invert_match: true + status_code: 401 + body_format_override: + json_format: + message: >- + `apikey` request header or query parameter is either + missing or invalid. Double check your Supabase `anon` + or `service_role` API key. 
+ hint: '%RESPONSE_CODE_DETAILS%' + json_format_options: + sort_properties: false + merge_slashes: true + route_config: + name: route_config_0 + virtual_hosts: + - name: virtual_host_0 + domains: + - '*' + typed_per_filter_config: + envoy.filters.http.cors: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.cors.v3.CorsPolicy + allow_origin_string_match: + - safe_regex: + regex: \* + allow_methods: GET,HEAD,PUT,PATCH,POST,DELETE,OPTIONS,TRACE,CONNECT + allow_headers: apikey,authorization,x-client-info + max_age: '3600' + routes: + - match: + path: /health + direct_response: + status: 200 + body: + inline_string: Healthy + typed_per_filter_config: &ref_0 + envoy.filters.http.rbac: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.rbac.v3.RBACPerRoute + - match: + safe_regex: + google_re2: + max_program_size: 150 + regex: >- + /auth/v1/(verify|callback|authorize|sso/saml/(acs|metadata|slo)|\.well-known/(openid-configuration|jwks\.json)) + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: gotrue + regex_rewrite: + pattern: + regex: ^/auth/v1 + substitution: '' + retry_policy: + num_retries: 3 + retry_on: 5xx + timeout: 35s + typed_per_filter_config: *ref_0 + - match: + prefix: /auth/v1/ + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: gotrue + prefix_rewrite: / + timeout: 35s + - match: + prefix: /rest/v1/ + query_parameters: + - name: apikey + present_match: true + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: postgrest + prefix_rewrite: / + timeout: 125s + typed_per_filter_config: + envoy.filters.http.lua: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.lua.v3.LuaPerRoute + name: remove_apikey_and_empty_key_query_parameters + - match: + prefix: /rest/v1/ + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: postgrest + prefix_rewrite: / + timeout: 125s + typed_per_filter_config: + envoy.filters.http.lua: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.lua.v3.LuaPerRoute + name: remove_empty_key_query_parameters + - match: + prefix: /rest-admin/v1/ + query_parameters: + - name: apikey + present_match: true + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: postgrest_admin + prefix_rewrite: / + typed_per_filter_config: + envoy.filters.http.lua: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.lua.v3.LuaPerRoute + name: remove_apikey_and_empty_key_query_parameters + - match: + prefix: /rest-admin/v1/ + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: postgrest_admin + prefix_rewrite: / + - match: + path: /graphql/v1 + request_headers_to_add: + header: + key: Content-Profile + value: graphql_public + request_headers_to_remove: + - apikey + - sb-opk + route: + cluster: postgrest + prefix_rewrite: /rpc/graphql + timeout: 125s + - match: + prefix: /admin/v1/ + request_headers_to_remove: + - sb-opk + route: + cluster: admin_api + prefix_rewrite: / + timeout: 600s + - match: + prefix: /customer/v1/privileged/ + request_headers_to_remove: + - sb-opk + route: + cluster: admin_api + prefix_rewrite: /privileged/ + typed_per_filter_config: + envoy.filters.http.rbac: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.rbac.v3.RBACPerRoute + rbac: + rules: + action: DENY + policies: + basic_auth: + permissions: + - any: true + principals: + - header: + name: authorization + invert_match: true + string_match: + exact: Basic c2VydmljZV9yb2xlOnNlcnZpY2Vfa2V5 + treat_missing_header_as_empty: 
true + - match: + prefix: /metrics/aggregated + request_headers_to_remove: + - sb-opk + route: + cluster: admin_api + prefix_rewrite: /supabase-internal/metrics + typed_per_filter_config: + envoy.filters.http.rbac: + '@type': >- + type.googleapis.com/envoy.extensions.filters.http.rbac.v3.RBACPerRoute + rbac: + rules: + action: DENY + policies: + not_private_ip: + permissions: + - any: true + principals: + - not_id: + direct_remote_ip: + address_prefix: 10.0.0.0 + prefix_len: 8 + include_attempt_count_in_response: true + retry_policy: + num_retries: 5 + retry_back_off: + base_interval: 0.1s + max_interval: 1s + retry_on: gateway-error + stat_prefix: ingress_http + - '@type': type.googleapis.com/envoy.config.listener.v3.Listener + name: https_listener + address: + socket_address: + address: '::' + port_value: 443 + ipv4_compat: true + filter_chains: + - filters: *ref_1 + transport_socket: + name: envoy.transport_sockets.tls + typed_config: + '@type': >- + type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext + common_tls_context: + tls_certificates: + - certificate_chain: + filename: /etc/envoy/fullChain.pem + private_key: + filename: /etc/envoy/privKey.pem + diff --git a/ansible/tasks/setup-supabase-internal.yml b/ansible/tasks/setup-supabase-internal.yml index aea3a78c7..7aa931763 100644 --- a/ansible/tasks/setup-supabase-internal.yml +++ b/ansible/tasks/setup-supabase-internal.yml @@ -114,3 +114,6 @@ import_tasks: internal/install-salt.yml tags: - aws-only + +- name: Envoy - use lds.supabase.yaml for /etc/envoy/lds.yaml + command: mv /etc/envoy/lds.supabase.yaml /etc/envoy/lds.yaml diff --git a/ansible/vars.yml b/ansible/vars.yml index 113eb3035..1209c29c7 100644 --- a/ansible/vars.yml +++ b/ansible/vars.yml @@ -11,8 +11,8 @@ postgres_major: # Full version strings for each major version postgres_release: - postgres15: "15.8.1.010-staging6" - postgres16: "16.3.1.016-staging6" + postgres15: "15.8.1.012" + postgres16: "16.3.1.018" # Non Postgres Extensions pgbouncer_release: "1.19.0" diff --git a/docker/all-in-one/Dockerfile b/docker/all-in-one/Dockerfile index 70082bfb1..d9e5f3fa2 100644 --- a/docker/all-in-one/Dockerfile +++ b/docker/all-in-one/Dockerfile @@ -227,9 +227,12 @@ COPY docker/all-in-one/etc/gotrue.env /etc/gotrue.env # Customizations for envoy ARG envoy_release +ARG envoy_lds="lds.yaml" ADD --chmod=755 --chown=envoy:envoy "https://raw.githubusercontent.com/envoyproxy/envoy/v${envoy_release}/restarter/hot-restarter.py" /opt/envoy-hot-restarter.py COPY --chmod=775 --chown=envoy:envoy ansible/files/envoy_config/ /etc/envoy/ COPY --chmod=755 --chown=envoy:envoy ansible/files/start-envoy.sh /opt/ +RUN mv /etc/envoy/${envoy_lds} /etc/envoy/lds.yaml +RUN rm -f /etc/envoy/lds.supabase.yaml # Customizations for kong COPY docker/all-in-one/etc/kong/kong.conf /etc/kong/kong.conf diff --git a/flake.nix b/flake.nix index b66ec3449..71b87d580 100644 --- a/flake.nix +++ b/flake.nix @@ -530,7 +530,7 @@ chmod +x $out/bin/pg-restore ''; sync-exts-versions = pkgs.runCommand "sync-exts-versions" { } '' - mkdir -p $out/bin + mkdir -p $out/bin substitute ${./nix/tools/sync-exts-versions.sh.in} $out/bin/sync-exts-versions \ --subst-var-by 'YQ' '${pkgs.yq}/bin/yq' \ --subst-var-by 'JQ' '${pkgs.jq}/bin/jq' \ @@ -539,8 +539,42 @@ --subst-var-by 'NIX' '${pkgs.nixVersions.nix_2_20}/bin/nix' chmod +x $out/bin/sync-exts-versions ''; + dbmate-tool = + let + migrationsDir = ./migrations/db; + ansibleVars = ./ansible/vars.yml; + pgbouncerAuthSchemaSql = 
./ansible/files/pgbouncer_config/pgbouncer_auth_schema.sql; + statExtensionSql = ./ansible/files/stat_extension.sql; + in + pkgs.runCommand "dbmate-tool" { + buildInputs = with pkgs; [ + overmind + dbmate + nix + jq + yq + ]; + nativeBuildInputs = with pkgs; [ + makeWrapper + ]; + } '' + mkdir -p $out/bin $out/migrations + cp -r ${migrationsDir}/* $out + substitute ${./nix/tools/dbmate-tool.sh.in} $out/bin/dbmate-tool \ + --subst-var-by 'PGSQL_DEFAULT_PORT' '${pgsqlDefaultPort}' \ + --subst-var-by 'MIGRATIONS_DIR' $out \ + --subst-var-by 'PGSQL_SUPERUSER' '${pgsqlSuperuser}' \ + --subst-var-by 'ANSIBLE_VARS' ${ansibleVars} \ + --subst-var-by 'CURRENT_SYSTEM' '${system}' \ + --subst-var-by 'PGBOUNCER_AUTH_SCHEMA_SQL' '${pgbouncerAuthSchemaSql}' \ + --subst-var-by 'STAT_EXTENSION_SQL' '${statExtensionSql}' + chmod +x $out/bin/dbmate-tool + wrapProgram $out/bin/dbmate-tool \ + --prefix PATH : ${pkgs.lib.makeBinPath [ pkgs.overmind pkgs.dbmate pkgs.nix pkgs.jq pkgs.yq ]} + ''; }; + # Create a testing harness for a PostgreSQL package. This is used for # 'nix flake check', and works with any PostgreSQL package you hand it. makeCheckHarness = pgpkg: @@ -654,9 +688,11 @@ start-server = mkApp "start-server" "start-postgres-server"; start-client = mkApp "start-client" "start-postgres-client"; start-replica = mkApp "start-replica" "start-postgres-replica"; - migration-test = mkApp "migrate-tool" "migrate-postgres"; + migrate-postgres = mkApp "migrate-tool" "migrate-postgres"; sync-exts-versions = mkApp "sync-exts-versions" "sync-exts-versions"; pg-restore = mkApp "pg-restore" "pg-restore"; + dbmate-tool = mkApp "dbmate-tool" "dbmate-tool"; + migration-unit-tests = mkApp "migration-unit-tests" "migration-unit-tests"; }; # 'devShells.default' lists the set of packages that are included in the @@ -695,6 +731,7 @@ basePackages.start-replica basePackages.migrate-tool basePackages.sync-exts-versions + dbmate ]; shellHook = '' export HISTFILE=.history diff --git a/migrations/README.md b/migrations/README.md index bfd7308d4..19d2bf4b3 100644 --- a/migrations/README.md +++ b/migrations/README.md @@ -1,3 +1,36 @@ +# Usage + +from the root of the `supabase/postgres` project, you can run the following commands: + + +```shell +Usage: nix run .#dbmate-tool -- [options] + +Options: + -v, --version [15|16|orioledb-17|all] Specify the PostgreSQL version to use (required defaults to --version all) + -p, --port PORT Specify the port number to use (default: 5435) + -h, --help Show this help message + +Description: + Runs 'dbmate up' against a locally running the version of database you specify. Or 'all' to run against all versions. + NOTE: To create a migration, you must run 'nix develop' and then 'dbmate new ' to create a new migration file. + +Examples: + nix run .#dbmate-tool + nix run .#dbmate-tool -- --version 15 + nix run .#dbmate-tool -- --version 16 --port 5433 + +``` + +This can also be run from a github "flake url" for example: + +```shell +nix run github:supabase/postgres#dbmate-tool -- --version 15 + +or + +nix run github:supabase/postgres/mybranch#dbmate-tool -- --version 15 +``` # supabase/migrations `supabase/migrations` is a consolidation of SQL migrations from: @@ -9,6 +42,8 @@ aiming to provide a single source of truth for migrations on the platform that can be depended upon by those components. 
For more information on goals see [the RFC](https://www.notion.so/supabase/Centralize-SQL-Migrations-cd3847ae027d4f2bba9defb2cc82f69a) + + ## How it was Created Migrations were pulled (in order) from: @@ -20,10 +55,12 @@ For compatibility with hosted projects, we include [migrate.sh](migrate.sh) that 1. Run all `db/init-scripts` with `postgres` superuser role. 2. Run all `db/migrations` with `supabase_admin` superuser role. -3. Finalize role passwords with `/etc/postgres.schema.sql` if present. +3. Finalize role passwords with `/etc/postgresql.schema.sql` if present. Additionally, [supabase/postgres](https://github.com/supabase/postgres/blob/develop/ansible/playbook-docker.yml#L9) image contains several migration scripts to configure default extensions. These are run first by docker entrypoint and included in ami by ansible. + + ## Guidelines - Migrations are append only. Never edit existing migrations once they are on master. diff --git a/nix/ext/pg_graphql.nix b/nix/ext/pg_graphql.nix index 1a900a821..f54dff46a 100644 --- a/nix/ext/pg_graphql.nix +++ b/nix/ext/pg_graphql.nix @@ -1,18 +1,18 @@ -{ lib, stdenv, fetchFromGitHub, postgresql, buildPgrxExtension_0_11_3, cargo, rust-bin }: +{ lib, stdenv, fetchFromGitHub, postgresql, buildPgrxExtension_0_12_6, cargo, rust-bin }: let - rustVersion = "1.76.0"; + rustVersion = "1.80.0"; cargo = rust-bin.stable.${rustVersion}.default; in -buildPgrxExtension_0_11_3 rec { +buildPgrxExtension_0_12_6 rec { pname = "pg_graphql"; - version = "1.5.7"; + version = "1.5.9"; inherit postgresql; src = fetchFromGitHub { owner = "supabase"; repo = pname; rev = "v${version}"; - hash = "sha256-Q6XfcTKVOjo5pGy8QACc4QCHolKxEGU8e0TTC6Zg8go="; + hash = "sha256-YpLN43FtLhp2cb7cyM+4gEx8GTwsRiKTfxaMq0b8hk0="; }; nativeBuildInputs = [ cargo ]; @@ -25,7 +25,7 @@ buildPgrxExtension_0_11_3 rec { RUSTFLAGS = "-C link-arg=-undefined -C link-arg=dynamic_lookup"; PGPORT = "5434"; }; - cargoHash = "sha256-WkHufMw8OvinMRYd06ZJACnVvY9OLi069nCgq3LSmMY="; + cargoHash = "sha256-d2RSHtJgbYlOvArjOTaeYoca01UyWPUEO5vhktxxB6U="; # FIXME (aseipp): disable the tests since they try to install .control # files into the wrong spot, aside from that the one main test seems diff --git a/nix/tools/dbmate-tool.sh.in b/nix/tools/dbmate-tool.sh.in new file mode 100644 index 000000000..b7aa0d26e --- /dev/null +++ b/nix/tools/dbmate-tool.sh.in @@ -0,0 +1,273 @@ +#!/usr/bin/env bash +# shellcheck shell=bash + +[ ! -z "$DEBUG" ] && set -x + +# Default values +PSQL_VERSION="ALL" +PORTNO="@PGSQL_DEFAULT_PORT@" +PGSQL_SUPERUSER="@PGSQL_SUPERUSER@" +PGPASSWORD="${PGPASSWORD:-postgres}" +PGSQL_USER="postgres" +FLAKE_URL="github:supabase/postgres" +MIGRATIONS_DIR="@MIGRATIONS_DIR@" +CURRENT_SYSTEM="@CURRENT_SYSTEM@" +ANSIBLE_VARS="@ANSIBLE_VARS@" +PGBOUNCER_AUTH_SCHEMA_SQL=@PGBOUNCER_AUTH_SCHEMA_SQL@ +STAT_EXTENSION_SQL=@STAT_EXTENSION_SQL@ +# Cleanup function +cleanup() { + echo "Cleaning up..." + + # Kill overmind processes first + if [ -S "./.overmind.sock" ]; then + overmind kill || true + sleep 2 + fi + + # Kill any remaining postgres processes + echo "Killing any remaining postgres processes..." + pkill -9 postgres || true + pkill -9 -f "tmux.*overmind.*postgresql" || true + + # Extra cleanup for tmux sessions + tmux ls 2>/dev/null | grep 'overmind' | cut -d: -f1 | xargs -I{} tmux kill-session -t {} || true + + # Remove socket and Procfile + rm -f .overmind.sock Procfile + + # Verify cleanup + remaining=$(ps aux | grep -E "(postgres|overmind|tmux.*postgresql)" | grep -v grep || true) + if [ ! 
-z "$remaining" ]; then + echo "Warning: Some processes might still be running:" + echo "$remaining" + fi +} + +# Set up trap for cleanup on script exit + +# Function to display help +print_help() { + echo "Usage: nix run .#dbmate-tool -- [options]" + echo + echo "Options:" + echo " -v, --version [15|16|orioledb-17|all] Specify the PostgreSQL version to use (required defaults to --version all)" + echo " -p, --port PORT Specify the port number to use (default: 5435)" + echo " -h, --help Show this help message" + echo + echo "Description:" + echo " Runs 'dbmate up' against a locally running the version of database you specify. Or 'all' to run against all versions." + echo " NOTE: To create a migration, you must run 'nix develop' and then 'dbmate new ' to create a new migration file." + echo + echo "Examples:" + echo " nix run .#dbmate-tool" + echo " nix run .#dbmate-tool -- --version 15" + echo " nix run .#dbmate-tool -- --version 16 --port 5433" +} + + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case "$1" in + -v|--version) + if [[ -n "$2" && ! "$2" =~ ^- ]]; then + PSQL_VERSION="$2" + shift 2 + else + echo "Error: --version requires an argument (15, 16, or orioledb-17)" + exit 1 + fi + ;; + -u|--user) + if [[ -n "$2" && ! "$2" =~ ^- ]]; then + PGSQL_USER="$2" + shift 2 + else + echo "Error: --user requires an argument" + exit 1 + fi + ;; + -f|--flake-url) + if [[ -n "$2" && ! "$2" =~ ^- ]]; then + FLAKE_URL="$2" + shift 2 + else + echo "Error: --flake-url requires an argument" + exit 1 + fi + ;; + -p|--port) + if [[ -n "$2" && ! "$2" =~ ^- ]]; then + PORTNO="$2" + shift 2 + else + echo "Error: --port requires an argument" + exit 1 + fi + ;; + -h|--help) + print_help + exit 0 + ;; + *) + echo "Unknown option: $1" + print_help + exit 1 + ;; + esac +done + +# Function to wait for PostgreSQL to be ready +wait_for_postgres() { + local max_attempts=30 # Increased significantly + local attempt=1 + + # Give overmind a moment to actually start the process + sleep 2 + + while [ $attempt -le $max_attempts ]; do + "${PSQLBIN}/pg_isready" -h localhost -p "$PORTNO" -U "$PGSQL_SUPERUSER" -d postgres + local status=$? + + if [ $status -eq 0 ]; then + echo "PostgreSQL is ready!" + return 0 + fi + echo "Waiting for PostgreSQL to start (attempt $attempt/$max_attempts)..." + sleep 2 + attempt=$((attempt + 1)) + done + + echo "PostgreSQL failed to start after $max_attempts attempts" + overmind echo postgres + return 1 +} + +trim_schema() { + case "$CURRENT_SYSTEM" in + "x86_64-darwin"|"aarch64-darwin") + sed -i '' '/INSERT INTO public.schema_migrations/,$d' "./db/schema.sql" + echo "Matched: $CURRENT_SYSTEM" + ;; + *) + sed -i '/INSERT INTO public.schema_migrations/,$d' "./db/schema.sql" + ;; + esac +} +overmind_start() { + cat > Procfile << EOF +postgres_${PSQL_VERSION}: exec nix run "$FLAKE_URL#start-server" "$PSQL_VERSION" +EOF + overmind start -D + echo "Waiting for overmind socket..." 
+ max_wait=5 + count=0 + while [ $count -lt $max_wait ]; do + if [ -S "./.overmind.sock" ]; then + # Found the socket, give it a moment to be ready + sleep 2 + echo "Socket file found and ready" + break + fi + echo "Waiting for socket file (attempt $count/$max_wait)" + sleep 1 + count=$((count + 1)) + done +} +migrate_version() { + echo "PSQL_VERSION: $PSQL_VERSION" + overmind kill || true + rm -f .overmind.sock Procfile || true + PSQLBIN=$(nix build --no-link "$FLAKE_URL#psql_$PSQL_VERSION/bin" --json | jq -r '.[].outputs.out + "/bin"') + echo "Using PostgreSQL version $PSQL_VERSION from $PSQLBIN" + + # Start overmind + overmind_start + echo "Waiting for overmind socket..." + + + echo "Waiting for PostgreSQL to be ready..." + + #Wait for PostgreSQL to be ready to accept connections + if ! wait_for_postgres; then + echo "Failed to connect to PostgreSQL server" + exit 1 + fi + + echo "PostgreSQL server is ready" + + # Configure PostgreSQL roles and permissions + if ! "${PSQLBIN}/psql" -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U "$PGSQL_SUPERUSER" -p "$PORTNO" -h localhost -d postgres <<-EOSQL +create role postgres superuser login password '$PGPASSWORD'; +alter database postgres owner to postgres; +EOSQL + then + echo "Failed to configure PostgreSQL roles and permissions" + exit 1 + fi + "${PSQLBIN}/psql" -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U postgres -p "$PORTNO" -h localhost -d postgres -f "$PGBOUNCER_AUTH_SCHEMA_SQL" + "${PSQLBIN}/psql" -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U postgres -p "$PORTNO" -h localhost -d postgres -f "$STAT_EXTENSION_SQL" + + #set db url to run dbmate + export DATABASE_URL="postgres://$PGSQL_USER:$PGPASSWORD@localhost:$PORTNO/postgres?sslmode=disable" + #export path so dbmate can find correct psql and pg_dump + export PATH="$PSQLBIN:$PATH" + # run init scripts + if ! dbmate --migrations-dir "$MIGRATIONS_DIR/init-scripts" up; then + echo "Error: Initial migration failed" + exit 1 + fi + + # Password update command + if ! "${PSQLBIN}/psql" -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U postgres -p "$PORTNO" -h localhost -c "ALTER USER supabase_admin WITH PASSWORD '$PGPASSWORD'"; then + echo "Error: Failed to update supabase_admin password" + exit 1 + fi + + # Set up database URL + export DATABASE_URL="postgres://$PGSQL_SUPERUSER:$PGPASSWORD@localhost:$PORTNO/postgres?sslmode=disable" + # Run migrations + if ! 
dbmate --migrations-dir "$MIGRATIONS_DIR/migrations" up; then + echo "Error: Final migration failed" + exit 1 + fi + + echo "Running dbmate dump with $PSQLBIN" + dbmate dump + + echo "CURRENT_SYSTEM: $CURRENT_SYSTEM" + if [ -f "./db/schema.sql" ]; then + trim_schema + cp "./db/schema.sql" "./migrations/schema-$PSQL_VERSION.sql" + echo "Schema file moved to ./migrations/schema-$PSQL_VERSION.sql" + echo "PSQLBIN is $PSQLBIN" + else + echo "Warning: schema.sql file not found in ./db directory" + exit 1 + fi + + # If we get here, all commands succeeded + echo "PostgreSQL migration completed successfully" + echo "Check migrations are idempotent" + for sql in ./migrations/db/migrations/*.sql; do + echo "$0: running $sql" + "${PSQLBIN}/psql" -v ON_ERROR_STOP=1 --no-password --no-psqlrc -U "$PGSQL_SUPERUSER" -p "$PORTNO" -h localhost -d postgres -f "$sql" || { + echo "Failed to execute $sql" + exit 1 + } + done +} + +if [ "$PSQL_VERSION" == "all" ]; then + VERSIONS=$(yq '.postgres_major[]' "$ANSIBLE_VARS" | tr -d '"') + echo "$VERSIONS" | while read -r version; do + PSQL_VERSION="$version" + echo "Migrating to PostgreSQL version $PSQL_VERSION" + migrate_version + cleanup + done +else + echo "Migrating to PostgreSQL version $PSQL_VERSION" + migrate_version + cleanup +fi diff --git a/nix/tools/run-server.sh.in b/nix/tools/run-server.sh.in index 977a437fb..2b079bbf1 100644 --- a/nix/tools/run-server.sh.in +++ b/nix/tools/run-server.sh.in @@ -38,7 +38,6 @@ export LANGUAGE=en_US.UTF-8 export LC_ALL=en_US.UTF-8 export LANG=en_US.UTF-8 export LC_CTYPE=en_US.UTF-8 -mkdir -p "$DATDIR" echo "NOTE: using port $PORTNO for server" echo "NOTE: using temporary directory $DATDIR for data, which will not be removed" echo "NOTE: you are free to re-use this data directory at will"
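
The new CI step above regenerates `migrations/schema-<major>.sql` with the `dbmate-tool` flake app and fails if the committed copy is stale. A minimal local sketch of the same check, assuming a checkout of `supabase/postgres` with Nix flakes enabled (the major version 15 is only illustrative):

```shell
# Regenerate the schema dump for one major version using the flake app, then
# fail if the committed schema file no longer matches. This mirrors the
# "verify schema.sql is committed" step in .github/workflows/test.yml.
nix run .#dbmate-tool -- --version 15

if ! git diff --exit-code --quiet migrations/schema-15.sql; then
  echo "schema-15.sql is out of date; commit the regenerated file" >&2
  git diff migrations/schema-15.sql
  exit 1
fi
```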
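The `envoy_lds` build argument threaded through `dockerhub-release-aio.yml` and `docker/all-in-one/Dockerfile` selects which file from `ansible/files/envoy_config/` ends up as `/etc/envoy/lds.yaml` inside the image. A hedged sketch of a local build exercising it (the image tag and the `postgres_version` value are placeholders, not values taken from this PR):

```shell
# Build the all-in-one image with the Supabase listener config instead of the
# default lds.yaml. The Dockerfile copies ansible/files/envoy_config/ into
# /etc/envoy/ and then renames ${envoy_lds} to lds.yaml.
docker build \
  -f docker/all-in-one/Dockerfile \
  --target production \
  --build-arg postgres_version=15.8.1.012 \
  --build-arg envoy_lds=lds.supabase.yaml \
  -t supabase/postgres-aio:local-test \
  .
```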
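The RBAC policies in `lds.supabase.yaml` deny any request that carries no `apikey` (header or query parameter) or an `apikey` that does not match one of the placeholder keys substituted at provision time. A rough illustration with `curl`, assuming the listener is reachable on localhost port 80 and the placeholder keys have not yet been replaced:

```shell
# No apikey at all -> rejected by the api_key_missing policy.
curl -i http://localhost/rest/v1/

# Unknown apikey -> rejected by the api_key_not_valid policy.
curl -i -H "apikey: not-a-real-key" http://localhost/rest/v1/

# Matching placeholder key -> allowed through and routed to the postgrest cluster.
curl -i -H "apikey: anon_key" http://localhost/rest/v1/
```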