diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 822e09c9dc..f56413c49f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -38,293 +38,27 @@ env: jobs: ######################## - # SQLC code gen check + # run postgres integration tests with SMALL_TRANCHES ######################## - sqlc-check: - name: Sqlc check + postgres-integration-test: + name: postgres itests runs-on: ubuntu-latest - steps: - - name: git checkout - uses: actions/checkout@v3 - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - - - name: docker image cache - uses: satackey/action-docker-layer-caching@v0.0.11 - # Ignore the failure of a step and avoid terminating the job. - continue-on-error: true - - - name: Generate sql models - run: make sqlc-check - - ######################## - # RPC and mobile compilation check - ######################## - rpc-check: - name: RPC and mobile compilation check - runs-on: ubuntu-latest - steps: - - name: git checkout - uses: actions/checkout@v3 - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - - - name: run check - run: make rpc-check - - - name: run JSON/WASM stub compilation check - run: make rpc-js-compile - - - name: build mobile RPC bindings - run: make mobile-rpc - - - name: build mobile specific code - run: go build --tags="mobile" ./mobile - - ######################## - # check commits - ######################## - check-commits: - if: github.event_name == 'pull_request' - name: check commits - runs-on: ubuntu-latest - steps: - - name: git checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - - - name: fetch and rebase on ${{ github.base_ref }} - uses: ./.github/actions/rebase - - - name: check commits - run: scripts/check-each-commit.sh upstream/${{ github.base_ref }} - - ######################## - # lint code - ######################## - lint: - name: lint code - runs-on: ubuntu-latest - steps: - - name: git checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - - - name: check code format - run: make fmt-check - - - name: check go modules tidiness - run: make tidy-module-check - - - name: lint proto files - run: make protolint - - - name: lint - run: GOGC=50 make lint - - ######################## - # cross compilation - ######################## - cross-compile: - name: cross compilation - runs-on: ubuntu-latest - steps: - - name: git checkout - uses: actions/checkout@v3 - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - key-prefix: cross-compile - - - name: build release for all architectures - run: make release - - ######################## - # sample configuration check - ######################## - sample-conf-check: - name: sample configuration check - runs-on: ubuntu-latest - steps: - - name: git checkout - uses: actions/checkout@v3 - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - - - name: check default values in sample-lnd.conf file - run: make sample-conf-check - - ######################## - # run unit tests - ######################## - unit-test: - name: run unit 
tests - runs-on: ubuntu-latest - strategy: - # Allow other tests in the matrix to continue if one fails. - fail-fast: false - matrix: - unit_type: - - btcd unit-cover - - unit tags="kvdb_etcd" - - unit tags="kvdb_postgres" - - unit tags="kvdb_sqlite" - - btcd unit-race - - unit-module - - steps: - - name: git checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: fetch and rebase on ${{ github.base_ref }} - if: github.event_name == 'pull_request' - uses: ./.github/actions/rebase - - - name: git checkout fuzzing seeds - uses: actions/checkout@v3 - with: - repository: lightninglabs/lnd-fuzz - path: lnd-fuzz - - - name: rsync fuzzing seeds - run: rsync -a --ignore-existing lnd-fuzz/ ./ - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - key-prefix: unit-test - - - name: install bitcoind - run: ./scripts/install_bitcoind.sh $BITCOIN_VERSION - - - name: run ${{ matrix.unit_type }} - run: make ${{ matrix.unit_type }} - - - name: Send coverage - uses: shogo82148/actions-goveralls@v1 - if: matrix.unit_type == 'btcd unit-cover' - with: - path-to-profile: coverage.txt - flag-name: 'unit' - parallel: true - - - ######################## - # run integration tests with TRANCHES - ######################## - basic-integration-test: - name: basic itests - runs-on: ubuntu-latest if: '!contains(github.event.pull_request.labels.*.name, ''no-itest'')' - strategy: - # Allow other tests in the matrix to continue if one fails. - fail-fast: false - matrix: - include: - - name: btcd - args: backend=btcd cover=1 - - name: bitcoind - args: backend=bitcoind cover=1 - - name: bitcoind-notxindex - args: backend="bitcoind notxindex" - - name: neutrino - args: backend=neutrino cover=1 - steps: - - name: git checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: fetch and rebase on ${{ github.base_ref }} - if: github.event_name == 'pull_request' - uses: ./.github/actions/rebase - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - key-prefix: integration-test - - - name: install bitcoind - run: ./scripts/install_bitcoind.sh $BITCOIN_VERSION - - name: run ${{ matrix.name }} - run: make itest-parallel tranches=${{ env.TRANCHES }} ${{ matrix.args }} shuffleseed=${{ github.run_id }}${{ strategy.job-index }} - - - name: Send coverage - if: ${{ contains(matrix.args, 'cover=1') }} - uses: shogo82148/actions-goveralls@v1 - with: - path-to-profile: coverage.txt - flag-name: 'itest-${{ matrix.name }}' - parallel: true - - - name: Zip log files on failure - if: ${{ failure() }} - timeout-minutes: 5 # timeout after 5 minute - run: 7z a logs-itest-${{ matrix.name }}.zip itest/**/*.log - - - name: Upload log files on failure - uses: actions/upload-artifact@v3 - if: ${{ failure() }} - with: - name: logs-itest-${{ matrix.name }} - path: logs-itest-${{ matrix.name }}.zip - retention-days: 5 - - ######################## - # run integration tests with SMALL_TRANCHES - ######################## - integration-test: - name: itests - runs-on: ubuntu-latest - if: '!contains(github.event.pull_request.labels.*.name, ''no-itest'')' strategy: # Allow other tests in the matrix to continue if one fails. 
fail-fast: false matrix: include: - - name: bitcoind-rpcpolling - args: backend="bitcoind rpcpolling" - - name: bitcoind-etcd - args: backend=bitcoind dbbackend=etcd - - name: bitcoind-sqlite - args: backend=bitcoind dbbackend=sqlite - - name: bitcoind-sqlite-nativesql - args: backend=bitcoind dbbackend=sqlite nativesql=true - name: bitcoind-postgres args: backend=bitcoind dbbackend=postgres - name: bitcoind-postgres-nativesql args: backend=bitcoind dbbackend=postgres nativesql=true + steps: - name: git checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 @@ -364,132 +98,3 @@ jobs: name: logs-itest-${{ matrix.name }} path: logs-itest-${{ matrix.name }}.zip retention-days: 5 - - - ######################## - # run windows integration test - ######################## - windows-integration-test: - name: windows itest - runs-on: windows-latest - if: '!contains(github.event.pull_request.labels.*.name, ''no-itest'')' - steps: - - name: git checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: fetch and rebase on ${{ github.base_ref }} - if: github.event_name == 'pull_request' - uses: ./.github/actions/rebase - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - key-prefix: integration-test - - - name: run itest - run: make itest-parallel tranches=${{ env.SMALL_TRANCHES }} windows=1 shuffleseed=${{ github.run_id }} - - - name: kill any remaining lnd processes - if: ${{ failure() }} - shell: powershell - run: taskkill /IM lnd-itest.exe /T /F - - - name: Zip log files on failure - if: ${{ failure() }} - timeout-minutes: 5 # timeout after 5 minute - run: 7z a logs-itest-windows.zip itest/**/*.log - - - name: Upload log files on failure - uses: actions/upload-artifact@v3 - if: ${{ failure() }} - with: - name: logs-itest-windows - path: logs-itest-windows.zip - retention-days: 5 - - ######################## - # run macOS integration test - ######################## - macos-integration-test: - name: macOS itest - runs-on: macos-14 - if: '!contains(github.event.pull_request.labels.*.name, ''no-itest'')' - steps: - - name: git checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: fetch and rebase on ${{ github.base_ref }} - if: github.event_name == 'pull_request' - uses: ./.github/actions/rebase - - - name: setup go ${{ env.GO_VERSION }} - uses: ./.github/actions/setup-go - with: - go-version: '${{ env.GO_VERSION }}' - key-prefix: integration-test - - - name: run itest - run: make itest-parallel tranches=${{ env.SMALL_TRANCHES }} shuffleseed=${{ github.run_id }} - - - name: Zip log files on failure - if: ${{ failure() }} - timeout-minutes: 5 # timeout after 5 minute - run: 7z a logs-itest-macos.zip itest/**/*.log - - - name: Upload log files on failure - uses: actions/upload-artifact@v3 - if: ${{ failure() }} - with: - name: logs-itest-macos - path: logs-itest-macos.zip - retention-days: 5 - - ######################## - # check pinned dependencies - ######################## - dep-pin: - name: check pinned dependencies - runs-on: ubuntu-latest - strategy: - # Allow other tests in the matrix to continue if one fails. - fail-fast: false - matrix: - pinned_dep: - - google.golang.org/grpc v1.59.0 - - github.com/golang/protobuf v1.5.3 - - steps: - - name: git checkout - uses: actions/checkout@v3 - - - name: ensure dependencies at correct version - run: if ! 
grep -q "${{ matrix.pinned_dep }}" go.mod; then echo dependency ${{ matrix.pinned_dep }} should not be altered ; exit 1 ; fi
-
-  ########################
-  # check PR updates release notes
-  ########################
-  milestone-check:
-    name: check release notes updated
-    runs-on: ubuntu-latest
-    if: '!contains(github.event.pull_request.labels.*.name, ''no-changelog'')'
-    steps:
-      - name: git checkout
-        uses: actions/checkout@v3
-
-      - name: release notes check
-        run: scripts/check-release-notes.sh
-
-  # Notify about the completion of all coverage collecting jobs.
-  finish:
-    if: ${{ always() }}
-    needs: [unit-test, basic-integration-test]
-    runs-on: ubuntu-latest
-    steps:
-      - uses: shogo82148/actions-goveralls@v1
-        with:
-          parallel-finished: true
diff --git a/Makefile b/Makefile
index 2f0c8c9bae..46c8f19692 100644
--- a/Makefile
+++ b/Makefile
@@ -191,23 +191,7 @@ check: unit itest
 
 db-instance:
 ifeq ($(dbbackend),postgres)
-	# Remove a previous postgres instance if it exists.
-	docker rm lnd-postgres --force || echo "Starting new postgres container"
-
-	# Start a fresh postgres instance. Allow a maximum of 500 connections so
-	# that multiple lnd instances with a maximum number of connections of 20
-	# each can run concurrently. Note that many of the settings here are
-	# specifically for integration testing and are not fit for running
-	# production nodes. The increase in max connections ensures that there
-	# are enough entries allocated for the RWConflictPool to allow multiple
-	# conflicting transactions to track serialization conflicts. The
-	# increase in predicate locks and locks per transaction is to allow the
-	# queries to lock individual rows instead of entire tables, helping
-	# reduce serialization conflicts. Disabling sequential scan for small
-	# tables also helps prevent serialization conflicts by ensuring lookups
-	# lock only relevant rows in the index rather than the entire table.
-	docker run --name lnd-postgres -e POSTGRES_PASSWORD=postgres -p 6432:5432 -d postgres:13-alpine -N 1500 -c max_pred_locks_per_transaction=1024 -c max_locks_per_transaction=128 -c enable_seqscan=off
-	docker logs -f lnd-postgres >itest/postgres.log 2>&1 &
+	scripts/build_postgres.sh $(ITEST_PARALLELISM)
 
 	# Wait for the instance to be started.
 	sleep $(POSTGRES_START_DELAY)
diff --git a/itest/lnd_test.go b/itest/lnd_test.go
index 07b8554f2f..22399d5803 100644
--- a/itest/lnd_test.go
+++ b/itest/lnd_test.go
@@ -127,6 +127,12 @@ func TestLightningNetworkDaemon(t *testing.T) {
 			ht := harnessTest.Subtest(t1)
 			ht.SetTestName(testCase.Name)
 
+			postgresPort := 6432
+			if testCasesRunTranche != nil {
+				postgresPort += int(*testCasesRunTranche)
+			}
+			ht.SetPostgresPort(postgresPort)
+
 			ht.RunTestCase(testCase)
 		})
 
diff --git a/lntest/harness.go b/lntest/harness.go
index dc8033c39c..ddda94f0a6 100644
--- a/lntest/harness.go
+++ b/lntest/harness.go
@@ -469,6 +469,10 @@ func (h *HarnessTest) SetTestName(name string) {
 	h.manager.currentTestCase = cleanTestCaseName
 }
 
+func (h *HarnessTest) SetPostgresPort(port int) {
+	h.manager.postgresPort = port
+}
+
 // NewNode creates a new node and asserts its creation. The node is guaranteed
 // to have finished its initialization and all its subservers are started.
 func (h *HarnessTest) NewNode(name string,
diff --git a/lntest/harness_node_manager.go b/lntest/harness_node_manager.go
index 48afa6b358..559e304888 100644
--- a/lntest/harness_node_manager.go
+++ b/lntest/harness_node_manager.go
@@ -46,6 +46,9 @@ type nodeManager struct {
 
 	// feeServiceURL is the url of the fee service.
 	feeServiceURL string
+
+	// postgresPort is the port of the postgres instance.
+	postgresPort int
 }
 
 // newNodeManager creates a new node manager instance.
@@ -86,6 +89,7 @@ func (nm *nodeManager) newNode(t *testing.T, name string, extraArgs []string,
 		LndBinary:         nm.lndBinary,
 		NetParams:         miner.HarnessNetParams,
 		SkipUnlock:        noAuth,
+		PostgresPort:      nm.postgresPort,
 	}
 
 	node, err := node.NewHarnessNode(t, cfg)
diff --git a/lntest/node/config.go b/lntest/node/config.go
index 01ab47f1da..2675a8fabc 100644
--- a/lntest/node/config.go
+++ b/lntest/node/config.go
@@ -159,6 +159,10 @@ type BaseNodeConfig struct {
 	// postgresDBName is the name of the postgres database where lnd data
 	// is stored in.
 	postgresDBName string
+
+	// PostgresPort is the port on which the postgres database is
+	// listening.
+	PostgresPort int
 }
 
 func (cfg BaseNodeConfig) P2PAddr() string {
diff --git a/lntest/node/harness_node.go b/lntest/node/harness_node.go
index 0ac74f8779..e4d5291871 100644
--- a/lntest/node/harness_node.go
+++ b/lntest/node/harness_node.go
@@ -39,9 +39,6 @@ const (
 	// release of announcements by AuthenticatedGossiper to the network.
 	trickleDelay = 50
 
-	postgresDsn = "postgres://postgres:postgres@localhost:" +
-		"6432/%s?sslmode=disable"
-
 	// commitInterval specifies the maximum interval the graph database
 	// will wait between attempting to flush a batch of modifications to
 	// disk(db.batch-commit-interval).
@@ -123,11 +120,11 @@ func NewHarnessNode(t *testing.T, cfg *BaseNodeConfig) (*HarnessNode, error) {
 	var dbName string
 	if cfg.DBBackend == BackendPostgres {
 		var err error
-		dbName, err = createTempPgDB()
+		dbName, err = createTempPgDB(cfg.PostgresPort)
 		if err != nil {
 			return nil, err
 		}
-		cfg.PostgresDsn = postgresDatabaseDsn(dbName)
+		cfg.PostgresDsn = postgresDatabaseDsn(dbName, cfg.PostgresPort)
 	}
 
 	cfg.OriginalExtraArgs = cfg.ExtraArgs
@@ -838,8 +835,8 @@ func (hn *HarnessNode) BackupDB() error {
 	// Backup database.
 	backupDBName := hn.Cfg.postgresDBName + "_backup"
 	err := executePgQuery(
-		"CREATE DATABASE " + backupDBName + " WITH TEMPLATE " +
-			hn.Cfg.postgresDBName,
+		"CREATE DATABASE "+backupDBName+" WITH TEMPLATE "+
+			hn.Cfg.postgresDBName, hn.Cfg.PostgresPort,
 	)
 	if err != nil {
 		return err
@@ -869,14 +866,15 @@ func (hn *HarnessNode) RestoreDB() error {
 	// Restore database.
 	backupDBName := hn.Cfg.postgresDBName + "_backup"
 	err := executePgQuery(
-		"DROP DATABASE " + hn.Cfg.postgresDBName,
+		"DROP DATABASE "+hn.Cfg.postgresDBName,
+		hn.Cfg.PostgresPort,
 	)
 	if err != nil {
 		return err
 	}
 	err = executePgQuery(
-		"ALTER DATABASE " + backupDBName + " RENAME TO " +
-			hn.Cfg.postgresDBName,
+		"ALTER DATABASE "+backupDBName+" RENAME TO "+
+			hn.Cfg.postgresDBName, hn.Cfg.PostgresPort,
 	)
 	if err != nil {
 		return err
@@ -916,12 +914,15 @@ func (hn *HarnessNode) UpdateGlobalPolicy(policy *lnrpc.RoutingPolicy) {
 	hn.RPC.UpdateChannelPolicy(updateFeeReq)
 }
 
-func postgresDatabaseDsn(dbName string) string {
-	return fmt.Sprintf(postgresDsn, dbName)
+func postgresDatabaseDsn(dbName string, port int) string {
+	postgresDsn := "postgres://postgres:postgres@localhost:" +
+		"%d/%s?sslmode=disable"
+
+	return fmt.Sprintf(postgresDsn, port, dbName)
 }
 
 // createTempPgDB creates a temp postgres database.
-func createTempPgDB() (string, error) {
+func createTempPgDB(port int) (string, error) {
 	// Create random database name.
 	randBytes := make([]byte, 8)
 	_, err := rand.Read(randBytes)
@@ -931,7 +932,7 @@ func createTempPgDB() (string, error) {
 	dbName := "itest_" + hex.EncodeToString(randBytes)
 
 	// Create database.
-	err = executePgQuery("CREATE DATABASE " + dbName)
+	err = executePgQuery("CREATE DATABASE "+dbName, port)
 	if err != nil {
 		return "", err
 	}
@@ -940,10 +941,10 @@ func createTempPgDB() (string, error) {
 }
 
 // executePgQuery executes a SQL statement in a postgres db.
-func executePgQuery(query string) error {
+func executePgQuery(query string, port int) error {
 	pool, err := pgxpool.Connect(
 		context.Background(),
-		postgresDatabaseDsn("postgres"),
+		postgresDatabaseDsn("postgres", port),
 	)
 	if err != nil {
 		return fmt.Errorf("unable to connect to database: %w", err)
diff --git a/scripts/build_postgres.sh b/scripts/build_postgres.sh
new file mode 100755
index 0000000000..397a0839e9
--- /dev/null
+++ b/scripts/build_postgres.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+# Get all the variables.
+PROCESSES=$1
+
+# Remove a previous postgres instance if it exists.
+containers=$(docker ps --filter name=lnd-postgres -aq)
+if [ -n "$containers" ]; then
+  echo "Removing previous postgres instance..."
+  docker rm $containers --force --volumes
+fi
+
+echo "Starting new postgres container"
+
+# Start a fresh postgres instance for every itest tranche, each exposed
+# on its own host port (6432 + tranche index). Allow a maximum of 500
+# connections so that multiple lnd instances with a maximum number of
+# connections of 20 each can run concurrently. Note that many of the
+# settings here are specifically for integration testing and are not
+# fit for running production nodes. The increase in max connections
+# ensures that there are enough entries allocated for the RWConflictPool
+# to allow multiple conflicting transactions to track serialization
+# conflicts. The increase in predicate locks and locks per transaction
+# is to allow the queries to lock individual rows instead of entire
+# tables, helping reduce serialization conflicts. Disabling sequential
+# scan for small tables also helps prevent serialization conflicts by
+# ensuring lookups lock only relevant rows in the index rather than the
+# entire table.
+for ((i=0; i<PROCESSES; i++)); do
+  port=$((6432 + i))
+
+  echo "Starting postgres instance $i on port $port"
+
+  docker run --name lnd-postgres-$i -e POSTGRES_PASSWORD=postgres \
+    -p $port:5432 -d postgres:13-alpine \
+    -N 1500 \
+    -c max_pred_locks_per_transaction=1024 \
+    -c max_locks_per_transaction=128 \
+    -c enable_seqscan=off
+
+  # Follow the container logs so failures can be debugged from the itest
+  # artifacts.
+  docker logs -f lnd-postgres-$i >itest/postgres.log 2>&1 &
+done