diff --git a/core/chains/evm/logpoller/log_poller_test.go b/core/chains/evm/logpoller/log_poller_test.go
index 81d1b74e42d..4bfc403e341 100644
--- a/core/chains/evm/logpoller/log_poller_test.go
+++ b/core/chains/evm/logpoller/log_poller_test.go
@@ -20,7 +20,6 @@ import (
 	"github.com/leanovate/gopter"
 	"github.com/leanovate/gopter/gen"
 	"github.com/leanovate/gopter/prop"
-	"github.com/lib/pq"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/mock"
 	"github.com/stretchr/testify/require"
@@ -39,7 +38,6 @@ import (
 	"github.com/smartcontractkit/chainlink/v2/core/internal/testutils"
 	"github.com/smartcontractkit/chainlink/v2/core/internal/testutils/evmtest"
 	"github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest"
-	"github.com/smartcontractkit/chainlink/v2/core/services/chainlink"
 	"github.com/smartcontractkit/chainlink/v2/core/services/pg"
 )
 
@@ -1325,61 +1323,6 @@ func TestLogPoller_DBErrorHandling(t *testing.T) {
 	assert.Contains(t, logMsgs, "Backup log poller ran before filters loaded, skipping")
 }
 
-func TestNotifyAfterInsert(t *testing.T) {
-	t.Parallel()
-
-	// Use a non-transactional db for this test because notify events
-	// are not delivered until the transaction is committed.
-	var dbURL string
-	_, sqlxDB := heavyweight.FullTestDBV2(t, func(c *chainlink.Config, s *chainlink.Secrets) {
-		dbURL = s.Database.URL.URL().String()
-	})
-
-	lggr, _ := logger.TestObserved(t, zapcore.WarnLevel)
-	chainID := big.NewInt(1337)
-	o := logpoller.NewORM(chainID, sqlxDB, lggr, pgtest.NewQConfig(true))
-
-	listener := pq.NewListener(dbURL, time.Second, time.Second, nil)
-	err := listener.Listen(pg.ChannelInsertOnEVMLogs)
-	require.NoError(t, err)
-
-	log := logpoller.Log{
-		EvmChainId:     ubig.New(chainID),
-		LogIndex:       10,
-		BlockHash:      testutils.Random32Byte(),
-		BlockNumber:    100,
-		BlockTimestamp: time.Now(),
-		Topics: pq.ByteaArray{
-			testutils.NewAddress().Bytes(),
-			testutils.NewAddress().Bytes(),
-		},
-		EventSig:  testutils.Random32Byte(),
-		Address:   testutils.NewAddress(),
-		TxHash:    testutils.Random32Byte(),
-		Data:      []byte("test_data"),
-		CreatedAt: time.Now(),
-	}
-
-	err = o.InsertLogs([]logpoller.Log{log})
-	require.NoError(t, err)
-
-	testutils.AssertEventually(t, func() bool {
-		select {
-		case event := <-listener.Notify:
-			expectedPayload := fmt.Sprintf(
-				"%s:%s,%s",
-				hexutil.Encode(log.Address.Bytes())[2:], // strip the leading 0x
-				hexutil.Encode(log.Topics[0])[2:],
-				hexutil.Encode(log.Topics[1])[2:],
-			)
-			require.Equal(t, event.Extra, expectedPayload)
-			return true
-		default:
-			return false
-		}
-	})
-}
-
 type getLogErrData struct {
 	From  string
 	To    string
diff --git a/core/services/pg/channels.go b/core/services/pg/channels.go
deleted file mode 100644
index aed132a7f2c..00000000000
--- a/core/services/pg/channels.go
+++ /dev/null
@@ -1,4 +0,0 @@
-package pg
-
-// Postgres channel to listen for new evm.txes
-const ChannelInsertOnEVMLogs = "evm.insert_on_logs"
diff --git a/core/store/migrate/migrate_test.go b/core/store/migrate/migrate_test.go
index 10c698e96fd..56d1fe41eb5 100644
--- a/core/store/migrate/migrate_test.go
+++ b/core/store/migrate/migrate_test.go
@@ -516,6 +516,31 @@ func TestDatabaseBackFillWithMigration202(t *testing.T) {
 	}
 }
 
+func TestNoTriggers(t *testing.T) {
+	_, db := heavyweight.FullTestDBEmptyV2(t, nil)
+
+	assert_num_triggers := func(expected int) {
+
+		row := db.DB.QueryRow("select count(*) from information_schema.triggers")
+		var count int
+		err := row.Scan(&count)
+
+		require.NoError(t, err)
+		require.Equal(t, expected, count)
+	}
+
+	// if you find yourself here and are tempted to add a trigger, something has gone wrong
+	// and you should talk to the foundations team before proceeding
+	assert_num_triggers(0)
+
+	// version prior to removal of all triggers
+	v := 217
+	err := goose.UpTo(db.DB, migrationDir, int64(v))
+	require.NoError(t, err)
+	assert_num_triggers(1)
+
+}
+
 func BenchmarkBackfillingRecordsWithMigration202(b *testing.B) {
 	previousMigration := int64(201)
 	backfillMigration := int64(202)
diff --git a/core/store/migrate/migrations/0218_drop_log_topic_trigger.sql b/core/store/migrate/migrations/0218_drop_log_topic_trigger.sql
new file mode 100644
index 00000000000..ea80cccd2b5
--- /dev/null
+++ b/core/store/migrate/migrations/0218_drop_log_topic_trigger.sql
@@ -0,0 +1,27 @@
+-- +goose Up
+-- +goose StatementBegin
+DROP TRIGGER IF EXISTS notify_insert_on_logs_topics ON EVM.logs;
+DROP FUNCTION IF EXISTS evm.notifysavedlogtopics();
+
+-- +goose StatementEnd
+
+
+-- +goose Down
+-- +goose StatementBegin
+
+CREATE FUNCTION evm.notifysavedlogtopics() RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    PERFORM pg_notify(
+        'evm.insert_on_logs'::text,
+        -- hex encoded address plus comma separated list of hex encoded topic values
+        -- e.g. "<address>:<topic_value_1>,<topic_value_2>"
+        encode(NEW.address, 'hex') || ':' || array_to_string(array(SELECT encode(unnest(NEW.topics), 'hex')), ',')
+    );
+    RETURN NULL;
+END
+$$;
+
+CREATE TRIGGER notify_insert_on_logs_topics AFTER INSERT ON evm.logs FOR EACH ROW EXECUTE PROCEDURE evm.notifysavedlogtopics();
+-- +goose StatementEnd