diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 99cfcf89e..b723b60c5 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -27,6 +27,7 @@ jobs: POSTGRES_PASSWORD: testing POSTGRES_DB: envio-dev POSTGRES_USER: postgres + POSTGRES_SCHEMA: public # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready diff --git a/codegenerator/cli/npm/envio/src/bindings/Postgres.res b/codegenerator/cli/npm/envio/src/bindings/Postgres.res index db5c73133..3d1afc27c 100644 --- a/codegenerator/cli/npm/envio/src/bindings/Postgres.res +++ b/codegenerator/cli/npm/envio/src/bindings/Postgres.res @@ -69,6 +69,7 @@ type poolConfig = { database?: string, // Name of database to connect to (default: '') username?: string, // Username of database user (default: '') password?: string, // Password of database user (default: '') + schema?: string, // Name of schema to connect to (default: 'public') ssl?: sslOptions, // true, prefer, require, tls.connect options (default: false) max?: int, // Max number of connections (default: 10) maxLifetime?: option, // Max lifetime in seconds (more info below) (default: null) @@ -86,9 +87,53 @@ type poolConfig = { fetchTypes?: bool, // Automatically fetches types on connect on initial connection. 
(default: true) } +let makeConnectionString = (config: poolConfig) => { + let parts = ["postgres://"] + + switch (config.username, config.password) { + | (Some(username), Some(password)) => parts->Js.Array2.push(`${username}:${password}@`)->ignore + | (Some(username), None) => parts->Js.Array2.push(`${username}@`)->ignore + | _ => () + } + + switch config.host { + | Some(host) => parts->Js.Array2.push(host)->ignore + | None => () + } + + switch config.port { + | Some(port) => parts->Js.Array2.push(`:${port->Belt.Int.toString}`)->ignore + | None => () + } + + switch config.database { + | Some(database) => parts->Js.Array2.push(`/${database}`)->ignore + | None => () + } + + switch config.schema { + | Some(schema) => parts->Js.Array2.push(`?search_path=${schema}`)->ignore + | None => () + } + + let connectionString = parts->Js.Array2.joinWith("") + connectionString +} + @module external makeSql: (~config: poolConfig) => sql = "postgres" +@module +external makeSqlWithConnectionString: (string, poolConfig) => sql = "postgres" + +let makeSql = (~config: poolConfig) => { + let connectionString = makeConnectionString(config) + + // NOTE(review): never log the connection string — it contains the database password + + makeSqlWithConnectionString(connectionString, config) +} + @send external beginSql: (sql, sql => array>) => promise = "begin" // TODO: can explore this approach (https://forum.rescript-lang.org/t/rfc-support-for-tagged-template-literals/3744) diff --git a/codegenerator/cli/npm/envio/src/db/EntityHistory.res b/codegenerator/cli/npm/envio/src/db/EntityHistory.res index ebe44a103..e07e78696 100644 --- a/codegenerator/cli/npm/envio/src/db/EntityHistory.res +++ b/codegenerator/cli/npm/envio/src/db/EntityHistory.res @@ -243,8 +243,8 @@ let fromTable = (table: table, ~schema: S.t<'entity>): t<'entity> => { let insertFnName = `"insert_${table.tableName}"` let historyRowArg = "history_row" - let historyTablePath = `"public"."${historyTableName}"` - let originTablePath =
`"public"."${originTableName}"` + let historyTablePath = `"${historyTableName}"` + let originTablePath = `"${originTableName}"` let previousHistoryFieldsAreNullStr = previousChangeFieldNames diff --git a/codegenerator/cli/npm/envio/src/db/Table.res b/codegenerator/cli/npm/envio/src/db/Table.res index 71db94e83..00843d6ce 100644 --- a/codegenerator/cli/npm/envio/src/db/Table.res +++ b/codegenerator/cli/npm/envio/src/db/Table.res @@ -210,7 +210,7 @@ module PostgresInterop = { table->getNonDefaultFieldNames->Array.map(fieldName => `"${fieldName}"`) `(sql, rows) => { return sql\` - INSERT INTO "public"."${table.tableName}" + INSERT INTO "${table.tableName}" \${sql(rows, ${fieldNamesInQuotes->Js.Array2.joinWith(", ")})} ON CONFLICT(${table->getPrimaryKeyFieldNames->Js.Array2.joinWith(", ")}) DO UPDATE SET diff --git a/codegenerator/cli/src/persisted_state/db.rs b/codegenerator/cli/src/persisted_state/db.rs index e9d409fa3..f12ae7cd4 100644 --- a/codegenerator/cli/src/persisted_state/db.rs +++ b/codegenerator/cli/src/persisted_state/db.rs @@ -12,10 +12,33 @@ async fn get_pg_pool() -> Result { let user = get_env_with_default("ENVIO_PG_USER", "postgres"); let password = get_env_with_default("ENVIO_POSTGRES_PASSWORD", "testing"); let database = get_env_with_default("ENVIO_PG_DATABASE", "envio-dev"); - - let connection_url = format!("postgres://{user}:{password}@{host}:{port}/{database}"); - - PgPoolOptions::new().connect(&connection_url).await + let schema = get_env_with_default("ENVIO_PG_SCHEMA", "public"); + + let connection_url = format!( + "postgresql://{}:{}@{}:{}/{}", + user, password, host, port, database + ); + + let pool = PgPoolOptions::new() + .max_connections(5) + .after_connect({ + let schema = schema.clone(); + move |conn, _| { + Box::pin({ + let value = schema.clone(); + async move { + sqlx::query(&format!("SET search_path TO {}", value)) + .execute(conn) + .await?; + Ok(()) + } + }) + } + }) + .connect(&connection_url) + .await?; + + Ok(pool) } impl 
PersistedState { @@ -27,7 +50,7 @@ impl PersistedState { async fn upsert_to_db_with_pool(&self, pool: &PgPool) -> Result { sqlx::query( r#" - INSERT INTO public.persisted_state ( + INSERT INTO persisted_state ( id, envio_version, config_hash, @@ -77,7 +100,7 @@ impl PersistedStateExists { schema_hash, handler_files_hash, abi_files_hash - from public.persisted_state WHERE id = 1", + from persisted_state WHERE id = 1", ) .fetch_optional(pool) .await; diff --git a/codegenerator/cli/templates/static/codegen/docker-compose.yaml b/codegenerator/cli/templates/static/codegen/docker-compose.yaml index 026fae27a..922c23c6e 100644 --- a/codegenerator/cli/templates/static/codegen/docker-compose.yaml +++ b/codegenerator/cli/templates/static/codegen/docker-compose.yaml @@ -10,6 +10,7 @@ services: POSTGRES_PASSWORD: ${ENVIO_POSTGRES_PASSWORD:-testing} POSTGRES_USER: ${ENVIO_PG_USER:-postgres} POSTGRES_DB: ${ENVIO_PG_DATABASE:-envio-dev} + POSTGRES_SCHEMA: ${ENVIO_PG_SCHEMA:-public} networks: - my-proxy-net graphql-engine: diff --git a/codegenerator/cli/templates/static/codegen/src/Env.res b/codegenerator/cli/templates/static/codegen/src/Env.res index c8913e624..bc0b9db26 100644 --- a/codegenerator/cli/templates/static/codegen/src/Env.res +++ b/codegenerator/cli/templates/static/codegen/src/Env.res @@ -112,6 +112,7 @@ module Db = { let user = envSafe->EnvSafe.get("ENVIO_PG_USER", S.string, ~devFallback="postgres") let password = envSafe->EnvSafe.get("ENVIO_POSTGRES_PASSWORD", S.string, ~devFallback="testing") let database = envSafe->EnvSafe.get("ENVIO_PG_DATABASE", S.string, ~devFallback="envio-dev") + let schema = envSafe->EnvSafe.get("ENVIO_PG_SCHEMA", S.string, ~fallback="public") let ssl = envSafe->EnvSafe.get( "ENVIO_PG_SSL_MODE", Postgres.sslOptionsSchema, diff --git a/codegenerator/cli/templates/static/codegen/src/db/Db.res b/codegenerator/cli/templates/static/codegen/src/db/Db.res index 95b49dff2..7e9dce26d 100644 ---
a/codegenerator/cli/templates/static/codegen/src/db/Db.res +++ b/codegenerator/cli/templates/static/codegen/src/db/Db.res @@ -7,6 +7,7 @@ let config: Postgres.poolConfig = { username: Env.Db.user, password: Env.Db.password, database: Env.Db.database, + schema: Env.Db.schema, ssl: Env.Db.ssl, // TODO: think how we want to pipe these logs to pino. onnotice: ?(Env.userLogLevel == #warn || Env.userLogLevel == #error ? None : Some(_str => ())), diff --git a/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res b/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res index b5fe5f153..6acbfe159 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res +++ b/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res @@ -6,7 +6,7 @@ module General = { type existsRes = {exists: bool} let hasRows = async (sql, ~table: Table.table) => { - let query = `SELECT EXISTS(SELECT 1 FROM public."${table.tableName}");` + let query = `SELECT EXISTS(SELECT 1 FROM "${table.tableName}");` switch await sql->Postgres.unsafe(query) { | [{exists}] => exists | _ => Js.Exn.raiseError("Unexpected result from hasRows query: " ++ query) diff --git a/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js b/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js index 71596191f..790642e0a 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js +++ b/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js @@ -34,7 +34,7 @@ const batchSetItemsInTableCore = (table, sql, rowDataArray) => { ); return sql` -INSERT INTO "public".${sql(table.tableName)} +INSERT INTO ${sql(table.tableName)} ${sql(rowDataArray, ...fieldNames)} ON CONFLICT(${sql`${commaSeparateDynamicMapQuery( sql, @@ -56,7 +56,7 @@ module.exports.batchDeleteItemsInTable = (table, sql, pkArray) => { if (primaryKeyFieldNames.length === 1) { return sql` DELETE - FROM 
"public".${sql(table.tableName)} + FROM ${sql(table.tableName)} WHERE ${sql(primaryKeyFieldNames[0])} IN ${sql(pkArray)}; `; } else { @@ -71,7 +71,7 @@ module.exports.batchReadItemsInTable = (table, sql, pkArray) => { if (primaryKeyFieldNames.length === 1) { return sql` SELECT * - FROM "public".${sql(table.tableName)} + FROM ${sql(table.tableName)} WHERE ${sql(primaryKeyFieldNames[0])} IN ${sql(pkArray)}; `; } else { @@ -83,19 +83,19 @@ module.exports.batchReadItemsInTable = (table, sql, pkArray) => { module.exports.whereEqQuery = (table, sql, fieldName, value) => { return sql` SELECT * - FROM "public".${sql(table.tableName)} + FROM ${sql(table.tableName)} WHERE ${sql(fieldName)} = ${value}; `; }; module.exports.readLatestSyncedEventOnChainId = (sql, chainId) => sql` SELECT * - FROM public.event_sync_state + FROM event_sync_state WHERE chain_id = ${chainId}`; module.exports.batchSetEventSyncState = (sql, entityDataArray) => { return sql` - INSERT INTO public.event_sync_state + INSERT INTO event_sync_state ${sql( entityDataArray, "chain_id", @@ -116,12 +116,12 @@ module.exports.batchSetEventSyncState = (sql, entityDataArray) => { module.exports.readLatestChainMetadataState = (sql, chainId) => sql` SELECT * - FROM public.chain_metadata + FROM chain_metadata WHERE chain_id = ${chainId}`; module.exports.batchSetChainMetadata = (sql, entityDataArray) => { return sql` - INSERT INTO public.chain_metadata + INSERT INTO chain_metadata ${sql( entityDataArray, "chain_id", @@ -154,7 +154,7 @@ module.exports.batchSetChainMetadata = (sql, entityDataArray) => { const batchSetRawEventsCore = (sql, entityDataArray) => { return sql` - INSERT INTO "public"."raw_events" + INSERT INTO "raw_events" ${sql( entityDataArray, "chain_id", @@ -178,13 +178,13 @@ module.exports.batchSetRawEvents = (sql, entityDataArray) => { module.exports.batchDeleteRawEvents = (sql, entityIdArray) => sql` DELETE - FROM "public"."raw_events" + FROM "raw_events" WHERE (chain_id, event_id) IN 
${sql(entityIdArray)};`; // end db operations for raw_events const batchSetEndOfBlockRangeScannedDataCore = (sql, rowDataArray) => { return sql` - INSERT INTO "public"."end_of_block_range_scanned_data" + INSERT INTO "end_of_block_range_scanned_data" ${sql( rowDataArray, "chain_id", @@ -210,7 +210,7 @@ module.exports.batchSetEndOfBlockRangeScannedData = (sql, rowDataArray) => { module.exports.readEndOfBlockRangeScannedDataForChain = (sql, chainId) => { return sql` - SELECT * FROM "public"."end_of_block_range_scanned_data" + SELECT * FROM "end_of_block_range_scanned_data" WHERE chain_id = ${chainId} ORDER BY block_number ASC;`; @@ -224,7 +224,7 @@ module.exports.deleteStaleEndOfBlockRangeScannedDataForChain = ( ) => { return sql` DELETE - FROM "public"."end_of_block_range_scanned_data" + FROM "end_of_block_range_scanned_data" WHERE chain_id = ${chainId} AND block_number < ${blockNumberThreshold} AND block_timestamp < ${blockTimestampThreshold} @@ -237,7 +237,7 @@ module.exports.readDynamicContractsOnChainIdAtOrBeforeBlockNumber = ( blockNumber ) => sql` SELECT * - FROM "public"."dynamic_contract_registry" + FROM "dynamic_contract_registry" WHERE registering_event_block_number <= ${blockNumber} AND chain_id = ${chainId};`; @@ -248,7 +248,7 @@ module.exports.readDynamicContractsOnChainIdMatchingEvents = ( ) => { return sql` SELECT * - FROM "public"."dynamic_contract_registry" + FROM "dynamic_contract_registry" WHERE chain_id = ${chainId} AND (registering_event_contract_name, registering_event_name, registering_event_src_address) IN ${sql( preRegisterEvents.map((item) => sql(item)) @@ -272,7 +272,7 @@ module.exports.getFirstChangeSerial_UnorderedMultichain = ( SELECT MIN(serial) AS first_change_serial FROM - public.${sql(makeHistoryTableName(entityName))} + ${sql(makeHistoryTableName(entityName))} WHERE entity_history_chain_id = ${reorgChainId} AND entity_history_block_number > ${safeBlockNumber} @@ -292,7 +292,7 @@ module.exports.getFirstChangeSerial_OrderedMultichain 
= ( SELECT MIN(serial) AS first_change_serial FROM - public.${sql(makeHistoryTableName(entityName))} + ${sql(makeHistoryTableName(entityName))} WHERE entity_history_block_timestamp > ${safeBlockTimestamp} OR @@ -317,7 +317,7 @@ module.exports.getFirstChangeEntityHistoryPerChain = ( SELECT DISTINCT ON (entity_history_chain_id) * FROM - public.${sql(makeHistoryTableName(entityName))} + ${sql(makeHistoryTableName(entityName))} WHERE serial >= ( SELECT @@ -344,7 +344,7 @@ module.exports.deleteRolledBackEntityHistory = ( ) -- Step 2: Delete all rows that have a serial >= the first change serial DELETE FROM - public.${sql(makeHistoryTableName(entityName))} + ${sql(makeHistoryTableName(entityName))} WHERE serial >= ( SELECT @@ -371,7 +371,7 @@ module.exports.pruneStaleEntityHistory = ( SELECT MIN(serial) AS first_change_serial FROM - public.${sql(tableName)} + ${sql(tableName)} WHERE ${Utils.$$Array.interleave( safeChainIdAndBlockNumberArray.map( @@ -385,7 +385,7 @@ module.exports.pruneStaleEntityHistory = ( SELECT DISTINCT ON (id) * FROM - public.${sql(tableName)} + ${sql(tableName)} WHERE serial >= (SELECT first_change_serial FROM first_change) ORDER BY @@ -400,7 +400,7 @@ module.exports.pruneStaleEntityHistory = ( prev.id, prev.serial FROM - public.${sql(tableName)} prev + ${sql(tableName)} prev INNER JOIN items_in_reorg_threshold r ON @@ -415,7 +415,7 @@ module.exports.pruneStaleEntityHistory = ( : sql`` } DELETE FROM - public.${sql(tableName)} eh + ${sql(tableName)} eh WHERE -- Delete all entity history of entities that are not in the reorg threshold eh.id NOT IN (SELECT id FROM items_in_reorg_threshold) @@ -442,7 +442,7 @@ module.exports.getRollbackDiff = (sql, entityName, getFirstChangeSerial) => sql` SELECT DISTINCT ON (id) after.* FROM - public.${sql(makeHistoryTableName(entityName))} after + ${sql(makeHistoryTableName(entityName))} after WHERE after.serial >= ( SELECT @@ -469,7 +469,7 @@ module.exports.getRollbackDiff = (sql, entityName, getFirstChangeSerial) => 
sql` COALESCE(before.entity_history_log_index, 0) AS entity_history_log_index FROM -- Use a RIGHT JOIN, to ensure that nulls get returned if there is no "before" row - public.${sql(makeHistoryTableName(entityName))} before + ${sql(makeHistoryTableName(entityName))} before RIGHT JOIN rollback_ids after ON before.id = after.id AND before.entity_history_block_timestamp = after.previous_entity_history_block_timestamp AND before.entity_history_chain_id = after.previous_entity_history_chain_id diff --git a/codegenerator/cli/templates/static/codegen/src/db/Migrations.res b/codegenerator/cli/templates/static/codegen/src/db/Migrations.res index b30152819..26c1ae5e3 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/Migrations.res +++ b/codegenerator/cli/templates/static/codegen/src/db/Migrations.res @@ -26,7 +26,7 @@ let creatTableIfNotExists = (sql, table) => { ->Js.Array2.joinWith(", ") let query = ` - CREATE TABLE IF NOT EXISTS "public"."${table.tableName}"(${fieldsMapped}${primaryKeyFieldNames->Array.length > 0 + CREATE TABLE IF NOT EXISTS "${table.tableName}"(${fieldsMapped}${primaryKeyFieldNames->Array.length > 0 ? 
`, PRIMARY KEY(${primaryKey})` : ""});` @@ -36,7 +36,7 @@ let creatTableIfNotExists = (sql, table) => { let makeCreateIndexQuery = (~tableName, ~indexFields) => { let indexName = tableName ++ "_" ++ indexFields->Js.Array2.joinWith("_") let index = indexFields->Belt.Array.map(idx => `"${idx}"`)->Js.Array2.joinWith(", ") - `CREATE INDEX IF NOT EXISTS "${indexName}" ON "public"."${tableName}"(${index}); ` + `CREATE INDEX IF NOT EXISTS "${indexName}" ON "${tableName}"(${index}); ` } let createTableIndices = (sql, table: Table.table) => { @@ -87,7 +87,15 @@ let deleteAllTables: unit => promise = async () => { @warning("-21") await ( %raw( - "sql.unsafe`DROP SCHEMA public CASCADE;CREATE SCHEMA public;GRANT ALL ON SCHEMA public TO postgres;GRANT ALL ON SCHEMA public TO public;`" + "sql.unsafe`DO $$ +DECLARE + schema_name TEXT := current_schema(); +BEGIN + EXECUTE format('DROP SCHEMA %I CASCADE', schema_name); + EXECUTE format('CREATE SCHEMA %I', schema_name); + EXECUTE format('GRANT ALL ON SCHEMA %I TO postgres', schema_name); + EXECUTE format('GRANT ALL ON SCHEMA %I TO public', schema_name); +END $$;`" ) ) } diff --git a/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res b/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res index fefbf22b9..707b3e97e 100644 --- a/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res +++ b/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res @@ -189,7 +189,7 @@ describe("Dynamic contract restart resistance test", () => { //up to the second one yet let dynamicContractsInTable = - await Db.sql->Postgres.unsafe(`SELECT * FROM public.dynamic_contract_registry;`) + await Db.sql->Postgres.unsafe(`SELECT * FROM dynamic_contract_registry;`) Assert.equal( dynamicContractsInTable->Array.length, diff --git a/scenarios/erc20_multichain_factory/test/RollbackMultichain_test.res b/scenarios/erc20_multichain_factory/test/RollbackMultichain_test.res index 
5c7f3210f..9ac382837 100644 --- a/scenarios/erc20_multichain_factory/test/RollbackMultichain_test.res +++ b/scenarios/erc20_multichain_factory/test/RollbackMultichain_test.res @@ -197,7 +197,7 @@ module Sql = { let query = unsafe(Db.sql, _) - let getAllRowsInTable = tableName => query(`SELECT * FROM public."${tableName}";`) + let getAllRowsInTable = tableName => query(`SELECT * FROM "${tableName}";`) let getAccountTokenBalance = async (~tokenAddress, ~accountAddress) => { let tokenAddress = tokenAddress->Address.toString @@ -205,7 +205,7 @@ module Sql = { let accountTokenId = EventHandlers.makeAccountTokenId(~tokenAddress, ~account_id) let res = await query( ` - SELECT * FROM public."AccountToken" + SELECT * FROM "AccountToken" WHERE id = '${accountTokenId}'; `, ) diff --git a/scenarios/test_codegen/.gitignore b/scenarios/test_codegen/.gitignore index 36557d350..5fa5957d8 100644 --- a/scenarios/test_codegen/.gitignore +++ b/scenarios/test_codegen/.gitignore @@ -33,3 +33,4 @@ generated logs build .nyc_output +.env \ No newline at end of file diff --git a/scenarios/test_codegen/test/SerDe_Test.res b/scenarios/test_codegen/test/SerDe_Test.res index ea6d54dc2..bab6b415a 100644 --- a/scenarios/test_codegen/test/SerDe_Test.res +++ b/scenarios/test_codegen/test/SerDe_Test.res @@ -88,7 +88,7 @@ describe("SerDe Test", () => { //The copy function will do it's custom postgres serialization of the entity // await Db.sql->DbFunctions.EntityHistory.copyAllEntitiesToEntityHistory - let res = await Db.sql->Postgres.unsafe(`SELECT * FROM public."EntityWithAllTypes_history";`) + let res = await Db.sql->Postgres.unsafe(`SELECT * FROM "EntityWithAllTypes_history";`) switch res { | [row] => diff --git a/scenarios/test_codegen/test/helpers/utils.ts b/scenarios/test_codegen/test/helpers/utils.ts index 09e38cc63..c2a7ed45a 100644 --- a/scenarios/test_codegen/test/helpers/utils.ts +++ b/scenarios/test_codegen/test/helpers/utils.ts @@ -5,7 +5,11 @@ import { import Postgres from "postgres"; 
import { config } from "../../generated/src/db/Db.bs"; -export const createSql = () => Postgres(config); +export const createSql = () => + Postgres( + `postgres://${config.username}:${config.password}@${config.host}:${config.port}/${config.database}?search_path=${config.schema}`, + config, + ); const originalConsoleLog = console.log; diff --git a/scenarios/test_codegen/test/integration-raw-events-test.ts b/scenarios/test_codegen/test/integration-raw-events-test.ts index 45d1a4ee7..dd81d4fa3 100644 --- a/scenarios/test_codegen/test/integration-raw-events-test.ts +++ b/scenarios/test_codegen/test/integration-raw-events-test.ts @@ -93,28 +93,28 @@ describe("Raw Events Integration", () => { }); it("RawEvents table contains rows after indexer runs", async function() { - let rawEventsRows = await sql`SELECT * FROM public.raw_events`; + let rawEventsRows = await sql`SELECT * FROM raw_events`; expect(rawEventsRows.count).to.be.gt(0); }); it("should ensure Entites are created correctly", async function() { - let rowsNftCollection = await sql`SELECT * FROM public."NftCollection"`; + let rowsNftCollection = await sql`SELECT * FROM "NftCollection"`; expect(rowsNftCollection.count).to.be.gt(0); - let rowsUsers = await sql`SELECT * FROM public."User"`; + let rowsUsers = await sql`SELECT * FROM "User"`; expect(rowsUsers.count).to.be.gt(0); - let rowsToken = await sql`SELECT * FROM public."Token"`; + let rowsToken = await sql`SELECT * FROM "Token"`; expect(rowsToken.count).to.be.gt(0); }); it("should have 1 row in the dynamic_contract_registry table", async function() { - let rowsDCR = await sql`SELECT * FROM public.dynamic_contract_registry`; + let rowsDCR = await sql`SELECT * FROM dynamic_contract_registry`; console.log(rowsDCR); expect(rowsDCR.count).to.be.eq(1); }); // TODO: Fix this test. This test broke after rebasing the 'dev-mode' code on the lastest main with the restructiring and dynamic contracts code. 
it.skip("Tracks dynamic contract on restart", async () => { - let beforeRawEventsRows = await sql`SELECT * FROM public.raw_events`; + let beforeRawEventsRows = await sql`SELECT * FROM raw_events`; //TODO: fix this test, This indicates this test is ineffective but the structure is what we want to test // below show that the contract address store is still populated with the contract console.log("new contract"); @@ -131,7 +131,7 @@ describe("Raw Events Integration", () => { }, 500) ); - let afterRawEventsRows = await sql`SELECT * FROM public.raw_events`; + let afterRawEventsRows = await sql`SELECT * FROM raw_events`; expect(afterRawEventsRows.count).to.be.gt(beforeRawEventsRows.count); }); @@ -140,7 +140,7 @@ describe("Raw Events Integration", () => { let rawEventsRows = await sql` SELECT EXISTS ( SELECT FROM information_schema.tables - WHERE table_name = 'public.raw_events' + WHERE table_name = 'raw_events' ); `; expect(rawEventsRows[0].exists).to.be.eq(false); diff --git a/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res b/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res index f00835f50..7b4fb9081 100644 --- a/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res +++ b/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res @@ -40,11 +40,11 @@ let batchSetMockEntity = Table.PostgresInterop.makeBatchSetFn( let getAllMockEntity = sql => sql - ->Postgres.unsafe(`SELECT * FROM "public"."${TestEntity.table.tableName}"`) + ->Postgres.unsafe(`SELECT * FROM "${TestEntity.table.tableName}"`) ->Promise.thenResolve(json => json->S.parseOrRaiseWith(TestEntity.rowsSchema)) let getAllMockEntityHistory = sql => - sql->Postgres.unsafe(`SELECT * FROM "public"."${TestEntity.entityHistory.table.tableName}"`) + sql->Postgres.unsafe(`SELECT * FROM "${TestEntity.entityHistory.table.tableName}"`) describe("Entity history serde", () => { it("serializes and deserializes correctly", () => { @@ -150,7 +150,7 @@ describe("Entity history serde", () => { 
describe("Entity History Codegen", () => { it("Creates a postgres insert function", () => { - let expected = `CREATE OR REPLACE FUNCTION "insert_TestEntity_history"(history_row "public"."TestEntity_history", should_copy_current_entity BOOLEAN) + let expected = `CREATE OR REPLACE FUNCTION "insert_TestEntity_history"(history_row "TestEntity_history", should_copy_current_entity BOOLEAN) RETURNS void AS $$ DECLARE v_previous_record RECORD; @@ -160,7 +160,7 @@ describe("Entity History Codegen", () => { IF history_row.previous_entity_history_block_timestamp IS NULL OR history_row.previous_entity_history_chain_id IS NULL OR history_row.previous_entity_history_block_number IS NULL OR history_row.previous_entity_history_log_index IS NULL THEN -- Find the most recent record for the same id SELECT entity_history_block_timestamp, entity_history_chain_id, entity_history_block_number, entity_history_log_index INTO v_previous_record - FROM "public"."TestEntity_history" + FROM "TestEntity_history" WHERE id = history_row.id ORDER BY entity_history_block_timestamp DESC, entity_history_chain_id DESC, entity_history_block_number DESC, entity_history_log_index DESC LIMIT 1; @@ -170,9 +170,9 @@ describe("Entity History Codegen", () => { history_row.previous_entity_history_block_timestamp := v_previous_record.entity_history_block_timestamp; history_row.previous_entity_history_chain_id := v_previous_record.entity_history_chain_id; history_row.previous_entity_history_block_number := v_previous_record.entity_history_block_number; history_row.previous_entity_history_log_index := v_previous_record.entity_history_log_index; ElSIF should_copy_current_entity THEN -- Check if a value for the id exists in the origin table and if so, insert a history row for it. 
- SELECT "id", "fieldA", "fieldB" FROM "public"."TestEntity" WHERE id = history_row.id INTO v_origin_record; + SELECT "id", "fieldA", "fieldB" FROM "TestEntity" WHERE id = history_row.id INTO v_origin_record; IF FOUND THEN - INSERT INTO "public"."TestEntity_history" (entity_history_block_timestamp, entity_history_chain_id, entity_history_block_number, entity_history_log_index, "id", "fieldA", "fieldB", "action") + INSERT INTO "TestEntity_history" (entity_history_block_timestamp, entity_history_chain_id, entity_history_block_number, entity_history_log_index, "id", "fieldA", "fieldB", "action") -- SET the current change data fields to 0 since we don't know what they were -- and it doesn't matter provided they are less than any new values VALUES (0, 0, 0, 0, v_origin_record."id", v_origin_record."fieldA", v_origin_record."fieldB", 'SET'); @@ -182,7 +182,7 @@ describe("Entity History Codegen", () => { END IF; END IF; - INSERT INTO "public"."TestEntity_history" ("entity_history_block_timestamp", "entity_history_chain_id", "entity_history_block_number", "entity_history_log_index", "previous_entity_history_block_timestamp", "previous_entity_history_chain_id", "previous_entity_history_block_number", "previous_entity_history_log_index", "id", "fieldA", "fieldB", "action") + INSERT INTO "TestEntity_history" ("entity_history_block_timestamp", "entity_history_chain_id", "entity_history_block_number", "entity_history_log_index", "previous_entity_history_block_timestamp", "previous_entity_history_chain_id", "previous_entity_history_block_number", "previous_entity_history_log_index", "id", "fieldA", "fieldB", "action") VALUES (history_row."entity_history_block_timestamp", history_row."entity_history_chain_id", history_row."entity_history_block_number", history_row."entity_history_log_index", history_row."previous_entity_history_block_timestamp", history_row."previous_entity_history_chain_id", history_row."previous_entity_history_block_number", 
history_row."previous_entity_history_log_index", history_row."id", history_row."fieldA", history_row."fieldB", history_row."action"); END; $$ LANGUAGE plpgsql; diff --git a/scenarios/test_codegen/test/lib_tests/Table_test.res b/scenarios/test_codegen/test/lib_tests/Table_test.res index 85dec1bf3..a215fab4d 100644 --- a/scenarios/test_codegen/test/lib_tests/Table_test.res +++ b/scenarios/test_codegen/test/lib_tests/Table_test.res @@ -14,7 +14,7 @@ describe("Table functions postgres interop", () => { let expected = `(sql, rows) => { return sql\` - INSERT INTO "public"."test_table" + INSERT INTO "test_table" \${sql(rows, "id", "field_a")} ON CONFLICT(id) DO UPDATE SET @@ -38,7 +38,7 @@ describe("Table functions postgres interop", () => { let expected = `(sql, rows) => { return sql\` - INSERT INTO "public"."test_table" + INSERT INTO "test_table" \${sql(rows, "field_a", "field_b", "field_c")} ON CONFLICT(field_a, field_b) DO UPDATE SET @@ -62,7 +62,7 @@ describe("Table functions postgres interop", () => { let expected = `(sql, rows) => { return sql\` - INSERT INTO "public"."test_table" + INSERT INTO "test_table" \${sql(rows, "id", "field_a", "token_id")} ON CONFLICT(id) DO UPDATE SET @@ -85,7 +85,7 @@ describe("Table functions postgres interop", () => { let expected = `(sql, rows) => { return sql\` - INSERT INTO "public"."test_table" + INSERT INTO "test_table" \${sql(rows, "id", "field_a")} ON CONFLICT(id) DO UPDATE SET @@ -109,7 +109,7 @@ describe("Table functions postgres interop", () => { let expected = `(sql, rows) => { return sql\` - INSERT INTO "public"."test_table" + INSERT INTO "test_table" \${sql(rows, "id", "field_a")} ON CONFLICT(id) DO UPDATE SET diff --git a/scenarios/test_codegen/test/rollback/Rollback_test.res b/scenarios/test_codegen/test/rollback/Rollback_test.res index cda96da69..80c7e4ee0 100644 --- a/scenarios/test_codegen/test/rollback/Rollback_test.res +++ b/scenarios/test_codegen/test/rollback/Rollback_test.res @@ -134,7 +134,7 @@ Exposing let 
query = unsafe(Db.sql, _) - let getAllRowsInTable = tableName => query(`SELECT * FROM public."${tableName}";`) + let getAllRowsInTable = tableName => query(`SELECT * FROM "${tableName}";`) } let setupDb = async () => { diff --git a/scenarios/test_codegen/test/sql-transaction-test.ts b/scenarios/test_codegen/test/sql-transaction-test.ts index e58d2ddf1..be49395e1 100644 --- a/scenarios/test_codegen/test/sql-transaction-test.ts +++ b/scenarios/test_codegen/test/sql-transaction-test.ts @@ -36,7 +36,7 @@ describe("Sql transaction tests", () => { await expect(transaction).to.eventually.be.fulfilled; - let rawEventsRows = await sql`SELECT * FROM public.raw_events`; + let rawEventsRows = await sql`SELECT * FROM raw_events`; expect(rawEventsRows.count).to.be.eq(3); }); @@ -58,7 +58,7 @@ describe("Sql transaction tests", () => { await expect(transaction).to.eventually.be.rejected; - let rawEventsRows = await sql`SELECT * FROM public.raw_events`; + let rawEventsRows = await sql`SELECT * FROM raw_events`; expect(rawEventsRows.count).to.be.eq(0); }); });