From cf13e1f2d635cd14be21671ad658772b48e58632 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Fri, 4 Oct 2024 18:20:51 +0300 Subject: [PATCH 01/27] add vid --- graph/src/components/store/entity_cache.rs | 6 ++++++ graph/src/components/store/write.rs | 20 ++++++++++++++++++-- graph/src/components/subgraph/instance.rs | 9 +++++++++ graph/src/data/store/mod.rs | 14 ++++++++++++++ graph/src/schema/input/mod.rs | 6 ++++++ runtime/wasm/src/host_exports.rs | 2 ++ store/postgres/src/relational/ddl.rs | 13 +++++++++++-- store/postgres/src/relational_queries.rs | 13 +++++++++++++ 8 files changed, 79 insertions(+), 4 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index dfaae80f76a..7601a6c4d0a 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -461,11 +461,13 @@ impl EntityCache { updates.remove_null_fields(); let data = Arc::new(updates); self.current.insert(key.clone(), Some(data.cheap_clone())); + let vid = data.vid_opt().unwrap_or_default(); Some(Insert { key, data, block, end: None, + vid, }) } // Entity may have been changed @@ -476,11 +478,13 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { + let vid = data.vid_opt().unwrap_or_default(); Some(Overwrite { key, data, block, end: None, + vid, }) } else { None @@ -491,11 +495,13 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.clone())); if current != data { + let vid = data.vid(); Some(Overwrite { key, data, block, end: None, + vid, }) } else { None diff --git a/graph/src/components/store/write.rs b/graph/src/components/store/write.rs index 721e3d80bc1..6dd2cda472b 100644 --- a/graph/src/components/store/write.rs +++ b/graph/src/components/store/write.rs @@ -45,6 +45,7 @@ pub enum EntityModification { data: Arc, block: BlockNumber, end: Option, + vid: i64, }, /// Update the 
entity by overwriting it Overwrite { @@ -52,6 +53,7 @@ pub enum EntityModification { data: Arc, block: BlockNumber, end: Option, + vid: i64, }, /// Remove the entity Remove { key: EntityKey, block: BlockNumber }, @@ -67,6 +69,7 @@ pub struct EntityWrite<'a> { // The end of the block range for which this write is valid. The value // of `end` itself is not included in the range pub end: Option, + pub vid: i64, } impl std::fmt::Display for EntityWrite<'_> { @@ -89,24 +92,28 @@ impl<'a> TryFrom<&'a EntityModification> for EntityWrite<'a> { data, block, end, + vid, } => Ok(EntityWrite { id: &key.entity_id, entity: data, causality_region: key.causality_region, block: *block, end: *end, + vid: *vid, }), EntityModification::Overwrite { key, data, block, end, + vid, } => Ok(EntityWrite { id: &key.entity_id, entity: &data, causality_region: key.causality_region, block: *block, end: *end, + vid: *vid, }), EntityModification::Remove { .. } => Err(()), @@ -213,11 +220,13 @@ impl EntityModification { data, block, end, + vid, } => Ok(Insert { key, data, block, end, + vid, }), Remove { key, .. } => { return Err(constraint_violation!( @@ -271,21 +280,23 @@ impl EntityModification { } impl EntityModification { - pub fn insert(key: EntityKey, data: Entity, block: BlockNumber) -> Self { + pub fn insert(key: EntityKey, data: Entity, block: BlockNumber, vid: i64) -> Self { EntityModification::Insert { key, data: Arc::new(data), block, end: None, + vid, } } - pub fn overwrite(key: EntityKey, data: Entity, block: BlockNumber) -> Self { + pub fn overwrite(key: EntityKey, data: Entity, block: BlockNumber, vid: i64) -> Self { EntityModification::Overwrite { key, data: Arc::new(data), block, end: None, + vid, } } @@ -1017,18 +1028,21 @@ mod test { let value = value.clone(); let key = THING_TYPE.parse_key("one").unwrap(); + let vid = 0; match value { Ins(block) => EntityModification::Insert { key, data: Arc::new(entity! 
{ SCHEMA => id: "one", count: block }), block, end: None, + vid, }, Ovw(block) => EntityModification::Overwrite { key, data: Arc::new(entity! { SCHEMA => id: "one", count: block }), block, end: None, + vid, }, Rem(block) => EntityModification::Remove { key, block }, InsC(block, end) => EntityModification::Insert { @@ -1036,12 +1050,14 @@ mod test { data: Arc::new(entity! { SCHEMA => id: "one", count: block }), block, end: Some(end), + vid, }, OvwC(block, end) => EntityModification::Overwrite { key, data: Arc::new(entity! { SCHEMA => id: "one", count: block }), block, end: Some(end), + vid, }, } } diff --git a/graph/src/components/subgraph/instance.rs b/graph/src/components/subgraph/instance.rs index 889690c3916..94487fc5bd2 100644 --- a/graph/src/components/subgraph/instance.rs +++ b/graph/src/components/subgraph/instance.rs @@ -78,6 +78,8 @@ pub struct BlockState { // data source that have been processed. pub processed_data_sources: Vec, + pub vid_seq: i32, + // Marks whether a handler is currently executing. in_handler: bool, @@ -93,6 +95,7 @@ impl BlockState { persisted_data_sources: Vec::new(), handler_created_data_sources: Vec::new(), processed_data_sources: Vec::new(), + vid_seq: 0, in_handler: false, metrics: BlockStateMetrics::new(), } @@ -110,6 +113,7 @@ impl BlockState { persisted_data_sources, handler_created_data_sources, processed_data_sources, + vid_seq: _, in_handler, metrics, } = self; @@ -179,4 +183,9 @@ impl BlockState { pub fn persist_data_source(&mut self, ds: StoredDynamicDataSource) { self.persisted_data_sources.push(ds) } + pub fn next_vid(&mut self, block_number: BlockNumber) -> i64 { + let vid = ((block_number as i64) << 32) + self.vid_seq as i64; + self.vid_seq += 1; + vid + } } diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 33d9286ceec..ca4ac75b6ec 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -735,6 +735,9 @@ where lazy_static! 
{ /// The name of the id attribute, `"id"` pub static ref ID: Word = Word::from("id"); + + /// The name of the vid attribute, `"vid"` + pub static ref VID: Word = Word::from("vid"); } /// An entity is represented as a map of attribute names to values. @@ -910,6 +913,17 @@ impl Entity { Id::try_from(self.get("id").unwrap().clone()).expect("the id is set to a valid value") } + pub fn vid(&self) -> i64 { + self.get("vid") + .expect("the vid is set") + .as_int8() + .expect("the vid is set to a valid value") + } + + pub fn vid_opt(&self) -> Option { + self.get("vid").map(|vid| vid.as_int8()).unwrap_or_default() + } + /// Merges an entity update `update` into this entity. /// /// If a key exists in both entities, the value from `update` is chosen. diff --git a/graph/src/schema/input/mod.rs b/graph/src/schema/input/mod.rs index 84897299785..c7fe8ec9818 100644 --- a/graph/src/schema/input/mod.rs +++ b/graph/src/schema/input/mod.rs @@ -35,6 +35,7 @@ pub(crate) const POI_OBJECT: &str = "Poi$"; const POI_DIGEST: &str = "digest"; /// The name of the PoI attribute for storing the block time const POI_BLOCK_TIME: &str = "blockTime"; +const VID: &str = "vid"; pub mod kw { pub const ENTITY: &str = "entity"; @@ -1487,6 +1488,9 @@ impl InputSchema { } pub fn has_field_with_name(&self, entity_type: &EntityType, field: &str) -> bool { + if field == VID { + return true; + } let field = self.inner.pool.lookup(field); match field { @@ -1597,6 +1601,8 @@ fn atom_pool(document: &s::Document) -> AtomPool { pool.intern(POI_DIGEST); pool.intern(POI_BLOCK_TIME); + pool.intern(VID); + for definition in &document.definitions { match definition { s::Definition::TypeDefinition(typedef) => match typedef { diff --git a/runtime/wasm/src/host_exports.rs b/runtime/wasm/src/host_exports.rs index 4d050db23de..bc0071c3372 100644 --- a/runtime/wasm/src/host_exports.rs +++ b/runtime/wasm/src/host_exports.rs @@ -248,6 +248,7 @@ impl HostExports { gas: &GasCounter, ) -> Result<(), HostExportError> { let 
entity_type = state.entity_cache.schema.entity_type(&entity_type)?; + let vid = state.next_vid(block); Self::expect_object_type(&entity_type, "set")?; @@ -314,6 +315,7 @@ impl HostExports { data.insert(store::ID.clone(), value); } } + data.insert(store::VID.clone(), Value::Int8(vid)); self.check_invalid_fields( self.data_source.api_version.clone(), diff --git a/store/postgres/src/relational/ddl.rs b/store/postgres/src/relational/ddl.rs index aa3aefd3561..f696b3d41bb 100644 --- a/store/postgres/src/relational/ddl.rs +++ b/store/postgres/src/relational/ddl.rs @@ -4,6 +4,7 @@ use std::{ }; use graph::{ + data::subgraph::schema::POI_TABLE, prelude::{BLOCK_NUMBER_MAX, ENV_VARS}, schema::InputSchema, }; @@ -116,12 +117,18 @@ impl Table { Ok(cols) } + let vid_type = if self.name.as_str() == POI_TABLE { + "bigserial" + } else { + "bigint" + }; + if self.immutable { writeln!( out, " create table {qname} ( - {vid} bigserial primary key, + {vid} {vid_type} primary key, {block} int not null,\n\ {cols}, unique({id}) @@ -129,6 +136,7 @@ impl Table { qname = self.qualified_name, cols = columns_ddl(self)?, vid = VID_COLUMN, + vid_type = vid_type, block = BLOCK_COLUMN, id = self.primary_key().name ) @@ -137,13 +145,14 @@ impl Table { out, r#" create table {qname} ( - {vid} bigserial primary key, + {vid} {vid_type} primary key, {block_range} int4range not null, {cols} );"#, qname = self.qualified_name, cols = columns_ddl(self)?, vid = VID_COLUMN, + vid_type = vid_type, block_range = BLOCK_RANGE_COLUMN )?; diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 0462bca9e13..7bb62409e08 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -15,6 +15,7 @@ use graph::components::store::write::{EntityWrite, RowGroup, WriteChunk}; use graph::components::store::{Child as StoreChild, DerivedEntityQuery}; use graph::data::store::{Id, IdType, NULL}; use graph::data::store::{IdList, IdRef, 
QueryObject}; +use graph::data::subgraph::schema::POI_TABLE; use graph::data::value::{Object, Word}; use graph::data_source::CausalityRegion; use graph::prelude::{ @@ -2413,6 +2414,7 @@ struct InsertRow<'a> { values: Vec>, br_value: BlockRangeValue, causality_region: CausalityRegion, + vid: i64, } impl<'a> InsertRow<'a> { @@ -2449,10 +2451,12 @@ impl<'a> InsertRow<'a> { } let br_value = BlockRangeValue::new(table, row.block, row.end); let causality_region = row.causality_region; + let vid = row.vid; Ok(Self { values, br_value, causality_region, + vid, }) } } @@ -2538,6 +2542,8 @@ impl<'a> QueryFragment for InsertQuery<'a> { let out = &mut out; out.unsafe_to_cache_prepared(); + let not_poi = self.table.name.as_str() != POI_TABLE; + // Construct a query // insert into schema.table(column, ...) // values @@ -2563,6 +2569,9 @@ impl<'a> QueryFragment for InsertQuery<'a> { out.push_sql(CAUSALITY_REGION_COLUMN); }; + if not_poi { + out.push_sql(", vid"); + } out.push_sql(") values\n"); for (i, row) in self.rows.iter().enumerate() { @@ -2580,6 +2589,10 @@ impl<'a> QueryFragment for InsertQuery<'a> { out.push_sql(", "); out.push_bind_param::(&row.causality_region)?; }; + if not_poi { + out.push_sql(", "); + out.push_bind_param::(&row.vid)?; + } out.push_sql(")"); } From c44bc15f89eb113bc914ece2e6ef9f0279943288 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Sun, 6 Oct 2024 14:20:06 +0300 Subject: [PATCH 02/27] fixes --- graph/src/components/store/entity_cache.rs | 4 +- graph/src/data/store/mod.rs | 4 - runtime/test/src/test.rs | 17 ++-- store/postgres/src/relational/ddl_tests.rs | 90 +++++++++---------- store/test-store/tests/graph/entity_cache.rs | 9 +- store/test-store/tests/postgres/relational.rs | 1 + .../tests/postgres/relational_bytes.rs | 4 +- store/test-store/tests/postgres/store.rs | 4 +- 8 files changed, 66 insertions(+), 67 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 
7601a6c4d0a..58e94362675 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -461,7 +461,7 @@ impl EntityCache { updates.remove_null_fields(); let data = Arc::new(updates); self.current.insert(key.clone(), Some(data.cheap_clone())); - let vid = data.vid_opt().unwrap_or_default(); + let vid = data.vid(); Some(Insert { key, data, @@ -478,7 +478,7 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { - let vid = data.vid_opt().unwrap_or_default(); + let vid = data.vid(); Some(Overwrite { key, data, diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index ca4ac75b6ec..9707468146a 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -920,10 +920,6 @@ impl Entity { .expect("the vid is set to a valid value") } - pub fn vid_opt(&self) -> Option { - self.get("vid").map(|vid| vid.as_int8()).unwrap_or_default() - } - /// Merges an entity update `update` into this entity. /// /// If a key exists in both entities, the value from `update` is chosen. diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index 91895e07725..ad3d3588f31 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -476,17 +476,18 @@ async fn test_ipfs_block() { // The user_data value we use with calls to ipfs_map const USER_DATA: &str = "user_data"; -fn make_thing(id: &str, value: &str) -> (String, EntityModification) { - const DOCUMENT: &str = " type Thing @entity { id: String!, value: String!, extra: String }"; +fn make_thing(id: &str, value: &str, vid: i64) -> (String, EntityModification) { + const DOCUMENT: &str = + " type Thing @entity { id: String!, value: String!, extra: String, vid: Int8 }"; lazy_static! { static ref SCHEMA: InputSchema = InputSchema::raw(DOCUMENT, "doesntmatter"); static ref THING_TYPE: EntityType = SCHEMA.entity_type("Thing").unwrap(); } - let data = entity! 
{ SCHEMA => id: id, value: value, extra: USER_DATA }; + let data = entity! { SCHEMA => id: id, value: value, extra: USER_DATA, vid:vid }; let key = THING_TYPE.parse_key(id).unwrap(); ( format!("{{ \"id\": \"{}\", \"value\": \"{}\"}}", id, value), - EntityModification::insert(key, data, 0), + EntityModification::insert(key, data, 0, vid), ) } @@ -552,8 +553,8 @@ async fn test_ipfs_map(api_version: Version, json_error_msg: &str) { let subgraph_id = "ipfsMap"; // Try it with two valid objects - let (str1, thing1) = make_thing("one", "eins"); - let (str2, thing2) = make_thing("two", "zwei"); + let (str1, thing1) = make_thing("one", "eins", 0); + let (str2, thing2) = make_thing("two", "zwei", 0); let ops = run_ipfs_map( ipfs.clone(), subgraph_id, @@ -1022,8 +1023,8 @@ async fn test_entity_store(api_version: Version) { let schema = store.input_schema(&deployment.hash).unwrap(); - let alex = entity! { schema => id: "alex", name: "Alex" }; - let steve = entity! { schema => id: "steve", name: "Steve" }; + let alex = entity! { schema => id: "alex", name: "Alex", vid: 0i64}; + let steve = entity! 
{ schema => id: "steve", name: "Steve", vid: 1i64}; let user_type = schema.entity_type("User").unwrap(); test_store::insert_entities( &deployment, diff --git a/store/postgres/src/relational/ddl_tests.rs b/store/postgres/src/relational/ddl_tests.rs index b7f9b44afac..a1d694ee7e9 100644 --- a/store/postgres/src/relational/ddl_tests.rs +++ b/store/postgres/src/relational/ddl_tests.rs @@ -384,7 +384,7 @@ create type sgd0815."size" as enum ('large', 'medium', 'small'); create table "sgd0815"."thing" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "big_thing" text not null @@ -405,7 +405,7 @@ create index attr_0_1_thing_big_thing create table "sgd0815"."scalar" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "bool" boolean, @@ -444,7 +444,7 @@ create index attr_1_7_scalar_color create table "sgd0815"."file_thing" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, causality_region int not null, "id" text not null @@ -469,7 +469,7 @@ create type sgd0815."size" as enum ('large', 'medium', 'small'); create table "sgd0815"."thing" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "big_thing" text not null @@ -490,7 +490,7 @@ create index attr_0_1_thing_big_thing create table "sgd0815"."scalar" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "bool" boolean, @@ -515,7 +515,7 @@ create index attr_1_0_scalar_id create table "sgd0815"."file_thing" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, causality_region int not null, "id" text not null @@ -575,7 +575,7 @@ type SongStat @entity { played: Int! 
}"#; const MUSIC_DDL: &str = r#"create table "sgd0815"."musician" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "name" text not null, @@ -598,7 +598,7 @@ create index attr_0_2_musician_main_band on "sgd0815"."musician" using gist("main_band", block_range); create table "sgd0815"."band" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "name" text not null, @@ -618,8 +618,8 @@ create index attr_1_1_band_name on "sgd0815"."band" using btree(left("name", 256)); create table "sgd0815"."song" ( - vid bigserial primary key, - block$ int not null, + vid bigint primary key, + block$ int not null, "id" text not null, "title" text not null, "written_by" text not null, @@ -634,7 +634,7 @@ create index attr_2_1_song_written_by on "sgd0815"."song" using btree("written_by", block$); create table "sgd0815"."song_stat" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "played" int4 not null @@ -676,7 +676,7 @@ type Habitat @entity { }"#; const FOREST_DDL: &str = r#"create table "sgd0815"."animal" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "forest" text @@ -695,8 +695,8 @@ create index attr_0_1_animal_forest on "sgd0815"."animal" using gist("forest", block_range); create table "sgd0815"."forest" ( - vid bigserial primary key, - block_range int4range not null, + vid bigint primary key, + block_range int4range not null, "id" text not null ); alter table "sgd0815"."forest" @@ -711,7 +711,7 @@ create index attr_1_0_forest_id on "sgd0815"."forest" using btree("id"); create table "sgd0815"."habitat" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "most_common" text not null, @@ -763,7 +763,7 @@ type Habitat @entity { }"#; const FULLTEXT_DDL: &str = r#"create table "sgd0815"."animal" ( 
- vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "name" text not null, @@ -791,7 +791,7 @@ create index attr_0_4_animal_search on "sgd0815"."animal" using gin("search"); create table "sgd0815"."forest" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null ); @@ -808,7 +808,7 @@ create index attr_1_0_forest_id on "sgd0815"."forest" using btree("id"); create table "sgd0815"."habitat" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "most_common" text not null, @@ -843,7 +843,7 @@ enum Orientation { const FORWARD_ENUM_SQL: &str = r#"create type sgd0815."orientation" as enum ('DOWN', 'UP'); create table "sgd0815"."thing" ( - vid bigserial primary key, + vid bigint primary key, block_range int4range not null, "id" text not null, "orientation" "sgd0815"."orientation" not null @@ -880,8 +880,8 @@ type Stats @aggregation(intervals: ["hour", "day"], source: "Data") { const TS_SQL: &str = r#" create table "sgd0815"."data" ( - vid bigserial primary key, - block$ int not null, + vid bigint primary key, + block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, "amount" numeric not null, @@ -895,8 +895,8 @@ create index attr_0_1_data_amount on "sgd0815"."data" using btree("amount"); create table "sgd0815"."stats_hour" ( - vid bigserial primary key, - block$ int not null, + vid bigint primary key, + block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, "volume" numeric not null, @@ -913,7 +913,7 @@ create index attr_1_2_stats_hour_max_price on "sgd0815"."stats_hour" using btree("max_price"); create table "sgd0815"."stats_day" ( - vid bigserial primary key, + vid bigint primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -971,9 +971,9 @@ const LIFETIME_GQL: &str = r#" const LIFETIME_SQL: &str = r#" create table "sgd0815"."data" ( - 
vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "group_1" int4 not null, "group_2" int4 not null, @@ -992,9 +992,9 @@ create index attr_0_3_data_amount on "sgd0815"."data" using btree("amount"); create table "sgd0815"."stats_1_hour" ( - vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "volume" numeric not null, unique(id) @@ -1008,9 +1008,9 @@ on "sgd0815"."stats_1_hour" using btree("volume"); create table "sgd0815"."stats_1_day" ( - vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "volume" numeric not null, unique(id) @@ -1024,9 +1024,9 @@ on "sgd0815"."stats_1_day" using btree("volume"); create table "sgd0815"."stats_2_hour" ( - vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "group_1" int4 not null, "volume" numeric not null, @@ -1044,9 +1044,9 @@ create index stats_2_hour_dims on "sgd0815"."stats_2_hour"(group_1, timestamp); create table "sgd0815"."stats_2_day" ( - vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "group_1" int4 not null, "volume" numeric not null, @@ -1064,9 +1064,9 @@ create index stats_2_day_dims on "sgd0815"."stats_2_day"(group_1, timestamp); create table "sgd0815"."stats_3_hour" ( - vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "group_2" int4 not null, "group_1" int4 not null, @@ -1087,9 
+1087,9 @@ create index stats_3_hour_dims on "sgd0815"."stats_3_hour"(group_2, group_1, timestamp); create table "sgd0815"."stats_3_day" ( - vid bigserial primary key, - block$ int not null, -"id" int8 not null, + vid bigint primary key, + block$ int not null, + "id" int8 not null, "timestamp" timestamptz not null, "group_2" int4 not null, "group_1" int4 not null, diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index d7ebb30785c..d071f249c8c 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -221,8 +221,8 @@ fn insert_modifications() { assert_eq!( sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ - EntityModification::insert(mogwai_key, mogwai_data, 0), - EntityModification::insert(sigurros_key, sigurros_data, 0) + EntityModification::insert(mogwai_key, mogwai_data, 0, 0), + EntityModification::insert(sigurros_key, sigurros_data, 0, 0) ]) ); } @@ -265,8 +265,8 @@ fn overwrite_modifications() { assert_eq!( sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ - EntityModification::overwrite(mogwai_key, mogwai_data, 0), - EntityModification::overwrite(sigurros_key, sigurros_data, 0) + EntityModification::overwrite(mogwai_key, mogwai_data, 0, 0), + EntityModification::overwrite(sigurros_key, sigurros_data, 0, 0) ]) ); } @@ -304,6 +304,7 @@ fn consecutive_modifications() { sort_by_entity_key(vec![EntityModification::overwrite( update_key, entity! 
{ SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }, + 0, 0 )]) ); diff --git a/store/test-store/tests/postgres/relational.rs b/store/test-store/tests/postgres/relational.rs index fe366b34509..bbb12050add 100644 --- a/store/test-store/tests/postgres/relational.rs +++ b/store/test-store/tests/postgres/relational.rs @@ -891,6 +891,7 @@ fn conflicting_entity() { data: fred, block: 2, end: None, + vid: 0, }, 2, ) diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index b7b8f36b7d7..40686f8712c 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -84,7 +84,7 @@ pub fn row_group_update( let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { group - .push(EntityModification::overwrite(key, data, block), block) + .push(EntityModification::overwrite(key, data, block, 0), block) .unwrap(); } group @@ -98,7 +98,7 @@ pub fn row_group_insert( let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { group - .push(EntityModification::insert(key, data, block), block) + .push(EntityModification::insert(key, data, block, 0), block) .unwrap(); } group diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index 6605c39b51d..be769d7632e 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -1510,7 +1510,7 @@ fn handle_large_string_with_index() { let key = USER_TYPE.parse_key(id).unwrap(); - EntityModification::insert(key, data, block) + EntityModification::insert(key, data, block, 0) } run_test(|store, writable, deployment| async move { @@ -1609,7 +1609,7 @@ fn handle_large_bytea_with_index() { let key = USER_TYPE.parse_key(id).unwrap(); - EntityModification::insert(key, data, block) + EntityModification::insert(key, data, block, 0) } run_test(|store, writable, deployment| async move { From 
3a88523e4f72cee723ab7e4688ce312f1b072bab Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 8 Oct 2024 18:09:32 +0300 Subject: [PATCH 03/27] more test fixes --- graph/src/components/store/entity_cache.rs | 4 +- graph/src/data/store/mod.rs | 8 ++ store/test-store/tests/core/interfaces.rs | 51 ++++++----- store/test-store/tests/graph/entity_cache.rs | 94 +++++++++++--------- 4 files changed, 94 insertions(+), 63 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 58e94362675..c0802aed77f 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -461,7 +461,7 @@ impl EntityCache { updates.remove_null_fields(); let data = Arc::new(updates); self.current.insert(key.clone(), Some(data.cheap_clone())); - let vid = data.vid(); + let vid = data.vid_opt(); Some(Insert { key, data, @@ -478,7 +478,7 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { - let vid = data.vid(); + let vid = data.vid_opt(); Some(Overwrite { key, data, diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 9707468146a..0369dedbf32 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -920,6 +920,14 @@ impl Entity { .expect("the vid is set to a valid value") } + // TODO: try to remove it + pub fn vid_opt(&self) -> i64 { + self.get("vid") + .map(|vid| vid.as_int8()) + .unwrap_or_default() + .unwrap_or_default() + } + /// Merges an entity update `update` into this entity. /// /// If a key exists in both entities, the value from `update` is chosen. 
diff --git a/store/test-store/tests/core/interfaces.rs b/store/test-store/tests/core/interfaces.rs index 78eb2fda390..bf075c467ad 100644 --- a/store/test-store/tests/core/interfaces.rs +++ b/store/test-store/tests/core/interfaces.rs @@ -202,8 +202,8 @@ async fn reference_interface_derived() { let query = "query { events { id transaction { id } } }"; let buy = ("BuyEvent", entity! { schema => id: "buy" }); - let sell1 = ("SellEvent", entity! { schema => id: "sell1" }); - let sell2 = ("SellEvent", entity! { schema => id: "sell2" }); + let sell1 = ("SellEvent", entity! { schema => id: "sell1", vid: 0i64 }); + let sell2 = ("SellEvent", entity! { schema => id: "sell2", vid: 1i64 }); let gift = ( "GiftEvent", entity! { schema => id: "gift", transaction: "txn" }, @@ -278,11 +278,11 @@ async fn follow_interface_reference() { let parent = ( "Animal", - entity! { schema => id: "parent", legs: 4, parent: Value::Null }, + entity! { schema => id: "parent", legs: 4, parent: Value::Null, vid: 0i64}, ); let child = ( "Animal", - entity! { schema => id: "child", legs: 3, parent: "parent" }, + entity! { schema => id: "child", legs: 3, parent: "parent" , vid: 1i64}, ); let res = insert_and_query(subgraph_id, document, vec![parent, child], query) @@ -459,16 +459,16 @@ async fn interface_inline_fragment_with_subquery() { "; let schema = InputSchema::raw(document, subgraph_id); - let mama_cow = ("Parent", entity! { schema => id: "mama_cow" }); + let mama_cow = ("Parent", entity! { schema => id: "mama_cow", vid: 0i64 }); let cow = ( "Animal", - entity! { schema => id: "1", name: "cow", legs: 4, parent: "mama_cow" }, + entity! { schema => id: "1", name: "cow", legs: 4, parent: "mama_cow", vid: 0i64 }, ); - let mama_bird = ("Parent", entity! { schema => id: "mama_bird" }); + let mama_bird = ("Parent", entity! { schema => id: "mama_bird", vid: 1i64 }); let bird = ( "Bird", - entity! { schema => id: "2", airspeed: 5, legs: 2, parent: "mama_bird" }, + entity! 
{ schema => id: "2", airspeed: 5, legs: 2, parent: "mama_bird", vid: 1i64 }, ); let query = "query { leggeds(orderBy: legs) { legs ... on Bird { airspeed parent { id } } } }"; @@ -545,11 +545,11 @@ async fn alias() { let parent = ( "Animal", - entity! { schema => id: "parent", legs: 4, parent: Value::Null }, + entity! { schema => id: "parent", legs: 4, parent: Value::Null, vid: 0i64 }, ); let child = ( "Animal", - entity! { schema => id: "child", legs: 3, parent: "parent" }, + entity! { schema => id: "child", legs: 3, parent: "parent", vid: 1i64 }, ); let res = insert_and_query(subgraph_id, document, vec![parent, child], query) @@ -608,8 +608,14 @@ async fn fragments_dont_panic() { "; // The panic manifests if two parents exist. - let parent = ("Parent", entity! { schema => id: "p", child: "c" }); - let parent2 = ("Parent", entity! { schema => id: "p2", child: Value::Null }); + let parent = ( + "Parent", + entity! { schema => id: "p", child: "c", vid: 0i64 }, + ); + let parent2 = ( + "Parent", + entity! { schema => id: "p2", child: Value::Null, vid: 1i64 }, + ); let child = ("Child", entity! { schema => id:"c" }); let res = insert_and_query(subgraph_id, document, vec![parent, parent2, child], query) @@ -668,10 +674,13 @@ async fn fragments_dont_duplicate_data() { "; // This bug manifests if two parents exist. - let parent = ("Parent", entity! { schema => id: "p", children: vec!["c"] }); + let parent = ( + "Parent", + entity! { schema => id: "p", children: vec!["c"], vid: 0i64 }, + ); let parent2 = ( "Parent", - entity! { schema => id: "b", children: Vec::::new() }, + entity! { schema => id: "b", children: Vec::::new(), vid: 1i64 }, ); let child = ("Child", entity! { schema => id:"c" }); @@ -721,11 +730,11 @@ async fn redundant_fields() { let parent = ( "Animal", - entity! { schema => id: "parent", parent: Value::Null }, + entity! { schema => id: "parent", parent: Value::Null, vid: 0i64 }, ); let child = ( "Animal", - entity! 
{ schema => id: "child", parent: "parent" }, + entity! { schema => id: "child", parent: "parent", vid: 1i64 }, ); let res = insert_and_query(subgraph_id, document, vec![parent, child], query) @@ -1081,11 +1090,11 @@ async fn enums() { let entities = vec![ ( "Trajectory", - entity! { schema => id: "1", direction: "EAST", meters: 10 }, + entity! { schema => id: "1", direction: "EAST", meters: 10, vid: 0i64}, ), ( "Trajectory", - entity! { schema => id: "2", direction: "NORTH", meters: 15 }, + entity! { schema => id: "2", direction: "NORTH", meters: 15, vid: 1i64}, ), ]; let query = "query { trajectories { id, direction, meters } }"; @@ -1134,15 +1143,15 @@ async fn enum_list_filters() { let entities = vec![ ( "Trajectory", - entity! { schema => id: "1", direction: "EAST", meters: 10 }, + entity! { schema => id: "1", direction: "EAST", meters: 10, vid: 0i64 }, ), ( "Trajectory", - entity! { schema => id: "2", direction: "NORTH", meters: 15 }, + entity! { schema => id: "2", direction: "NORTH", meters: 15, vid: 1i64 }, ), ( "Trajectory", - entity! { schema => id: "3", direction: "WEST", meters: 20 }, + entity! 
{ schema => id: "3", direction: "WEST", meters: 20, vid: 2i64 }, ), ]; diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index d071f249c8c..c8ec89628da 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -190,6 +190,19 @@ fn sort_by_entity_key(mut mods: Vec) -> Vec) -> Vec { + arr.into_iter() + .map(|mut e| { + // println!("E1: {:?}", e); + e.remove("vid"); + // println!("E2: {:?}", e); + e.remove_null_fields(); + // println!("E3: {:?}", e); + e + }) + .collect() +} + #[tokio::test] async fn empty_cache_modifications() { let store = Arc::new(MockStore::new(BTreeMap::new())); @@ -429,17 +442,17 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator .unwrap(); // 1 account 3 wallets - let test_entity_1 = create_account_entity("1", "Johnton", "tonofjohn@email.com", 67_i32); + let test_entity_1 = create_account_entity("1", "Johnton", "tonofjohn@email.com", 67_i32, 1); let id_one = WALLET_TYPE.parse_id("1").unwrap(); - let wallet_entity_1 = create_wallet_operation("1", &id_one, 67_i32); - let wallet_entity_2 = create_wallet_operation("2", &id_one, 92_i32); - let wallet_entity_3 = create_wallet_operation("3", &id_one, 192_i32); + let wallet_entity_1 = create_wallet_operation("1", &id_one, 67_i32, 1); + let wallet_entity_2 = create_wallet_operation("2", &id_one, 92_i32, 2); + let wallet_entity_3 = create_wallet_operation("3", &id_one, 192_i32, 3); // 1 account 1 wallet - let test_entity_2 = create_account_entity("2", "Cindini", "dinici@email.com", 42_i32); + let test_entity_2 = create_account_entity("2", "Cindini", "dinici@email.com", 42_i32, 2); let id_two = WALLET_TYPE.parse_id("2").unwrap(); - let wallet_entity_4 = create_wallet_operation("4", &id_two, 32_i32); + let wallet_entity_4 = create_wallet_operation("4", &id_two, 32_i32, 4); // 1 account 0 wallets - let test_entity_3 = create_account_entity("3", "Shaqueeena", "queensha@email.com", 28_i32); 
+ let test_entity_3 = create_account_entity("3", "Shaqueeena", "queensha@email.com", 28_i32, 3); transact_entity_operations( &store, &deployment, @@ -459,9 +472,9 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator deployment } -fn create_account_entity(id: &str, name: &str, email: &str, age: i32) -> EntityOperation { +fn create_account_entity(id: &str, name: &str, email: &str, age: i32, vid: i64) -> EntityOperation { let test_entity = - entity! { LOAD_RELATED_SUBGRAPH => id: id, name: name, email: email, age: age }; + entity! { LOAD_RELATED_SUBGRAPH => id: id, name: name, email: email, age: age, vid: vid }; EntityOperation::Set { key: ACCOUNT_TYPE.parse_key(id).unwrap(), @@ -469,12 +482,12 @@ fn create_account_entity(id: &str, name: &str, email: &str, age: i32) -> EntityO } } -fn create_wallet_entity(id: &str, account_id: &Id, balance: i32) -> Entity { +fn create_wallet_entity(id: &str, account_id: &Id, balance: i32, vid: i64) -> Entity { let account_id = Value::from(account_id.clone()); - entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance } + entity! 
{ LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance, vid: vid} } -fn create_wallet_operation(id: &str, account_id: &Id, balance: i32) -> EntityOperation { - let test_wallet = create_wallet_entity(id, account_id, balance); +fn create_wallet_operation(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityOperation { + let test_wallet = create_wallet_entity(id, account_id, balance, vid); EntityOperation::Set { key: WALLET_TYPE.parse_key(id).unwrap(), data: test_wallet, @@ -492,12 +505,12 @@ fn check_for_account_with_multiple_wallets() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 0); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 1); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 2); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(result, expeted_vec); + assert_eq!(result, filter_vid(expeted_vec)); }); } @@ -512,10 +525,10 @@ fn check_for_account_with_single_wallet() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 1); let expeted_vec = vec![wallet_1]; - assert_eq!(result, expeted_vec); + assert_eq!(result, filter_vid(expeted_vec)); }); } @@ -578,8 +591,8 @@ fn check_for_insert_async_store() { run_store_test(|mut cache, store, deployment, _writable| async move { let account_id = ACCOUNT_TYPE.parse_id("2").unwrap(); // insert a new wallet - let wallet_entity_5 = create_wallet_operation("5", &account_id, 79_i32); - let wallet_entity_6 = create_wallet_operation("6", &account_id, 200_i32); + let 
wallet_entity_5 = create_wallet_operation("5", &account_id, 79_i32, 2); + let wallet_entity_6 = create_wallet_operation("6", &account_id, 200_i32, 3); transact_entity_operations( &store, @@ -596,21 +609,22 @@ fn check_for_insert_async_store() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32); - let wallet_2 = create_wallet_entity("5", &account_id, 79_i32); - let wallet_3 = create_wallet_entity("6", &account_id, 200_i32); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 1); + let wallet_2 = create_wallet_entity("5", &account_id, 79_i32, 2); + let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 3); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(result, expeted_vec); + assert_eq!(result, filter_vid(expeted_vec)); }); } + #[test] fn check_for_insert_async_not_related() { run_store_test(|mut cache, store, deployment, _writable| async move { let account_id = ACCOUNT_TYPE.parse_id("2").unwrap(); // insert a new wallet - let wallet_entity_5 = create_wallet_operation("5", &account_id, 79_i32); - let wallet_entity_6 = create_wallet_operation("6", &account_id, 200_i32); + let wallet_entity_5 = create_wallet_operation("5", &account_id, 79_i32, 5); + let wallet_entity_6 = create_wallet_operation("6", &account_id, 200_i32, 6); transact_entity_operations( &store, @@ -628,12 +642,12 @@ fn check_for_insert_async_not_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 1); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); let 
expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(result, expeted_vec); + assert_eq!(result, filter_vid(expeted_vec)); }); } @@ -642,7 +656,7 @@ fn check_for_update_async_related() { run_store_test(|mut cache, store, deployment, writable| async move { let entity_key = WALLET_TYPE.parse_key("1").unwrap(); let account_id = entity_key.entity_id.clone(); - let wallet_entity_update = create_wallet_operation("1", &account_id, 79_i32); + let wallet_entity_update = create_wallet_operation("1", &account_id, 79_i32, 1); let new_data = match wallet_entity_update { EntityOperation::Set { ref data, .. } => data.clone(), @@ -666,11 +680,11 @@ fn check_for_update_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); let expeted_vec = vec![new_data, wallet_2, wallet_3]; - assert_eq!(result, expeted_vec); + assert_eq!(filter_vid(result), filter_vid(expeted_vec)); }); } @@ -696,11 +710,11 @@ fn check_for_delete_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); let expeted_vec = vec![wallet_2, wallet_3]; - assert_eq!(result, expeted_vec); + assert_eq!(result, filter_vid(expeted_vec)); }); } @@ -710,11 +724,11 @@ fn scoped_get() { // Key for an existing entity that is in the store let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); let key1 = WALLET_TYPE.parse_key("1").unwrap(); - let wallet1 = 
create_wallet_entity("1", &account1, 67); + let wallet1 = create_wallet_entity("1", &account1, 67, 1); // Create a new entity that is not in the store let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); - let wallet5 = create_wallet_entity("5", &account5, 100); + let wallet5 = create_wallet_entity("5", &account5, 100, 5); let key5 = WALLET_TYPE.parse_key("5").unwrap(); cache.set(key5.clone(), wallet5.clone()).unwrap(); From 548fda1cf904bd79760618ee1266b072af100601 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 8 Oct 2024 23:44:13 +0300 Subject: [PATCH 04/27] fix entity_cache tests --- store/test-store/tests/graph/entity_cache.rs | 23 +++++++++++--------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index c8ec89628da..0bd094334f1 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -591,8 +591,8 @@ fn check_for_insert_async_store() { run_store_test(|mut cache, store, deployment, _writable| async move { let account_id = ACCOUNT_TYPE.parse_id("2").unwrap(); // insert a new wallet - let wallet_entity_5 = create_wallet_operation("5", &account_id, 79_i32, 2); - let wallet_entity_6 = create_wallet_operation("6", &account_id, 200_i32, 3); + let wallet_entity_5 = create_wallet_operation("5", &account_id, 79_i32, 12); + let wallet_entity_6 = create_wallet_operation("6", &account_id, 200_i32, 13); transact_entity_operations( &store, @@ -609,12 +609,12 @@ fn check_for_insert_async_store() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 1); - let wallet_2 = create_wallet_entity("5", &account_id, 79_i32, 2); - let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 3); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 21); + let wallet_2 = create_wallet_entity("5", 
&account_id, 79_i32, 22); + let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 23); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(result, filter_vid(expeted_vec)); + assert_eq!(filter_vid(result), filter_vid(expeted_vec)); }); } @@ -656,7 +656,7 @@ fn check_for_update_async_related() { run_store_test(|mut cache, store, deployment, writable| async move { let entity_key = WALLET_TYPE.parse_key("1").unwrap(); let account_id = entity_key.entity_id.clone(); - let wallet_entity_update = create_wallet_operation("1", &account_id, 79_i32, 1); + let wallet_entity_update = create_wallet_operation("1", &account_id, 79_i32, 11); let new_data = match wallet_entity_update { EntityOperation::Set { ref data, .. } => data.clone(), @@ -680,8 +680,8 @@ fn check_for_update_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 12); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 13); let expeted_vec = vec![new_data, wallet_2, wallet_3]; assert_eq!(filter_vid(result), filter_vid(expeted_vec)); @@ -743,7 +743,10 @@ fn scoped_get() { let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); assert_eq!(None, act1); let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); + assert_eq!( + filter_vid(vec![wallet1.clone()]), + vec![act1.as_ref().map(|e| e.as_ref()).unwrap().clone()] + ); // Even after reading from the store, the entity is not visible with // `InBlock` let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); From dd9ce56dc196ed6c12836e1b99c3763a2ee978de Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Thu, 10 Oct 2024 00:11:01 +0300 Subject: [PATCH 05/27] fix graphql_query tests --- 
store/postgres/src/relational/ddl.rs | 3 +- store/postgres/src/relational/ddl_tests.rs | 16 ++--- store/test-store/tests/graph/entity_cache.rs | 3 - store/test-store/tests/graphql/query.rs | 69 ++++++++++---------- 4 files changed, 44 insertions(+), 47 deletions(-) diff --git a/store/postgres/src/relational/ddl.rs b/store/postgres/src/relational/ddl.rs index f696b3d41bb..dd459735e0f 100644 --- a/store/postgres/src/relational/ddl.rs +++ b/store/postgres/src/relational/ddl.rs @@ -4,7 +4,6 @@ use std::{ }; use graph::{ - data::subgraph::schema::POI_TABLE, prelude::{BLOCK_NUMBER_MAX, ENV_VARS}, schema::InputSchema, }; @@ -117,7 +116,7 @@ impl Table { Ok(cols) } - let vid_type = if self.name.as_str() == POI_TABLE { + let vid_type = if self.object.is_poi() || !self.object.is_object_type() { "bigserial" } else { "bigint" diff --git a/store/postgres/src/relational/ddl_tests.rs b/store/postgres/src/relational/ddl_tests.rs index a1d694ee7e9..86e9f232d49 100644 --- a/store/postgres/src/relational/ddl_tests.rs +++ b/store/postgres/src/relational/ddl_tests.rs @@ -895,7 +895,7 @@ create index attr_0_1_data_amount on "sgd0815"."data" using btree("amount"); create table "sgd0815"."stats_hour" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -913,7 +913,7 @@ create index attr_1_2_stats_hour_max_price on "sgd0815"."stats_hour" using btree("max_price"); create table "sgd0815"."stats_day" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -992,7 +992,7 @@ create index attr_0_3_data_amount on "sgd0815"."data" using btree("amount"); create table "sgd0815"."stats_1_hour" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -1008,7 +1008,7 @@ on "sgd0815"."stats_1_hour" using btree("volume"); create table "sgd0815"."stats_1_day" ( - vid 
bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -1024,7 +1024,7 @@ on "sgd0815"."stats_1_day" using btree("volume"); create table "sgd0815"."stats_2_hour" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -1044,7 +1044,7 @@ create index stats_2_hour_dims on "sgd0815"."stats_2_hour"(group_1, timestamp); create table "sgd0815"."stats_2_day" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -1064,7 +1064,7 @@ create index stats_2_day_dims on "sgd0815"."stats_2_day"(group_1, timestamp); create table "sgd0815"."stats_3_hour" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, @@ -1087,7 +1087,7 @@ create index stats_3_hour_dims on "sgd0815"."stats_3_hour"(group_2, group_1, timestamp); create table "sgd0815"."stats_3_day" ( - vid bigint primary key, + vid bigserial primary key, block$ int not null, "id" int8 not null, "timestamp" timestamptz not null, diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 0bd094334f1..81f4f19649a 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -193,11 +193,8 @@ fn sort_by_entity_key(mut mods: Vec) -> Vec) -> Vec { arr.into_iter() .map(|mut e| { - // println!("E1: {:?}", e); e.remove("vid"); - // println!("E2: {:?}", e); e.remove_null_fields(); - // println!("E3: {:?}", e); e }) .collect() diff --git a/store/test-store/tests/graphql/query.rs b/store/test-store/tests/graphql/query.rs index 08ad26ef9b9..f3c3e2e1233 100644 --- a/store/test-store/tests/graphql/query.rs +++ b/store/test-store/tests/graphql/query.rs @@ -468,25 +468,25 @@ async fn insert_test_entities( ( "Musician", vec![ - entity! 
{ is => id: "m1", name: "John", mainBand: "b1", bands: vec!["b1", "b2"], favoriteCount: 10, birthDate: timestamp.clone() }, - entity! { is => id: "m2", name: "Lisa", mainBand: "b1", bands: vec!["b1"], favoriteCount: 100, birthDate: timestamp.clone() }, + entity! { is => id: "m1", name: "John", mainBand: "b1", bands: vec!["b1", "b2"], favoriteCount: 10, birthDate: timestamp.clone(), vid: 0i64 }, + entity! { is => id: "m2", name: "Lisa", mainBand: "b1", bands: vec!["b1"], favoriteCount: 100, birthDate: timestamp.clone(), vid: 1i64 }, ], ), ("Publisher", vec![entity! { is => id: pub1 }]), ( "Band", vec![ - entity! { is => id: "b1", name: "The Musicians", originalSongs: vec![s[1], s[2]] }, - entity! { is => id: "b2", name: "The Amateurs", originalSongs: vec![s[1], s[3], s[4]] }, + entity! { is => id: "b1", name: "The Musicians", originalSongs: vec![s[1], s[2]], vid: 0i64 }, + entity! { is => id: "b2", name: "The Amateurs", originalSongs: vec![s[1], s[3], s[4]], vid: 1i64 }, ], ), ( "Song", vec![ - entity! { is => id: s[1], sid: "s1", title: "Cheesy Tune", publisher: pub1, writtenBy: "m1", media: vec![md[1], md[2]] }, - entity! { is => id: s[2], sid: "s2", title: "Rock Tune", publisher: pub1, writtenBy: "m2", media: vec![md[3], md[4]] }, - entity! { is => id: s[3], sid: "s3", title: "Pop Tune", publisher: pub1, writtenBy: "m1", media: vec![md[5]] }, - entity! { is => id: s[4], sid: "s4", title: "Folk Tune", publisher: pub1, writtenBy: "m3", media: vec![md[6]] }, + entity! { is => id: s[1], sid: "s1", title: "Cheesy Tune", publisher: pub1, writtenBy: "m1", media: vec![md[1], md[2]], vid: 0i64 }, + entity! { is => id: s[2], sid: "s2", title: "Rock Tune", publisher: pub1, writtenBy: "m2", media: vec![md[3], md[4]], vid: 1i64 }, + entity! { is => id: s[3], sid: "s3", title: "Pop Tune", publisher: pub1, writtenBy: "m1", media: vec![md[5]], vid: 2i64 }, + entity! 
{ is => id: s[4], sid: "s4", title: "Folk Tune", publisher: pub1, writtenBy: "m3", media: vec![md[6]], vid: 3i64 }, ], ), ( @@ -498,31 +498,31 @@ async fn insert_test_entities( ( "SongStat", vec![ - entity! { is => id: s[1], played: 10 }, - entity! { is => id: s[2], played: 15 }, + entity! { is => id: s[1], played: 10, vid: 0i64 }, + entity! { is => id: s[2], played: 15, vid: 1i64 }, ], ), ( "BandReview", vec![ - entity! { is => id: "r1", body: "Bad musicians", band: "b1", author: "u1" }, - entity! { is => id: "r2", body: "Good amateurs", band: "b2", author: "u2" }, - entity! { is => id: "r5", body: "Very Bad musicians", band: "b1", author: "u3" }, + entity! { is => id: "r1", body: "Bad musicians", band: "b1", author: "u1", vid: 0i64 }, + entity! { is => id: "r2", body: "Good amateurs", band: "b2", author: "u2", vid: 1i64 }, + entity! { is => id: "r5", body: "Very Bad musicians", band: "b1", author: "u3", vid: 2i64 }, ], ), ( "SongReview", vec![ - entity! { is => id: "r3", body: "Bad", song: s[2], author: "u1" }, - entity! { is => id: "r4", body: "Good", song: s[3], author: "u2" }, - entity! { is => id: "r6", body: "Very Bad", song: s[2], author: "u3" }, + entity! { is => id: "r3", body: "Bad", song: s[2], author: "u1", vid: 0i64 }, + entity! { is => id: "r4", body: "Good", song: s[3], author: "u2", vid: 1i64 }, + entity! { is => id: "r6", body: "Very Bad", song: s[2], author: "u3", vid: 2i64 }, ], ), ( "User", vec![ - entity! { is => id: "u1", name: "Baden", latestSongReview: "r3", latestBandReview: "r1", latestReview: "r1" }, - entity! { is => id: "u2", name: "Goodwill", latestSongReview: "r4", latestBandReview: "r2", latestReview: "r2" }, + entity! { is => id: "u1", name: "Baden", latestSongReview: "r3", latestBandReview: "r1", latestReview: "r1", vid: 0i64 }, + entity! 
{ is => id: "u2", name: "Goodwill", latestSongReview: "r4", latestBandReview: "r2", latestReview: "r2", vid: 1i64 }, ], ), ( @@ -534,17 +534,17 @@ async fn insert_test_entities( ( "Photo", vec![ - entity! { is => id: md[1], title: "Cheesy Tune Single Cover", author: "u1" }, - entity! { is => id: md[3], title: "Rock Tune Single Cover", author: "u1" }, - entity! { is => id: md[5], title: "Pop Tune Single Cover", author: "u1" }, + entity! { is => id: md[1], title: "Cheesy Tune Single Cover", author: "u1", vid: 0i64 }, + entity! { is => id: md[3], title: "Rock Tune Single Cover", author: "u1", vid: 1i64 }, + entity! { is => id: md[5], title: "Pop Tune Single Cover", author: "u1", vid: 2i64 }, ], ), ( "Video", vec![ - entity! { is => id: md[2], title: "Cheesy Tune Music Video", author: "u2" }, - entity! { is => id: md[4], title: "Rock Tune Music Video", author: "u2" }, - entity! { is => id: md[6], title: "Folk Tune Music Video", author: "u2" }, + entity! { is => id: md[2], title: "Cheesy Tune Music Video", author: "u2", vid: 0i64 }, + entity! { is => id: md[4], title: "Rock Tune Music Video", author: "u2", vid: 1i64 }, + entity! { is => id: md[6], title: "Folk Tune Music Video", author: "u2", vid: 2i64 }, ], ), ( @@ -554,29 +554,30 @@ async fn insert_test_entities( ( "Single", vec![ - entity! { is => id: "rl2", title: "Rock", songs: vec![s[2]] }, - entity! { is => id: "rl3", title: "Cheesy", songs: vec![s[1]] }, - entity! { is => id: "rl4", title: "Silence", songs: Vec::::new() }, + entity! { is => id: "rl2", title: "Rock", songs: vec![s[2]], vid: 0i64 }, + entity! { is => id: "rl3", title: "Cheesy", songs: vec![s[1]], vid: 1i64 }, + entity! { is => id: "rl4", title: "Silence", songs: Vec::::new(), vid: 2i64 }, ], ), ( "Plays", vec![ - entity! { is => id: 1i64, timestamp: ts0, song: s[1], user: "u1"}, - entity! { is => id: 2i64, timestamp: ts0, song: s[1], user: "u2"}, - entity! { is => id: 3i64, timestamp: ts0, song: s[2], user: "u1"}, - entity! 
{ is => id: 4i64, timestamp: ts0, song: s[1], user: "u1"}, - entity! { is => id: 5i64, timestamp: ts0, song: s[1], user: "u1"}, + entity! { is => id: 1i64, timestamp: ts0, song: s[1], user: "u1", vid: 0i64 }, + entity! { is => id: 2i64, timestamp: ts0, song: s[1], user: "u2", vid: 1i64 }, + entity! { is => id: 3i64, timestamp: ts0, song: s[2], user: "u1", vid: 2i64 }, + entity! { is => id: 4i64, timestamp: ts0, song: s[1], user: "u1", vid: 3i64 }, + entity! { is => id: 5i64, timestamp: ts0, song: s[1], user: "u1", vid: 4i64 }, ], ), ]; + let entities0 = insert_ops(&manifest.schema, entities0); let entities1 = vec![( "Musician", vec![ - entity! { is => id: "m3", name: "Tom", mainBand: "b2", bands: vec!["b1", "b2"], favoriteCount: 5, birthDate: timestamp.clone() }, - entity! { is => id: "m4", name: "Valerie", bands: Vec::::new(), favoriteCount: 20, birthDate: timestamp.clone() }, + entity! { is => id: "m3", name: "Tom", mainBand: "b2", bands: vec!["b1", "b2"], favoriteCount: 5, birthDate: timestamp.clone(), vid: 2i64 }, + entity! { is => id: "m4", name: "Valerie", bands: Vec::::new(), favoriteCount: 20, birthDate: timestamp.clone(), vid: 3i64 }, ], )]; let entities1 = insert_ops(&manifest.schema, entities1); From 29b467904da60ebdcf6792585e3e3b8aaf7ed4a1 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Thu, 10 Oct 2024 14:43:44 +0300 Subject: [PATCH 06/27] fix postgress_writable tests --- store/test-store/tests/postgres/writable.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index 96ce2d58b39..15c1368755a 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -142,7 +142,8 @@ async fn insert_count( let count_key_local = |counter_type: &EntityType, id: &str| counter_type.parse_key(id).unwrap(); let data = entity! 
{ TEST_SUBGRAPH_SCHEMA => id: "1", - count: count as i32 + count: count as i32, + vid: count as i64, }; let entity_op = if block != 3 && block != 5 && block != 7 { EntityOperation::Set { From be68a68189f4b347fa76afc41dcade2964ef259d Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Thu, 10 Oct 2024 18:17:56 +0300 Subject: [PATCH 07/27] fix more tests --- .../test-store/tests/postgres/aggregation.rs | 16 ++-- store/test-store/tests/postgres/graft.rs | 9 +- store/test-store/tests/postgres/relational.rs | 13 ++- .../tests/postgres/relational_bytes.rs | 3 +- store/test-store/tests/postgres/store.rs | 86 ++++++++++++------- 5 files changed, 84 insertions(+), 43 deletions(-) diff --git a/store/test-store/tests/postgres/aggregation.rs b/store/test-store/tests/postgres/aggregation.rs index 432bc685a62..223d62c40ef 100644 --- a/store/test-store/tests/postgres/aggregation.rs +++ b/store/test-store/tests/postgres/aggregation.rs @@ -125,8 +125,8 @@ async fn insert_test_data(store: Arc, deployment: DeploymentL let ts64 = TIMES[0]; let entities = vec![ - entity! { schema => id: 1i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(1), amount: bd(10) }, - entity! { schema => id: 2i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(1), amount: bd(1) }, + entity! { schema => id: 1i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(1), amount: bd(10), vid: 11i64 }, + entity! { schema => id: 2i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(1), amount: bd(1), vid: 12i64 }, ]; insert(&store, &deployment, BLOCKS[0].clone(), TIMES[0], entities) @@ -135,8 +135,8 @@ async fn insert_test_data(store: Arc, deployment: DeploymentL let ts64 = TIMES[1]; let entities = vec![ - entity! { schema => id: 11i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(2), amount: bd(2) }, - entity! { schema => id: 12i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(2), amount: bd(20) }, + entity! 
{ schema => id: 11i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(2), amount: bd(2), vid: 21i64 }, + entity! { schema => id: 12i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(2), amount: bd(20), vid: 22i64 }, ]; insert(&store, &deployment, BLOCKS[1].clone(), TIMES[1], entities) .await @@ -144,8 +144,8 @@ async fn insert_test_data(store: Arc, deployment: DeploymentL let ts64 = TIMES[2]; let entities = vec![ - entity! { schema => id: 21i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(3), amount: bd(30) }, - entity! { schema => id: 22i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(3), amount: bd(3) }, + entity! { schema => id: 21i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(3), amount: bd(30), vid: 31i64 }, + entity! { schema => id: 22i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(3), amount: bd(3), vid: 32i64 }, ]; insert(&store, &deployment, BLOCKS[2].clone(), TIMES[2], entities) .await @@ -153,8 +153,8 @@ async fn insert_test_data(store: Arc, deployment: DeploymentL let ts64 = TIMES[3]; let entities = vec![ - entity! { schema => id: 31i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(4), amount: bd(4) }, - entity! { schema => id: 32i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(4), amount: bd(40) }, + entity! { schema => id: 31i64, timestamp: ts64, token: TOKEN1.clone(), price: bd(4), amount: bd(4), vid: 41i64 }, + entity! 
{ schema => id: 32i64, timestamp: ts64, token: TOKEN2.clone(), price: bd(4), amount: bd(40), vid: 42i64 }, ]; insert(&store, &deployment, BLOCKS[3].clone(), TIMES[3], entities) .await diff --git a/store/test-store/tests/postgres/graft.rs b/store/test-store/tests/postgres/graft.rs index 88f77c45b97..cb69b0bc63e 100644 --- a/store/test-store/tests/postgres/graft.rs +++ b/store/test-store/tests/postgres/graft.rs @@ -175,6 +175,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 184.4, false, None, + 0, ); transact_entity_operations(&store, &deployment, BLOCKS[0].clone(), vec![test_entity_1]) .await @@ -189,6 +190,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 159.1, true, Some("red"), + 1, ); let test_entity_3_1 = create_test_entity( "3", @@ -199,6 +201,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 111.7, false, Some("blue"), + 2, ); transact_entity_operations( &store, @@ -218,6 +221,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 111.7, false, None, + 3, ); transact_entity_operations( &store, @@ -241,6 +245,7 @@ fn create_test_entity( weight: f64, coffee: bool, favorite_color: Option<&str>, + vid: i64, ) -> EntityOperation { let bin_name = scalar::Bytes::from_str(&hex::encode(name)).unwrap(); let test_entity = entity! 
{ TEST_SUBGRAPH_SCHEMA => @@ -252,7 +257,8 @@ fn create_test_entity( seconds_age: age * 31557600, weight: Value::BigDecimal(weight.into()), coffee: coffee, - favorite_color: favorite_color + favorite_color: favorite_color, + vid: vid, }; let entity_type = TEST_SUBGRAPH_SCHEMA.entity_type(entity_type).unwrap(); @@ -601,6 +607,7 @@ fn prune() { 157.1, true, Some("red"), + 4, ); transact_and_wait(&store, &src, BLOCKS[5].clone(), vec![user2]) .await diff --git a/store/test-store/tests/postgres/relational.rs b/store/test-store/tests/postgres/relational.rs index bbb12050add..2e564405686 100644 --- a/store/test-store/tests/postgres/relational.rs +++ b/store/test-store/tests/postgres/relational.rs @@ -318,6 +318,7 @@ fn insert_user_entity( drinks: Option>, visits: i64, block: BlockNumber, + vid: i64, ) { let user = make_user( &layout.input_schema, @@ -330,6 +331,7 @@ fn insert_user_entity( favorite_color, drinks, visits, + vid, ); insert_entity_at(conn, layout, entity_type, vec![user], block); @@ -346,6 +348,7 @@ fn make_user( favorite_color: Option<&str>, drinks: Option>, visits: i64, + vid: i64, ) -> Entity { let favorite_color = favorite_color .map(|s| Value::String(s.to_owned())) @@ -361,7 +364,8 @@ fn make_user( weight: BigDecimal::from(weight), coffee: coffee, favorite_color: favorite_color, - visits: visits + visits: visits, + vid: vid, }; if let Some(drinks) = drinks { user.insert("drinks", drinks.into()).unwrap(); @@ -384,6 +388,7 @@ fn insert_users(conn: &mut PgConnection, layout: &Layout) { None, 60, 0, + 0, ); insert_user_entity( conn, @@ -399,6 +404,7 @@ fn insert_users(conn: &mut PgConnection, layout: &Layout) { Some(vec!["beer", "wine"]), 50, 0, + 1, ); insert_user_entity( conn, @@ -414,6 +420,7 @@ fn insert_users(conn: &mut PgConnection, layout: &Layout) { Some(vec!["coffee", "tea"]), 22, 0, + 2, ); } @@ -431,6 +438,7 @@ fn update_user_entity( drinks: Option>, visits: i64, block: BlockNumber, + vid: i64, ) { let user = make_user( &layout.input_schema, @@ 
-443,6 +451,7 @@ fn update_user_entity( favorite_color, drinks, visits, + vid, ); update_entity_at(conn, layout, entity_type, vec![user], block); } @@ -1050,6 +1059,7 @@ impl<'a> QueryChecker<'a> { None, 23, 0, + 3, ); insert_pets(conn, layout); @@ -1162,6 +1172,7 @@ fn check_block_finds() { None, 55, 1, + 4, ); checker diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index 40686f8712c..eaf6895c9b0 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -97,8 +97,9 @@ pub fn row_group_insert( ) -> RowGroup { let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { + let vid = data.vid(); group - .push(EntityModification::insert(key, data, block, 0), block) + .push(EntityModification::insert(key, data, block, vid), block) .unwrap(); } group diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index be769d7632e..7785ea11cdd 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -201,6 +201,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 184.4, false, None, + 0, ); transact_entity_operations( &store, @@ -220,6 +221,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 159.1, true, Some("red"), + 1, ); let test_entity_3_1 = create_test_entity( "3", @@ -230,6 +232,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 111.7, false, Some("blue"), + 2, ); transact_entity_operations( &store, @@ -249,6 +252,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator 111.7, false, None, + 3, ); transact_and_wait( &store, @@ -272,6 +276,7 @@ fn create_test_entity( weight: f64, coffee: bool, favorite_color: Option<&str>, + vid: i64, ) -> EntityOperation { let bin_name = scalar::Bytes::from_str(&hex::encode(name)).unwrap(); let test_entity = entity! 
{ TEST_SUBGRAPH_SCHEMA => @@ -284,6 +289,7 @@ fn create_test_entity( weight: Value::BigDecimal(weight.into()), coffee: coffee, favorite_color: favorite_color, + vid: vid, }; EntityOperation::Set { @@ -398,6 +404,7 @@ fn insert_entity() { 111.7, true, Some("green"), + 5, ); let count = get_entity_count(store.clone(), &deployment.hash); transact_and_wait( @@ -429,6 +436,7 @@ fn update_existing() { 111.7, true, Some("green"), + 6, ); let mut new_data = match op { EntityOperation::Set { ref data, .. } => data.clone(), @@ -467,7 +475,8 @@ fn partially_update_existing() { let entity_key = USER_TYPE.parse_key("1").unwrap(); let schema = writable.input_schema(); - let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; + let partial_entity = + entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 11i64 }; let original_entity = writable .get(&entity_key) @@ -1077,7 +1086,8 @@ fn revert_block_with_partial_update() { let entity_key = USER_TYPE.parse_key("1").unwrap(); let schema = writable.input_schema(); - let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; + let partial_entity = + entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 5i64 }; let original_entity = writable.get(&entity_key).unwrap().expect("missing entity"); @@ -1172,7 +1182,8 @@ fn revert_block_with_dynamic_data_source_operations() { // Create operations to add a user let user_key = USER_TYPE.parse_key("1").unwrap(); - let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; + let partial_entity = + entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 5i64 }; // Get the original user for comparisons let original_user = writable.get(&user_key).unwrap().expect("missing entity"); @@ -1291,9 +1302,12 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { let added_entities = vec![ ( "1".to_owned(), - entity! 
{ schema => id: "1", name: "Johnny Boy" }, + entity! { schema => id: "1", name: "Johnny Boy", vid: 5i64 }, + ), + ( + "2".to_owned(), + entity! { schema => id: "2", name: "Tessa", vid: 6i64 }, ), - ("2".to_owned(), entity! { schema => id: "2", name: "Tessa" }), ]; transact_and_wait( &store.subgraph_store(), @@ -1311,7 +1325,7 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { .unwrap(); // Update an entity in the store - let updated_entity = entity! { schema => id: "1", name: "Johnny" }; + let updated_entity = entity! { schema => id: "1", name: "Johnny", vid: 7i64 }; let update_op = EntityOperation::Set { key: USER_TYPE.parse_key("1").unwrap(), data: updated_entity.clone(), @@ -1387,6 +1401,7 @@ fn throttle_subscription_delivers() { 120.7, false, None, + 7, ); transact_entity_operations( @@ -1432,6 +1447,7 @@ fn throttle_subscription_throttles() { 120.7, false, None, + 8, ); transact_entity_operations( @@ -1505,12 +1521,13 @@ fn handle_large_string_with_index() { name: &str, schema: &InputSchema, block: BlockNumber, + vid: i64, ) -> EntityModification { - let data = entity! { schema => id: id, name: name }; + let data = entity! { schema => id: id, name: name, vid: vid }; let key = USER_TYPE.parse_key(id).unwrap(); - EntityModification::insert(key, data, block, 0) + EntityModification::insert(key, data, block, vid) } run_test(|store, writable, deployment| async move { @@ -1539,8 +1556,8 @@ fn handle_large_string_with_index() { BlockTime::for_test(&*TEST_BLOCK_3_PTR), FirehoseCursor::None, vec![ - make_insert_op(ONE, &long_text, &schema, block), - make_insert_op(TWO, &other_text, &schema, block), + make_insert_op(ONE, &long_text, &schema, block, 11), + make_insert_op(TWO, &other_text, &schema, block, 12), ], &stopwatch_metrics, Vec::new(), @@ -1604,12 +1621,13 @@ fn handle_large_bytea_with_index() { name: &[u8], schema: &InputSchema, block: BlockNumber, + vid: i64, ) -> EntityModification { - let data = entity! 
{ schema => id: id, bin_name: scalar::Bytes::from(name) }; + let data = entity! { schema => id: id, bin_name: scalar::Bytes::from(name), vid: vid }; let key = USER_TYPE.parse_key(id).unwrap(); - EntityModification::insert(key, data, block, 0) + EntityModification::insert(key, data, block, vid) } run_test(|store, writable, deployment| async move { @@ -1643,8 +1661,8 @@ fn handle_large_bytea_with_index() { BlockTime::for_test(&*TEST_BLOCK_3_PTR), FirehoseCursor::None, vec![ - make_insert_op(ONE, &long_bytea, &schema, block), - make_insert_op(TWO, &other_bytea, &schema, block), + make_insert_op(ONE, &long_bytea, &schema, block, 10), + make_insert_op(TWO, &other_bytea, &schema, block, 11), ], &stopwatch_metrics, Vec::new(), @@ -1812,8 +1830,10 @@ fn window() { id: &str, color: &str, age: i32, + vid: i64, ) -> EntityOperation { - let entity = entity! { TEST_SUBGRAPH_SCHEMA => id: id, age: age, favorite_color: color }; + let entity = + entity! { TEST_SUBGRAPH_SCHEMA => id: id, age: age, favorite_color: color, vid: vid }; EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), @@ -1821,25 +1841,25 @@ fn window() { } } - fn make_user(id: &str, color: &str, age: i32) -> EntityOperation { - make_color_and_age(&*USER_TYPE, id, color, age) + fn make_user(id: &str, color: &str, age: i32, vid: i64) -> EntityOperation { + make_color_and_age(&*USER_TYPE, id, color, age, vid) } - fn make_person(id: &str, color: &str, age: i32) -> EntityOperation { - make_color_and_age(&*PERSON_TYPE, id, color, age) + fn make_person(id: &str, color: &str, age: i32, vid: i64) -> EntityOperation { + make_color_and_age(&*PERSON_TYPE, id, color, age, vid) } let ops = vec![ - make_user("4", "green", 34), - make_user("5", "green", 17), - make_user("6", "green", 41), - make_user("7", "red", 25), - make_user("8", "red", 45), - make_user("9", "yellow", 37), - make_user("10", "blue", 27), - make_user("11", "blue", 19), - make_person("p1", "green", 12), - make_person("p2", "red", 15), + make_user("4", 
"green", 34, 11), + make_user("5", "green", 17, 12), + make_user("6", "green", 41, 13), + make_user("7", "red", 25, 14), + make_user("8", "red", 45, 15), + make_user("9", "yellow", 37, 16), + make_user("10", "blue", 27, 17), + make_user("11", "blue", 19, 18), + make_person("p1", "green", 12, 19), + make_person("p2", "red", 15, 20), ]; run_test(|store, _, deployment| async move { @@ -2077,6 +2097,7 @@ fn reorg_tracking() { deployment: &DeploymentLocator, age: i32, block: &BlockPtr, + vid: i64, ) { let test_entity_1 = create_test_entity( "1", @@ -2087,6 +2108,7 @@ fn reorg_tracking() { 184.4, false, None, + vid, ); transact_and_wait(store, deployment, block.clone(), vec![test_entity_1]) .await @@ -2137,15 +2159,15 @@ fn reorg_tracking() { check_state!(store, 2, 2, 2); // Forward to block 3 - update_john(&subgraph_store, &deployment, 70, &TEST_BLOCK_3_PTR).await; + update_john(&subgraph_store, &deployment, 70, &TEST_BLOCK_3_PTR, 30).await; check_state!(store, 2, 2, 3); // Forward to block 4 - update_john(&subgraph_store, &deployment, 71, &TEST_BLOCK_4_PTR).await; + update_john(&subgraph_store, &deployment, 71, &TEST_BLOCK_4_PTR, 40).await; check_state!(store, 2, 2, 4); // Forward to block 5 - update_john(&subgraph_store, &deployment, 72, &TEST_BLOCK_5_PTR).await; + update_john(&subgraph_store, &deployment, 72, &TEST_BLOCK_5_PTR, 50).await; check_state!(store, 2, 2, 5); // Revert all the way back to block 2 From 86f63597be0609513b4c05efd7cbec230087f41e Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Fri, 11 Oct 2024 12:33:55 +0300 Subject: [PATCH 08/27] fix relational tests --- store/test-store/src/store.rs | 10 ++++ store/test-store/tests/graph/entity_cache.rs | 11 +---- store/test-store/tests/postgres/relational.rs | 47 ++++++++++++++----- .../tests/postgres/relational_bytes.rs | 35 +++++++++----- 4 files changed, 69 insertions(+), 34 deletions(-) diff --git a/store/test-store/src/store.rs b/store/test-store/src/store.rs index 2921d375286..0b58da57cf2 100644 --- 
a/store/test-store/src/store.rs +++ b/store/test-store/src/store.rs @@ -105,6 +105,16 @@ lazy_static! { }; } +pub fn filter_vid(arr: Vec) -> Vec { + arr.into_iter() + .map(|mut e| { + e.remove("vid"); + e.remove_null_fields(); + e + }) + .collect() +} + /// Run the `test` after performing `setup`. The result of `setup` is passed /// into `test`. All tests using `run_test_sequentially` are run in sequence, /// never in parallel. The `test` is passed a `Store`, but it is permissible diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 81f4f19649a..23ed17f89e4 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -24,6 +24,7 @@ use std::sync::Arc; use web3::types::H256; use graph_store_postgres::SubgraphStore as DieselSubgraphStore; +use test_store::store::filter_vid; use test_store::*; lazy_static! { @@ -190,16 +191,6 @@ fn sort_by_entity_key(mut mods: Vec) -> Vec) -> Vec { - arr.into_iter() - .map(|mut e| { - e.remove("vid"); - e.remove_null_fields(); - e - }) - .collect() -} - #[tokio::test] async fn empty_cache_modifications() { let store = Arc::new(MockStore::new(BTreeMap::new())); diff --git a/store/test-store/tests/postgres/relational.rs b/store/test-store/tests/postgres/relational.rs index 2e564405686..8c47c464fe0 100644 --- a/store/test-store/tests/postgres/relational.rs +++ b/store/test-store/tests/postgres/relational.rs @@ -205,11 +205,13 @@ lazy_static! { bigInt: big_int.clone(), bigIntArray: vec![big_int.clone(), (big_int + 1.into())], color: "yellow", + vid: 0i64, } }; static ref EMPTY_NULLABLESTRINGS_ENTITY: Entity = { entity! { THINGS_SCHEMA => id: "one", + vid: 0i64, } }; static ref SCALAR_TYPE: EntityType = THINGS_SCHEMA.entity_type("Scalar").unwrap(); @@ -463,17 +465,19 @@ fn insert_pet( id: &str, name: &str, block: BlockNumber, + vid: i64, ) { let pet = entity! 
{ layout.input_schema => id: id, - name: name + name: name, + vid: vid, }; insert_entity_at(conn, layout, entity_type, vec![pet], block); } fn insert_pets(conn: &mut PgConnection, layout: &Layout) { - insert_pet(conn, layout, &*DOG_TYPE, "pluto", "Pluto", 0); - insert_pet(conn, layout, &*CAT_TYPE, "garfield", "Garfield", 0); + insert_pet(conn, layout, &*DOG_TYPE, "pluto", "Pluto", 0, 0); + insert_pet(conn, layout, &*CAT_TYPE, "garfield", "Garfield", 0, 1); } fn create_schema(conn: &mut PgConnection) -> Layout { @@ -493,6 +497,7 @@ fn create_schema(conn: &mut PgConnection) -> Layout { fn scrub(entity: &Entity) -> Entity { let mut scrubbed = entity.clone(); scrubbed.remove_null_fields(); + scrubbed.remove("vid"); scrubbed } @@ -606,6 +611,7 @@ fn update() { entity.set("string", "updated").unwrap(); entity.remove("strings"); entity.set("bool", Value::Null).unwrap(); + entity.set("vid", 1i64).unwrap(); let key = SCALAR_TYPE.key(entity.id()); let entity_type = layout.input_schema.entity_type("Scalar").unwrap(); @@ -633,8 +639,10 @@ fn update_many() { let mut one = SCALAR_ENTITY.clone(); let mut two = SCALAR_ENTITY.clone(); two.set("id", "two").unwrap(); + two.set("vid", 1i64).unwrap(); let mut three = SCALAR_ENTITY.clone(); three.set("id", "three").unwrap(); + three.set("vid", 2i64).unwrap(); insert_entity( conn, layout, @@ -656,6 +664,10 @@ fn update_many() { three.remove("strings"); three.set("color", "red").unwrap(); + one.set("vid", 3i64).unwrap(); + two.set("vid", 4i64).unwrap(); + three.set("vid", 5i64).unwrap(); + // generate keys let entity_type = layout.input_schema.entity_type("Scalar").unwrap(); let keys: Vec = ["one", "two", "three"] @@ -722,10 +734,13 @@ fn serialize_bigdecimal() { // Update with overwrite let mut entity = SCALAR_ENTITY.clone(); + let mut vid = 1i64; for d in &["50", "50.00", "5000", "0.5000", "0.050", "0.5", "0.05"] { let d = BigDecimal::from_str(d).unwrap(); entity.set("bigDecimal", d).unwrap(); + entity.set("vid", vid).unwrap(); + vid += 
1; let key = SCALAR_TYPE.key(entity.id()); let entity_type = layout.input_schema.entity_type("Scalar").unwrap(); @@ -743,6 +758,7 @@ fn serialize_bigdecimal() { ) .expect("Failed to read Scalar[one]") .unwrap(); + entity.remove("vid"); assert_entity_eq!(entity, actual); } }); @@ -770,6 +786,7 @@ fn delete() { insert_entity(conn, layout, &*SCALAR_TYPE, vec![SCALAR_ENTITY.clone()]); let mut two = SCALAR_ENTITY.clone(); two.set("id", "two").unwrap(); + two.set("vid", 1i64).unwrap(); insert_entity(conn, layout, &*SCALAR_TYPE, vec![two]); // Delete where nothing is getting deleted @@ -804,8 +821,10 @@ fn insert_many_and_delete_many() { let one = SCALAR_ENTITY.clone(); let mut two = SCALAR_ENTITY.clone(); two.set("id", "two").unwrap(); + two.set("vid", 1i64).unwrap(); let mut three = SCALAR_ENTITY.clone(); three.set("id", "three").unwrap(); + three.set("vid", 2i64).unwrap(); insert_entity(conn, layout, &*SCALAR_TYPE, vec![one, two, three]); // confidence test: there should be 3 scalar entities in store right now @@ -886,6 +905,7 @@ fn conflicting_entity() { cat: &str, dog: &str, ferret: &str, + vid: i64, ) { let conflicting = |conn: &mut PgConnection, entity_type: &EntityType, types: Vec<&EntityType>| { @@ -912,7 +932,7 @@ fn conflicting_entity() { let dog_type = layout.input_schema.entity_type(dog).unwrap(); let ferret_type = layout.input_schema.entity_type(ferret).unwrap(); - let fred = entity! { layout.input_schema => id: id.clone(), name: id.clone() }; + let fred = entity! 
{ layout.input_schema => id: id.clone(), name: id.clone(), vid: vid }; insert_entity(conn, layout, &cat_type, vec![fred]); // If we wanted to create Fred the dog, which is forbidden, we'd run this: @@ -926,10 +946,10 @@ fn conflicting_entity() { run_test(|mut conn, layout| { let id = Value::String("fred".to_string()); - check(&mut conn, layout, id, "Cat", "Dog", "Ferret"); + check(&mut conn, layout, id, "Cat", "Dog", "Ferret", 0); let id = Value::Bytes(scalar::Bytes::from_str("0xf1ed").unwrap()); - check(&mut conn, layout, id, "ByteCat", "ByteDog", "ByteFerret"); + check(&mut conn, layout, id, "ByteCat", "ByteDog", "ByteFerret", 1); }) } @@ -941,7 +961,8 @@ fn revert_block() { let set_fred = |conn: &mut PgConnection, name, block| { let fred = entity! { layout.input_schema => id: id, - name: name + name: name, + vid: block as i64, }; if block == 0 { insert_entity_at(conn, layout, &*CAT_TYPE, vec![fred], block); @@ -981,6 +1002,7 @@ fn revert_block() { let marty = entity! { layout.input_schema => id: id, order: block, + vid: (block + 10) as i64 }; insert_entity_at(conn, layout, &*MINK_TYPE, vec![marty], block); } @@ -1715,10 +1737,10 @@ struct FilterChecker<'a> { impl<'a> FilterChecker<'a> { fn new(conn: &'a mut PgConnection, layout: &'a Layout) -> Self { let (a1, a2, a2b, a3) = ferrets(); - insert_pet(conn, layout, &*FERRET_TYPE, "a1", &a1, 0); - insert_pet(conn, layout, &*FERRET_TYPE, "a2", &a2, 0); - insert_pet(conn, layout, &*FERRET_TYPE, "a2b", &a2b, 0); - insert_pet(conn, layout, &*FERRET_TYPE, "a3", &a3, 0); + insert_pet(conn, layout, &*FERRET_TYPE, "a1", &a1, 0, 0); + insert_pet(conn, layout, &*FERRET_TYPE, "a2", &a2, 0, 1); + insert_pet(conn, layout, &*FERRET_TYPE, "a2b", &a2b, 0, 2); + insert_pet(conn, layout, &*FERRET_TYPE, "a3", &a3, 0, 3); Self { conn, layout } } @@ -1862,7 +1884,8 @@ fn check_filters() { &*FERRET_TYPE, vec![entity! 
{ layout.input_schema => id: "a1", - name: "Test" + name: "Test", + vid: 5i64 }], 1, ); diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index eaf6895c9b0..41aa79bf9b7 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -57,6 +57,7 @@ lazy_static! { static ref BEEF_ENTITY: Entity = entity! { THINGS_SCHEMA => id: scalar::Bytes::from_str("deadbeef").unwrap(), name: "Beef", + vid: 0i64 }; static ref NAMESPACE: Namespace = Namespace::new("sgd0815".to_string()).unwrap(); static ref THING_TYPE: EntityType = THINGS_SCHEMA.entity_type("Thing").unwrap(); @@ -83,8 +84,9 @@ pub fn row_group_update( ) -> RowGroup { let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { + let vid = data.vid(); group - .push(EntityModification::overwrite(key, data, block, 0), block) + .push(EntityModification::overwrite(key, data, block, vid), block) .unwrap(); } group @@ -129,14 +131,15 @@ fn insert_entity(conn: &mut PgConnection, layout: &Layout, entity_type: &str, en layout.insert(conn, &group, &MOCK_STOPWATCH).expect(&errmsg); } -fn insert_thing(conn: &mut PgConnection, layout: &Layout, id: &str, name: &str) { +fn insert_thing(conn: &mut PgConnection, layout: &Layout, id: &str, name: &str, vid: i64) { insert_entity( conn, layout, "Thing", entity! 
{ layout.input_schema => id: id, - name: name + name: name, + vid: vid, }, ); } @@ -159,6 +162,7 @@ fn create_schema(conn: &mut PgConnection) -> Layout { fn scrub(entity: &Entity) -> Entity { let mut scrubbed = entity.clone(); scrubbed.remove_null_fields(); + scrubbed.remove("vid"); scrubbed } @@ -266,7 +270,7 @@ fn find() { const ID: &str = "deadbeef"; const NAME: &str = "Beef"; - insert_thing(&mut conn, layout, ID, NAME); + insert_thing(&mut conn, layout, ID, NAME, 0); // Happy path: find existing entity let entity = find_entity(conn, layout, ID).unwrap(); @@ -286,8 +290,8 @@ fn find_many() { const NAME: &str = "Beef"; const ID2: &str = "0xdeadbeef02"; const NAME2: &str = "Moo"; - insert_thing(&mut conn, layout, ID, NAME); - insert_thing(&mut conn, layout, ID2, NAME2); + insert_thing(&mut conn, layout, ID, NAME, 0); + insert_thing(&mut conn, layout, ID2, NAME2, 1); let mut id_map = BTreeMap::default(); let ids = IdList::try_from_iter( @@ -319,6 +323,7 @@ fn update() { // Update the entity let mut entity = BEEF_ENTITY.clone(); entity.set("name", "Moo").unwrap(); + entity.set("vid", 1i64).unwrap(); let key = THING_TYPE.key(entity.id()); let entity_id = entity.id(); @@ -334,7 +339,7 @@ fn update() { .expect("Failed to read Thing[deadbeef]") .unwrap(); - assert_entity_eq!(entity, actual); + assert_entity_eq!(scrub(&entity), actual); }); } @@ -346,6 +351,7 @@ fn delete() { insert_entity(&mut conn, layout, "Thing", BEEF_ENTITY.clone()); let mut two = BEEF_ENTITY.clone(); two.set("id", TWO_ID).unwrap(); + two.set("vid", 1i64).unwrap(); insert_entity(&mut conn, layout, "Thing", two); // Delete where nothing is getting deleted @@ -393,29 +399,34 @@ fn make_thing_tree(conn: &mut PgConnection, layout: &Layout) -> (Entity, Entity, let root = entity! { layout.input_schema => id: ROOT, name: "root", - children: vec!["babe01", "babe02"] + children: vec!["babe01", "babe02"], + vid: 0i64, }; let child1 = entity! 
{ layout.input_schema => id: CHILD1, name: "child1", parent: "dead00", - children: vec![GRANDCHILD1] + children: vec![GRANDCHILD1], + vid: 1i64, }; let child2 = entity! { layout.input_schema => id: CHILD2, name: "child2", parent: "dead00", - children: vec![GRANDCHILD1] + children: vec![GRANDCHILD1], + vid: 2i64, }; let grand_child1 = entity! { layout.input_schema => id: GRANDCHILD1, name: "grandchild1", - parent: CHILD1 + parent: CHILD1, + vid: 3i64, }; let grand_child2 = entity! { layout.input_schema => id: GRANDCHILD2, name: "grandchild2", - parent: CHILD2 + parent: CHILD2, + vid: 4i64, }; insert_entity(conn, layout, "Thing", root.clone()); From f5519185ed3c2d428d6ea1c08075cd4c266f09b4 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Fri, 11 Oct 2024 18:18:59 +0300 Subject: [PATCH 09/27] fix graft test --- store/postgres/src/relational_queries.rs | 3 +++ store/test-store/tests/postgres/graft.rs | 1 + 2 files changed, 4 insertions(+) diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 7bb62409e08..8e914f53b1e 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -5110,6 +5110,7 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { out.push_sql(", "); out.push_sql(CAUSALITY_REGION_COLUMN); }; + out.push_sql(", vid"); out.push_sql(")\nselect "); for column in &self.columns { @@ -5175,6 +5176,8 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { )); } } + out.push_sql(", vid"); + out.push_sql(" from "); out.push_sql(self.src.qualified_name.as_str()); out.push_sql(" where vid >= "); diff --git a/store/test-store/tests/postgres/graft.rs b/store/test-store/tests/postgres/graft.rs index cb69b0bc63e..21fff97b62f 100644 --- a/store/test-store/tests/postgres/graft.rs +++ b/store/test-store/tests/postgres/graft.rs @@ -330,6 +330,7 @@ async fn check_graft( // Make our own entries for block 2 shaq.set("email", "shaq@gmail.com").unwrap(); + shaq.set("vid", 
5i64).unwrap(); let op = EntityOperation::Set { key: user_type.parse_key("3").unwrap(), data: shaq, From abcb442e894aacb26cf451aefb7d8ec74b5cc7af Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Mon, 14 Oct 2024 17:22:46 +0300 Subject: [PATCH 10/27] try to fix graft prune test --- store/postgres/src/relational/prune.rs | 28 +++++++------------------- 1 file changed, 7 insertions(+), 21 deletions(-) diff --git a/store/postgres/src/relational/prune.rs b/store/postgres/src/relational/prune.rs index 6b5fcdc6940..c72d5ddd774 100644 --- a/store/postgres/src/relational/prune.rs +++ b/store/postgres/src/relational/prune.rs @@ -17,12 +17,7 @@ use graph::{ }; use itertools::Itertools; -use crate::{ - catalog, - copy::AdaptiveBatchSize, - deployment, - relational::{Table, VID_COLUMN}, -}; +use crate::{catalog, copy::AdaptiveBatchSize, deployment, relational::Table}; use super::{Catalog, Layout, Namespace}; @@ -73,7 +68,6 @@ struct TablePair { // has the same name as `src` but is in a different namespace dst: Arc, src_nsp: Namespace, - dst_nsp: Namespace, } impl TablePair { @@ -100,12 +94,7 @@ impl TablePair { } conn.batch_execute(&query)?; - Ok(TablePair { - src, - dst, - src_nsp, - dst_nsp, - }) + Ok(TablePair { src, dst, src_nsp }) } /// Copy all entity versions visible between `earliest_block` and @@ -239,10 +228,6 @@ impl TablePair { let src_qname = &self.src.qualified_name; let dst_qname = &self.dst.qualified_name; let src_nsp = &self.src_nsp; - let dst_nsp = &self.dst_nsp; - - let vid_seq = format!("{}_{VID_COLUMN}_seq", self.src.name); - let mut query = String::new(); // What we are about to do would get blocked by autovacuum on our @@ -252,12 +237,13 @@ impl TablePair { "src" => src_nsp.as_str(), "error" => e.to_string()); } + // TODO: check if this is needed // Make sure the vid sequence // continues from where it was - writeln!( - query, - "select setval('{dst_nsp}.{vid_seq}', nextval('{src_nsp}.{vid_seq}'));" - )?; + // writeln!( + // query, + // "select 
setval('{dst_nsp}.{vid_seq}', nextval('{src_nsp}.{vid_seq}'));" + // )?; writeln!(query, "drop table {src_qname};")?; writeln!(query, "alter table {dst_qname} set schema {src_nsp}")?; From 7b080d8711bce9b1110d8d522f043cecaf09848e Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 22 Oct 2024 17:45:48 +0300 Subject: [PATCH 11/27] fix runner test data_source_revert --- store/postgres/src/relational_queries.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 8e914f53b1e..3776b469df7 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -5090,6 +5090,8 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> { out.unsafe_to_cache_prepared(); + let not_poi = self.dst.name.as_str() != POI_TABLE; + // Construct a query // insert into {dst}({columns}) // select {columns} from {src} @@ -5110,7 +5112,9 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { out.push_sql(", "); out.push_sql(CAUSALITY_REGION_COLUMN); }; - out.push_sql(", vid"); + if not_poi { + out.push_sql(", vid"); + } out.push_sql(")\nselect "); for column in &self.columns { @@ -5176,7 +5180,9 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { )); } } - out.push_sql(", vid"); + if not_poi { + out.push_sql(", vid"); + } out.push_sql(" from "); out.push_sql(self.src.qualified_name.as_str()); From fd5845a34050edfa15d6f8a409e9819a11fd6047 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Fri, 25 Oct 2024 18:31:12 +0300 Subject: [PATCH 12/27] try to remove optional vid fetch --- graph/src/components/store/entity_cache.rs | 4 +- graph/src/data/store/mod.rs | 8 -- .../tests/chain/ethereum/manifest.rs | 4 +- store/test-store/tests/core/interfaces.rs | 107 ++++++++++++------ store/test-store/tests/graph/entity_cache.rs | 20 ++-- 
store/test-store/tests/graphql/query.rs | 10 +- store/test-store/tests/postgres/writable.rs | 4 +- 7 files changed, 93 insertions(+), 64 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index c0802aed77f..58e94362675 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -461,7 +461,7 @@ impl EntityCache { updates.remove_null_fields(); let data = Arc::new(updates); self.current.insert(key.clone(), Some(data.cheap_clone())); - let vid = data.vid_opt(); + let vid = data.vid(); Some(Insert { key, data, @@ -478,7 +478,7 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { - let vid = data.vid_opt(); + let vid = data.vid(); Some(Overwrite { key, data, diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 0369dedbf32..9707468146a 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -920,14 +920,6 @@ impl Entity { .expect("the vid is set to a valid value") } - // TODO: try to remove it - pub fn vid_opt(&self) -> i64 { - self.get("vid") - .map(|vid| vid.as_int8()) - .unwrap_or_default() - .unwrap_or_default() - } - /// Merges an entity update `update` into this entity. /// /// If a key exists in both entities, the value from `update` is chosen. diff --git a/store/test-store/tests/chain/ethereum/manifest.rs b/store/test-store/tests/chain/ethereum/manifest.rs index c750adb7b72..aa2a46b140b 100644 --- a/store/test-store/tests/chain/ethereum/manifest.rs +++ b/store/test-store/tests/chain/ethereum/manifest.rs @@ -316,7 +316,7 @@ specVersion: 0.0.2 .unwrap(); // Adds an example entity. - let thing = entity! { schema => id: "datthing" }; + let thing = entity! 
{ schema => id: "datthing", vid : 0i64 }; test_store::insert_entities( &deployment, vec![(schema.entity_type("Thing").unwrap(), thing)], @@ -416,7 +416,7 @@ specVersion: 0.0.2 msg ); - let thing = entity! { schema => id: "datthing" }; + let thing = entity! { schema => id: "datthing", vid : 1i64 }; test_store::insert_entities( &deployment, vec![(schema.entity_type("Thing").unwrap(), thing)], diff --git a/store/test-store/tests/core/interfaces.rs b/store/test-store/tests/core/interfaces.rs index bf075c467ad..7f3718e8563 100644 --- a/store/test-store/tests/core/interfaces.rs +++ b/store/test-store/tests/core/interfaces.rs @@ -69,7 +69,7 @@ async fn one_interface_one_entity() { type Animal implements Legged @entity { id: ID!, legs: Int }"; let schema = InputSchema::raw(document, subgraph_id); - let entity = ("Animal", entity! { schema => id: "1", legs: 3 }); + let entity = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); // Collection query. let query = "query { leggeds(first: 100) { legs } }"; @@ -97,7 +97,7 @@ async fn one_interface_one_entity_typename() { type Animal implements Legged @entity { id: ID!, legs: Int }"; let schema = InputSchema::raw(document, subgraph_id); - let entity = ("Animal", entity! { schema => id: "1", legs: 3 }); + let entity = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); let query = "query { leggeds(first: 100) { __typename } }"; @@ -118,8 +118,11 @@ async fn one_interface_multiple_entities() { "; let schema = InputSchema::raw(document, subgraph_id); - let animal = ("Animal", entity! { schema => id: "1", legs: 3 }); - let furniture = ("Furniture", entity! { schema => id: "2", legs: 4 }); + let animal = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); + let furniture = ( + "Furniture", + entity! 
{ schema => id: "2", legs: 4, vid: 0i64 }, + ); let query = "query { leggeds(first: 100, orderBy: legs) { legs } }"; @@ -150,8 +153,8 @@ async fn reference_interface() { let query = "query { leggeds(first: 100) { leg { id } } }"; - let leg = ("Leg", entity! { schema => id: "1" }); - let animal = ("Animal", entity! { schema => id: "1", leg: 1 }); + let leg = ("Leg", entity! { schema => id: "1", vid: 0i64 }); + let animal = ("Animal", entity! { schema => id: "1", leg: 1, vid: 0i64 }); let res = insert_and_query(subgraph_id, document, vec![leg, animal], query) .await @@ -201,16 +204,16 @@ async fn reference_interface_derived() { let query = "query { events { id transaction { id } } }"; - let buy = ("BuyEvent", entity! { schema => id: "buy" }); + let buy = ("BuyEvent", entity! { schema => id: "buy", vid: 0i64 }); let sell1 = ("SellEvent", entity! { schema => id: "sell1", vid: 0i64 }); let sell2 = ("SellEvent", entity! { schema => id: "sell2", vid: 1i64 }); let gift = ( "GiftEvent", - entity! { schema => id: "gift", transaction: "txn" }, + entity! { schema => id: "gift", transaction: "txn", vid: 0i64 }, ); let txn = ( "Transaction", - entity! { schema => id: "txn", buyEvent: "buy", sellEvents: vec!["sell1", "sell2"] }, + entity! { schema => id: "txn", buyEvent: "buy", sellEvents: vec!["sell1", "sell2"], vid: 0i64 }, ); let entities = vec![buy, sell1, sell2, gift, txn]; @@ -305,8 +308,11 @@ async fn conflicting_implementors_id() { "; let schema = InputSchema::raw(document, subgraph_id); - let animal = ("Animal", entity! { schema => id: "1", legs: 3 }); - let furniture = ("Furniture", entity! { schema => id: "1", legs: 3 }); + let animal = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); + let furniture = ( + "Furniture", + entity! 
{ schema => id: "1", legs: 3, vid: 0i64 }, + ); let query = "query { leggeds(first: 100) { legs } }"; @@ -334,8 +340,11 @@ async fn derived_interface_relationship() { "; let schema = InputSchema::raw(document, subgraph_id); - let forest = ("Forest", entity! { schema => id: "1" }); - let animal = ("Animal", entity! { schema => id: "1", forest: "1" }); + let forest = ("Forest", entity! { schema => id: "1", vid: 0i64 }); + let animal = ( + "Animal", + entity! { schema => id: "1", forest: "1", vid: 0i64 }, + ); let query = "query { forests(first: 100) { dwellers(first: 100) { id } } }"; @@ -362,9 +371,12 @@ async fn two_interfaces() { "; let schema = InputSchema::raw(document, subgraph_id); - let a = ("A", entity! { schema => id: "1", foo: "bla" }); - let b = ("B", entity! { schema => id: "1", bar: 100 }); - let ab = ("AB", entity! { schema => id: "2", foo: "ble", bar: 200 }); + let a = ("A", entity! { schema => id: "1", foo: "bla", vid: 0i64 }); + let b = ("B", entity! { schema => id: "1", bar: 100, vid: 0i64 }); + let ab = ( + "AB", + entity! { schema => id: "2", foo: "ble", bar: 200, vid: 0i64 }, + ); let query = "query { ibars(first: 100, orderBy: bar) { bar } @@ -390,7 +402,7 @@ async fn interface_non_inline_fragment() { let entity = ( "Animal", - entity! { schema => id: "1", name: "cow", legs: 3 }, + entity! { schema => id: "1", name: "cow", legs: 3, vid: 0i64 }, ); // Query only the fragment. @@ -422,9 +434,12 @@ async fn interface_inline_fragment() { let animal = ( "Animal", - entity! { schema => id: "1", name: "cow", legs: 4 }, + entity! { schema => id: "1", name: "cow", legs: 4, vid: 0i64 }, + ); + let bird = ( + "Bird", + entity! { schema => id: "2", airspeed: 24, legs: 2, vid: 0i64 }, ); - let bird = ("Bird", entity! { schema => id: "2", airspeed: 24, legs: 2 }); let query = "query { leggeds(orderBy: legs) { ... on Animal { name } ...on Bird { airspeed } } }"; @@ -616,7 +631,7 @@ async fn fragments_dont_panic() { "Parent", entity! 
{ schema => id: "p2", child: Value::Null, vid: 1i64 }, ); - let child = ("Child", entity! { schema => id:"c" }); + let child = ("Child", entity! { schema => id:"c", vid: 2i64 }); let res = insert_and_query(subgraph_id, document, vec![parent, parent2, child], query) .await @@ -682,7 +697,7 @@ async fn fragments_dont_duplicate_data() { "Parent", entity! { schema => id: "b", children: Vec::::new(), vid: 1i64 }, ); - let child = ("Child", entity! { schema => id:"c" }); + let child = ("Child", entity! { schema => id:"c", vid: 2i64 }); let res = insert_and_query(subgraph_id, document, vec![parent, parent2, child], query) .await @@ -792,8 +807,11 @@ async fn fragments_merge_selections() { } "; - let parent = ("Parent", entity! { schema => id: "p", children: vec!["c"] }); - let child = ("Child", entity! { schema => id: "c", foo: 1 }); + let parent = ( + "Parent", + entity! { schema => id: "p", children: vec!["c"], vid: 0i64 }, + ); + let child = ("Child", entity! { schema => id: "c", foo: 1, vid: 1i64 }); let res = insert_and_query(subgraph_id, document, vec![parent, child], query) .await @@ -849,8 +867,14 @@ async fn merge_fields_not_in_interface() { } }"; - let animal = ("Animal", entity! { schema => id: "cow", human: "fred" }); - let human = ("Human", entity! { schema => id: "fred", animal: "cow" }); + let animal = ( + "Animal", + entity! { schema => id: "cow", human: "fred", vid: 0i64 }, + ); + let human = ( + "Human", + entity! { schema => id: "fred", animal: "cow", vid: 0i64 }, + ); let res = insert_and_query(subgraph_id, document, vec![animal, human], query) .await @@ -923,15 +947,15 @@ async fn nested_interface_fragments() { } }"; - let foo = ("Foo", entity! { schema => id: "foo" }); - let one = ("One", entity! { schema => id: "1", foo1: "foo" }); + let foo = ("Foo", entity! { schema => id: "foo", vid: 0i64 }); + let one = ("One", entity! { schema => id: "1", foo1: "foo", vid: 0i64 }); let two = ( "Two", - entity! 
{ schema => id: "2", foo1: "foo", foo2: "foo" }, + entity! { schema => id: "2", foo1: "foo", foo2: "foo", vid: 0i64 }, ); let three = ( "Three", - entity! { schema => id: "3", foo1: "foo", foo2: "foo", foo3: "foo" }, + entity! { schema => id: "3", foo1: "foo", foo2: "foo", foo3: "foo", vid: 0i64 }, ); let res = insert_and_query(subgraph_id, document, vec![foo, one, two, three], query) @@ -1004,9 +1028,9 @@ async fn nested_interface_fragments_overlapping() { } }"; - let foo = ("Foo", entity! { schema => id: "foo" }); - let one = ("One", entity! { schema => id: "1", foo1: "foo" }); - let two = ("Two", entity! { schema => id: "2", foo1: "foo" }); + let foo = ("Foo", entity! { schema => id: "foo", vid: 0i64 }); + let one = ("One", entity! { schema => id: "1", foo1: "foo", vid: 0i64 }); + let two = ("Two", entity! { schema => id: "2", foo1: "foo", vid: 0i64 }); let res = insert_and_query(subgraph_id, document, vec![foo, one, two], query) .await .unwrap(); @@ -1281,10 +1305,13 @@ async fn mixed_mutability() { let query = "query { events { id } }"; let entities = vec![ - ("Mutable", entity! { schema => id: "mut0", name: "mut0" }), + ( + "Mutable", + entity! { schema => id: "mut0", name: "mut0", vid: 0i64 }, + ), ( "Immutable", - entity! { schema => id: "immo0", name: "immo0" }, + entity! { schema => id: "immo0", name: "immo0", vid: 0i64 }, ), ]; @@ -1335,9 +1362,15 @@ async fn derived_interface_bytes() { let query = "query { pools { trades { id } } }"; let entities = vec![ - ("Pool", entity! { schema => id: b("0xf001") }), - ("Sell", entity! { schema => id: b("0xc0"), pool: "0xf001"}), - ("Buy", entity! { schema => id: b("0xb0"), pool: "0xf001"}), + ("Pool", entity! { schema => id: b("0xf001"), vid: 0i64 }), + ( + "Sell", + entity! { schema => id: b("0xc0"), pool: "0xf001", vid: 0i64}, + ), + ( + "Buy", + entity! 
{ schema => id: b("0xb0"), pool: "0xf001", vid: 0i64}, + ), ]; let res = insert_and_query(subgraph_id, document, entities, query) diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 23ed17f89e4..376c527910a 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -208,11 +208,11 @@ fn insert_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai" }; + let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", vid: 0i64 }; let mogwai_key = make_band_key("mogwai"); cache.set(mogwai_key.clone(), mogwai_data.clone()).unwrap(); - let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }; + let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", vid: 0i64 }; let sigurros_key = make_band_key("sigurros"); cache .set(sigurros_key.clone(), sigurros_data.clone()) @@ -243,8 +243,8 @@ fn overwrite_modifications() { // every set operation as an overwrite. let store = { let entities = vec![ - entity! { SCHEMA => id: "mogwai", name: "Mogwai" }, - entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }, + entity! { SCHEMA => id: "mogwai", name: "Mogwai", vid: 0i64 }, + entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", vid: 0i64 }, ]; MockStore::new(entity_version_map("Band", entities)) }; @@ -252,11 +252,12 @@ fn overwrite_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }; + let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995, vid: 0i64 }; let mogwai_key = make_band_key("mogwai"); cache.set(mogwai_key.clone(), mogwai_data.clone()).unwrap(); - let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994 }; + let sigurros_data = + entity! 
{ SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994, vid: 0i64 }; let sigurros_key = make_band_key("sigurros"); cache .set(sigurros_key.clone(), sigurros_data.clone()) @@ -277,8 +278,9 @@ fn consecutive_modifications() { // Pre-populate the store with data so that we can test setting a field to // `Value::Null`. let store = { - let entities = - vec![entity! { SCHEMA => id: "mogwai", name: "Mogwai", label: "Chemikal Underground" }]; + let entities = vec![ + entity! { SCHEMA => id: "mogwai", name: "Mogwai", label: "Chemikal Underground", vid: 0i64 }, + ]; MockStore::new(entity_version_map("Band", entities)) }; @@ -304,7 +306,7 @@ fn consecutive_modifications() { sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![EntityModification::overwrite( update_key, - entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }, + entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995, vid: 0i64 }, 0, 0 )]) diff --git a/store/test-store/tests/graphql/query.rs b/store/test-store/tests/graphql/query.rs index f3c3e2e1233..e1fafe0f915 100644 --- a/store/test-store/tests/graphql/query.rs +++ b/store/test-store/tests/graphql/query.rs @@ -472,7 +472,7 @@ async fn insert_test_entities( entity! { is => id: "m2", name: "Lisa", mainBand: "b1", bands: vec!["b1"], favoriteCount: 100, birthDate: timestamp.clone(), vid: 1i64 }, ], ), - ("Publisher", vec![entity! { is => id: pub1 }]), + ("Publisher", vec![entity! { is => id: pub1, vid: 0i64 }]), ( "Band", vec![ @@ -492,7 +492,7 @@ async fn insert_test_entities( ( "User", vec![ - entity! { is => id: "u1", name: "User 1", latestSongReview: "r3", latestBandReview: "r1", latestReview: "r3" }, + entity! { is => id: "u1", name: "User 1", latestSongReview: "r3", latestBandReview: "r1", latestReview: "r3", vid: 0i64 }, ], ), ( @@ -528,7 +528,7 @@ async fn insert_test_entities( ( "AnonymousUser", vec![ - entity! 
{ is => id: "u3", name: "Anonymous 3", latestSongReview: "r6", latestBandReview: "r5", latestReview: "r5" }, + entity! { is => id: "u3", name: "Anonymous 3", latestSongReview: "r6", latestBandReview: "r5", latestReview: "r5", vid: 0i64 }, ], ), ( @@ -549,7 +549,9 @@ async fn insert_test_entities( ), ( "Album", - vec![entity! { is => id: "rl1", title: "Pop and Folk", songs: vec![s[3], s[4]] }], + vec![ + entity! { is => id: "rl1", title: "Pop and Folk", songs: vec![s[3], s[4]], vid: 0i64 }, + ], ), ( "Single", diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index 15c1368755a..eeef513d8da 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -296,7 +296,7 @@ fn restart() { // Cause an error by leaving out the non-nullable `count` attribute let entity_ops = vec![EntityOperation::Set { key: count_key("1"), - data: entity! { schema => id: "1" }, + data: entity! { schema => id: "1", vid: 0i64 }, }]; transact_entity_operations( &subgraph_store, @@ -320,7 +320,7 @@ fn restart() { // Retry our write with correct data let entity_ops = vec![EntityOperation::Set { key: count_key("1"), - data: entity! { schema => id: "1", count: 1 }, + data: entity! 
{ schema => id: "1", count: 1, vid: 0i64 }, }]; // `SubgraphStore` caches the correct writable so that this call // uses the restarted writable, and is equivalent to using From a4a0adc5598589ffef0303579d5545226b3af275 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Mon, 28 Oct 2024 21:22:29 +0200 Subject: [PATCH 13/27] fix --- graph/src/components/store/entity_cache.rs | 7 ++++--- graph/src/schema/input/mod.rs | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 58e94362675..24493343b0f 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -453,6 +453,7 @@ impl EntityCache { for (key, update) in self.updates { use EntityModification::*; + let is_poi = key.entity_type.is_poi(); let current = self.current.remove(&key).and_then(|entity| entity); let modification = match (current, update) { // Entity was created @@ -461,7 +462,7 @@ impl EntityCache { updates.remove_null_fields(); let data = Arc::new(updates); self.current.insert(key.clone(), Some(data.cheap_clone())); - let vid = data.vid(); + let vid = if is_poi { 0 } else { data.vid() }; Some(Insert { key, data, @@ -478,7 +479,7 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { - let vid = data.vid(); + let vid = if is_poi { 0 } else { data.vid() }; Some(Overwrite { key, data, @@ -495,7 +496,7 @@ impl EntityCache { let data = Arc::new(data); self.current.insert(key.clone(), Some(data.clone())); if current != data { - let vid = data.vid(); + let vid = if is_poi { 0 } else { data.vid() }; Some(Overwrite { key, data, diff --git a/graph/src/schema/input/mod.rs b/graph/src/schema/input/mod.rs index c7fe8ec9818..a71be5a500f 100644 --- a/graph/src/schema/input/mod.rs +++ b/graph/src/schema/input/mod.rs @@ -1488,6 +1488,7 @@ impl InputSchema { } pub fn has_field_with_name(&self, 
entity_type: &EntityType, field: &str) -> bool { + // TODO: check if it is needed if field == VID { return true; } From 5680a8c63f3b53ec50c1c34e9beafd8ce429ee46 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Thu, 31 Oct 2024 17:38:59 +0200 Subject: [PATCH 14/27] add wrapping --- chain/substreams/src/trigger.rs | 4 +- core/src/subgraph/runner.rs | 4 +- graph/src/components/store/entity_cache.rs | 42 ++--- graph/src/components/store/mod.rs | 4 +- graph/src/data/store/mod.rs | 18 +- runtime/test/src/test.rs | 5 +- runtime/wasm/src/host_exports.rs | 6 +- server/index-node/src/resolver.rs | 2 +- store/postgres/src/relational.rs | 8 +- store/postgres/src/relational_queries.rs | 19 +- store/test-store/src/store.rs | 12 +- store/test-store/tests/graph/entity_cache.rs | 164 ++++++++++-------- store/test-store/tests/graphql/query.rs | 10 +- .../test-store/tests/postgres/aggregation.rs | 7 +- store/test-store/tests/postgres/graft.rs | 9 +- store/test-store/tests/postgres/store.rs | 41 +++-- store/test-store/tests/postgres/writable.rs | 5 +- 17 files changed, 207 insertions(+), 153 deletions(-) diff --git a/chain/substreams/src/trigger.rs b/chain/substreams/src/trigger.rs index 3e6dafcb2f0..8ca187c77af 100644 --- a/chain/substreams/src/trigger.rs +++ b/chain/substreams/src/trigger.rs @@ -8,6 +8,7 @@ use graph::{ subgraph::{MappingError, ProofOfIndexingEvent, SharedProofOfIndexing}, trigger_processor::HostedTrigger, }, + data::store::EntityV, prelude::{ anyhow, async_trait, BlockHash, BlockNumber, BlockState, CheapClone, RuntimeHostBuilder, }, @@ -237,7 +238,8 @@ where logger, ); - state.entity_cache.set(key, entity)?; + // TODO: check if 0 is correct VID + state.entity_cache.set(key, EntityV::new(entity, 0))?; } ParsedChanges::Delete(entity_key) => { let entity_type = entity_key.entity_type.cheap_clone(); diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 02774452f9a..bc1ee5bbeba 100644 --- a/core/src/subgraph/runner.rs +++ 
b/core/src/subgraph/runner.rs @@ -18,6 +18,7 @@ use graph::components::{ subgraph::{MappingError, PoICausalityRegion, ProofOfIndexing, SharedProofOfIndexing}, }; use graph::data::store::scalar::Bytes; +use graph::data::store::EntityV; use graph::data::subgraph::{ schema::{SubgraphError, SubgraphHealth}, SubgraphFeature, @@ -1617,7 +1618,8 @@ async fn update_proof_of_indexing( data.push((entity_cache.schema.poi_block_time(), block_time)); } let poi = entity_cache.make_entity(data)?; - entity_cache.set(key, poi) + // VOI is autogenerated for POI table and our input is ignored + entity_cache.set(key, EntityV::new(poi, 0)) } let _section_guard = stopwatch.start_section("update_proof_of_indexing"); diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 24493343b0f..24962b6d8b6 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use crate::cheap_clone::CheapClone; use crate::components::store::write::EntityModification; use crate::components::store::{self as s, Entity, EntityOperation}; -use crate::data::store::{EntityValidationError, Id, IdType, IntoEntityIterator}; +use crate::data::store::{EntityV, EntityValidationError, Id, IdType, IntoEntityIterator}; use crate::prelude::ENV_VARS; use crate::schema::{EntityKey, InputSchema}; use crate::util::intern::Error as InternError; @@ -29,8 +29,8 @@ pub enum GetScope { #[derive(Debug, Clone)] enum EntityOp { Remove, - Update(Entity), - Overwrite(Entity), + Update(EntityV), + Overwrite(EntityV), } impl EntityOp { @@ -41,7 +41,7 @@ impl EntityOp { use EntityOp::*; match (self, entity) { (Remove, _) => Ok(None), - (Overwrite(new), _) | (Update(new), None) => Ok(Some(new)), + (Overwrite(new), _) | (Update(new), None) => Ok(Some(new.e)), (Update(updates), Some(entity)) => { let mut e = entity.borrow().clone(); e.merge_remove_null_fields(updates)?; @@ -65,7 +65,7 @@ impl EntityOp { match self { 
// This is how `Overwrite` is constructed, by accumulating `Update` onto `Remove`. Remove => *self = Overwrite(update), - Update(current) | Overwrite(current) => current.merge(update), + Update(current) | Overwrite(current) => current.e.merge(update.e), } } } @@ -278,9 +278,9 @@ impl EntityCache { ) -> Result, anyhow::Error> { match op { EntityOp::Update(entity) | EntityOp::Overwrite(entity) - if query.matches(key, entity) => + if query.matches(key, &entity.e) => { - Ok(Some(entity.clone())) + Ok(Some(entity.e.clone())) } EntityOp::Remove => Ok(None), _ => Ok(None), @@ -349,9 +349,9 @@ impl EntityCache { /// with existing data. The entity will be validated against the /// subgraph schema, and any errors will result in an `Err` being /// returned. - pub fn set(&mut self, key: EntityKey, entity: Entity) -> Result<(), anyhow::Error> { + pub fn set(&mut self, key: EntityKey, entity: EntityV) -> Result<(), anyhow::Error> { // check the validate for derived fields - let is_valid = entity.validate(&key).is_ok(); + let is_valid = entity.e.validate(&key).is_ok(); self.entity_op(key.clone(), EntityOp::Update(entity)); @@ -453,33 +453,33 @@ impl EntityCache { for (key, update) in self.updates { use EntityModification::*; - let is_poi = key.entity_type.is_poi(); + // let is_poi = key.entity_type.is_poi(); let current = self.current.remove(&key).and_then(|entity| entity); let modification = match (current, update) { // Entity was created (None, EntityOp::Update(mut updates)) | (None, EntityOp::Overwrite(mut updates)) => { - updates.remove_null_fields(); + updates.e.remove_null_fields(); let data = Arc::new(updates); - self.current.insert(key.clone(), Some(data.cheap_clone())); - let vid = if is_poi { 0 } else { data.vid() }; + self.current + .insert(key.clone(), Some(data.e.clone().into())); Some(Insert { key, - data, + data: data.e.clone().into(), block, end: None, - vid, + vid: data.vid, }) } // Entity may have been changed (Some(current), EntityOp::Update(updates)) => { let 
mut data = current.as_ref().clone(); + let vid = updates.vid; data.merge_remove_null_fields(updates) .map_err(|e| key.unknown_attribute(e))?; let data = Arc::new(data); self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { - let vid = if is_poi { 0 } else { data.vid() }; Some(Overwrite { key, data, @@ -494,15 +494,15 @@ impl EntityCache { // Entity was removed and then updated, so it will be overwritten (Some(current), EntityOp::Overwrite(data)) => { let data = Arc::new(data); - self.current.insert(key.clone(), Some(data.clone())); - if current != data { - let vid = if is_poi { 0 } else { data.vid() }; + self.current + .insert(key.clone(), Some(data.e.clone().into())); + if current != data.e.clone().into() { Some(Overwrite { key, - data, + data: data.e.clone().into(), block, end: None, - vid, + vid: data.vid, }) } else { None diff --git a/graph/src/components/store/mod.rs b/graph/src/components/store/mod.rs index 31b0e62cfae..9713b78c150 100644 --- a/graph/src/components/store/mod.rs +++ b/graph/src/components/store/mod.rs @@ -30,7 +30,7 @@ use crate::cheap_clone::CheapClone; use crate::components::store::write::EntityModification; use crate::constraint_violation; use crate::data::store::scalar::Bytes; -use crate::data::store::{Id, IdList, Value}; +use crate::data::store::{EntityV, Id, IdList, Value}; use crate::data::value::Word; use crate::data_source::CausalityRegion; use crate::derive::CheapClone; @@ -829,7 +829,7 @@ where pub enum EntityOperation { /// Locates the entity specified by `key` and sets its attributes according to the contents of /// `data`. If no entity exists with this key, creates a new entity. - Set { key: EntityKey, data: Entity }, + Set { key: EntityKey, data: EntityV }, /// Removes an entity with the specified key, if one exists. 
Remove { key: EntityKey }, diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 9707468146a..5b9ece22657 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -913,6 +913,7 @@ impl Entity { Id::try_from(self.get("id").unwrap().clone()).expect("the id is set to a valid value") } + // TODO: only for tests! pub fn vid(&self) -> i64 { self.get("vid") .expect("the vid is set") @@ -934,8 +935,8 @@ impl Entity { /// If a key exists in both entities, the value from `update` is chosen. /// If a key only exists on one entity, the value from that entity is chosen. /// If a key is set to `Value::Null` in `update`, the key/value pair is removed. - pub fn merge_remove_null_fields(&mut self, update: Entity) -> Result<(), InternError> { - for (key, value) in update.0.into_iter() { + pub fn merge_remove_null_fields(&mut self, update: EntityV) -> Result<(), InternError> { + for (key, value) in update.e.0.into_iter() { match value { Value::Null => self.0.remove(&key), _ => self.0.insert(&key, value)?, @@ -1086,6 +1087,19 @@ impl std::fmt::Debug for Entity { } } +/// An entity is represented as a map of attribute names to values. +#[derive(Debug, Clone, CacheWeight, PartialEq, Eq, Serialize)] +pub struct EntityV { + pub e: Entity, + pub vid: i64, +} + +impl EntityV { + pub fn new(e: Entity, vid: i64) -> Self { + Self { e, vid } + } +} + /// An object that is returned from a query. It's a an `r::Value` which /// carries the attributes of the object (`__typename`, `id` etc.) 
and /// possibly a pointer to its parent if the query that constructed it is one diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index ad3d3588f31..78fa6e49657 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -477,13 +477,12 @@ async fn test_ipfs_block() { const USER_DATA: &str = "user_data"; fn make_thing(id: &str, value: &str, vid: i64) -> (String, EntityModification) { - const DOCUMENT: &str = - " type Thing @entity { id: String!, value: String!, extra: String, vid: Int8 }"; + const DOCUMENT: &str = " type Thing @entity { id: String!, value: String!, extra: String }"; lazy_static! { static ref SCHEMA: InputSchema = InputSchema::raw(DOCUMENT, "doesntmatter"); static ref THING_TYPE: EntityType = SCHEMA.entity_type("Thing").unwrap(); } - let data = entity! { SCHEMA => id: id, value: value, extra: USER_DATA, vid:vid }; + let data = entity! { SCHEMA => id: id, value: value, extra: USER_DATA}; let key = THING_TYPE.parse_key(id).unwrap(); ( format!("{{ \"id\": \"{}\", \"value\": \"{}\"}}", id, value), diff --git a/runtime/wasm/src/host_exports.rs b/runtime/wasm/src/host_exports.rs index bc0071c3372..3fafdc0e30a 100644 --- a/runtime/wasm/src/host_exports.rs +++ b/runtime/wasm/src/host_exports.rs @@ -18,7 +18,7 @@ use graph::components::store::{EnsLookup, GetScope, LoadRelatedRequest}; use graph::components::subgraph::{ InstanceDSTemplate, PoICausalityRegion, ProofOfIndexingEvent, SharedProofOfIndexing, }; -use graph::data::store::{self}; +use graph::data::store::{self, EntityV}; use graph::data_source::{CausalityRegion, DataSource, EntityTypeAccess}; use graph::ensure; use graph::prelude::ethabi::param_type::Reader; @@ -315,7 +315,7 @@ impl HostExports { data.insert(store::ID.clone(), value); } } - data.insert(store::VID.clone(), Value::Int8(vid)); + // data.insert(store::VID.clone(), Value::Int8(vid)); self.check_invalid_fields( self.data_source.api_version.clone(), @@ -352,7 +352,7 @@ impl HostExports { 
state.metrics.track_entity_write(&entity_type, &entity); - state.entity_cache.set(key, entity)?; + state.entity_cache.set(key, EntityV::new(entity, vid))?; Ok(()) } diff --git a/server/index-node/src/resolver.rs b/server/index-node/src/resolver.rs index fb3937afdc2..870d319dcf1 100644 --- a/server/index-node/src/resolver.rs +++ b/server/index-node/src/resolver.rs @@ -768,7 +768,7 @@ fn entity_changes_to_graphql(entity_changes: Vec) -> r::Value { .push(key.entity_id); } EntityOperation::Set { key, data } => { - updates.entry(key.entity_type).or_default().push(data); + updates.entry(key.entity_type).or_default().push(data.e); } } } diff --git a/store/postgres/src/relational.rs b/store/postgres/src/relational.rs index f6a14c3a5fa..e58f9cf8c09 100644 --- a/store/postgres/src/relational.rs +++ b/store/postgres/src/relational.rs @@ -64,7 +64,7 @@ use crate::{ }, }; use graph::components::store::DerivedEntityQuery; -use graph::data::store::{Id, IdList, IdType, BYTES_SCALAR}; +use graph::data::store::{EntityV, Id, IdList, IdType, BYTES_SCALAR}; use graph::data::subgraph::schema::POI_TABLE; use graph::prelude::{ anyhow, info, BlockNumber, DeploymentHash, Entity, EntityChange, EntityOperation, Logger, @@ -693,12 +693,12 @@ impl Layout { for entity_data in inserts_or_updates.into_iter() { let entity_type = entity_data.entity_type(&self.input_schema); - let data: Entity = entity_data.deserialize_with_layout(self, None)?; - let entity_id = data.id(); + let data: EntityV = entity_data.deserialize_with_layout(self, None)?; + let entity_id = data.e.id(); processed_entities.insert((entity_type.clone(), entity_id.clone())); changes.push(EntityOperation::Set { - key: entity_type.key_in(entity_id, CausalityRegion::from_entity(&data)), + key: entity_type.key_in(entity_id, CausalityRegion::from_entity(&data.e)), data, }); } diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 3776b469df7..11405b62934 100644 --- 
a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -13,7 +13,7 @@ use diesel::sql_types::Untyped; use diesel::sql_types::{Array, BigInt, Binary, Bool, Int8, Integer, Jsonb, Text, Timestamptz}; use graph::components::store::write::{EntityWrite, RowGroup, WriteChunk}; use graph::components::store::{Child as StoreChild, DerivedEntityQuery}; -use graph::data::store::{Id, IdType, NULL}; +use graph::data::store::{EntityV, Id, IdType, NULL}; use graph::data::store::{IdList, IdRef, QueryObject}; use graph::data::subgraph::schema::POI_TABLE; use graph::data::value::{Object, Word}; @@ -201,6 +201,23 @@ impl FromEntityData for Entity { } } +impl FromEntityData for EntityV { + const WITH_INTERNAL_KEYS: bool = false; + + type Value = graph::prelude::Value; + + fn from_data>>( + schema: &InputSchema, + parent_id: Option, + iter: I, + ) -> Result { + debug_assert_eq!(None, parent_id); + let e = schema.try_make_entity(iter).map_err(StoreError::from)?; + let vid = e.vid(); + Ok(EntityV::new(e, vid)) + } +} + impl FromEntityData for QueryObject { const WITH_INTERNAL_KEYS: bool = true; diff --git a/store/test-store/src/store.rs b/store/test-store/src/store.rs index 0b58da57cf2..3ff2ec0dcd7 100644 --- a/store/test-store/src/store.rs +++ b/store/test-store/src/store.rs @@ -6,6 +6,7 @@ use graph::components::store::BlockStore; use graph::data::graphql::load_manager::LoadManager; use graph::data::query::QueryResults; use graph::data::query::QueryTarget; +use graph::data::store::EntityV; use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError}; use graph::data::subgraph::SubgraphFeature; use graph::data_source::DataSource; @@ -432,12 +433,13 @@ pub async fn insert_entities( deployment: &DeploymentLocator, entities: Vec<(EntityType, Entity)>, ) -> Result<(), StoreError> { - let insert_ops = entities - .into_iter() - .map(|(entity_type, data)| EntityOperation::Set { + let insert_ops = entities.into_iter().map(|(entity_type, data)| { + 
let vid = data.vid(); + EntityOperation::Set { key: entity_type.key(data.id()), - data, - }); + data: EntityV::new(data, vid), + } + }); transact_entity_operations( &SUBGRAPH_STORE, diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 376c527910a..49d696e2f0a 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -4,7 +4,7 @@ use graph::components::store::{ DeploymentCursorTracker, DerivedEntityQuery, GetScope, LoadRelatedRequest, ReadStore, StoredDynamicDataSource, WritableStore, }; -use graph::data::store::Id; +use graph::data::store::{EntityV, Id}; use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError, SubgraphHealth}; use graph::data_source::CausalityRegion; use graph::schema::{EntityKey, EntityType, InputSchema}; @@ -208,14 +208,16 @@ fn insert_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", vid: 0i64 }; + let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai"}; let mogwai_key = make_band_key("mogwai"); - cache.set(mogwai_key.clone(), mogwai_data.clone()).unwrap(); + cache + .set(mogwai_key.clone(), EntityV::new(mogwai_data.clone(), 0)) + .unwrap(); - let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", vid: 0i64 }; + let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros"}; let sigurros_key = make_band_key("sigurros"); cache - .set(sigurros_key.clone(), sigurros_data.clone()) + .set(sigurros_key.clone(), EntityV::new(sigurros_data.clone(), 0)) .unwrap(); let result = cache.as_modifications(0); @@ -243,8 +245,8 @@ fn overwrite_modifications() { // every set operation as an overwrite. let store = { let entities = vec![ - entity! { SCHEMA => id: "mogwai", name: "Mogwai", vid: 0i64 }, - entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", vid: 0i64 }, + entity! 
{ SCHEMA => id: "mogwai", name: "Mogwai" }, + entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }, ]; MockStore::new(entity_version_map("Band", entities)) }; @@ -252,15 +254,16 @@ fn overwrite_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995, vid: 0i64 }; + let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995}; let mogwai_key = make_band_key("mogwai"); - cache.set(mogwai_key.clone(), mogwai_data.clone()).unwrap(); + cache + .set(mogwai_key.clone(), EntityV::new(mogwai_data.clone(), 0)) + .unwrap(); - let sigurros_data = - entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994, vid: 0i64 }; + let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994 }; let sigurros_key = make_band_key("sigurros"); cache - .set(sigurros_key.clone(), sigurros_data.clone()) + .set(sigurros_key.clone(), EntityV::new(sigurros_data.clone(), 0)) .unwrap(); let result = cache.as_modifications(0); @@ -292,12 +295,14 @@ fn consecutive_modifications() { let update_data = entity! { SCHEMA => id: "mogwai", founded: 1995, label: "Rock Action Records" }; let update_key = make_band_key("mogwai"); - cache.set(update_key, update_data).unwrap(); + cache.set(update_key, EntityV::new(update_data, 0)).unwrap(); // Then, just reset the "label". let update_data = entity! { SCHEMA => id: "mogwai", label: Value::Null }; let update_key = make_band_key("mogwai"); - cache.set(update_key.clone(), update_data).unwrap(); + cache + .set(update_key.clone(), EntityV::new(update_data, 0)) + .unwrap(); // We expect a single overwrite modification for the above that leaves "id" // and "name" untouched, sets "founded" and removes the "label" field. 
@@ -468,13 +473,15 @@ fn create_account_entity(id: &str, name: &str, email: &str, age: i32, vid: i64) EntityOperation::Set { key: ACCOUNT_TYPE.parse_key(id).unwrap(), - data: test_entity, + data: EntityV::new(test_entity, vid), } } -fn create_wallet_entity(id: &str, account_id: &Id, balance: i32, vid: i64) -> Entity { +fn create_wallet_entity(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityV { let account_id = Value::from(account_id.clone()); - entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance, vid: vid} + let e = + entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance, vid: vid}; + EntityV::new(e, vid) } fn create_wallet_operation(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityOperation { let test_wallet = create_wallet_entity(id, account_id, balance, vid); @@ -495,9 +502,9 @@ fn check_for_account_with_multiple_wallets() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 0); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 1); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 2); + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 0).e; + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 1).e; + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 2).e; let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; assert_eq!(result, filter_vid(expeted_vec)); @@ -515,7 +522,7 @@ fn check_for_account_with_single_wallet() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 1); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 1).e; let expeted_vec = vec![wallet_1]; assert_eq!(result, filter_vid(expeted_vec)); @@ -599,9 +606,9 @@ fn check_for_insert_async_store() { causality_region: 
CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 21); - let wallet_2 = create_wallet_entity("5", &account_id, 79_i32, 22); - let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 23); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 21).e; + let wallet_2 = create_wallet_entity("5", &account_id, 79_i32, 22).e; + let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 23).e; let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; assert_eq!(filter_vid(result), filter_vid(expeted_vec)); @@ -632,9 +639,9 @@ fn check_for_insert_async_not_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 1); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 1).e; + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2).e; + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3).e; let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; assert_eq!(result, filter_vid(expeted_vec)); @@ -651,7 +658,8 @@ fn check_for_update_async_related() { let new_data = match wallet_entity_update { EntityOperation::Set { ref data, .. 
} => data.clone(), _ => unreachable!(), - }; + } + .e; assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data); // insert a new wallet transact_entity_operations( @@ -670,8 +678,8 @@ fn check_for_update_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 12); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 13); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 12).e; + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 13).e; let expeted_vec = vec![new_data, wallet_2, wallet_3]; assert_eq!(filter_vid(result), filter_vid(expeted_vec)); @@ -700,57 +708,61 @@ fn check_for_delete_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2).e; + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3).e; let expeted_vec = vec![wallet_2, wallet_3]; assert_eq!(result, filter_vid(expeted_vec)); }); } -#[test] -fn scoped_get() { - run_store_test(|mut cache, _store, _deployment, _writable| async move { - // Key for an existing entity that is in the store - let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); - let key1 = WALLET_TYPE.parse_key("1").unwrap(); - let wallet1 = create_wallet_entity("1", &account1, 67, 1); - - // Create a new entity that is not in the store - let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); - let wallet5 = create_wallet_entity("5", &account5, 100, 5); - let key5 = WALLET_TYPE.parse_key("5").unwrap(); - cache.set(key5.clone(), wallet5.clone()).unwrap(); - - // For the new entity, we can retrieve it with either scope - let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); - assert_eq!(Some(&wallet5), 
act5.as_ref().map(|e| e.as_ref())); - let act5 = cache.get(&key5, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); - - // For an entity in the store, we can not get it `InBlock` but with - // `Store` - let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); - assert_eq!(None, act1); - let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!( - filter_vid(vec![wallet1.clone()]), - vec![act1.as_ref().map(|e| e.as_ref()).unwrap().clone()] - ); - // Even after reading from the store, the entity is not visible with - // `InBlock` - let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); - assert_eq!(None, act1); - // But if it gets updated, it becomes visible with either scope - let mut wallet1 = wallet1; - wallet1.set("balance", 70).unwrap(); - cache.set(key1.clone(), wallet1.clone()).unwrap(); - let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); - assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); - let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); - }) -} +// #[test] +// fn scoped_get() { +// run_store_test(|mut cache, _store, _deployment, _writable| async move { +// // Key for an existing entity that is in the store +// let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); +// let key1 = WALLET_TYPE.parse_key("1").unwrap(); +// let wallet1 = create_wallet_entity("1", &account1, 67, 1); + +// // Create a new entity that is not in the store +// let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); +// let wallet5 = create_wallet_entity("5", &account5, 100, 5); +// let key5 = WALLET_TYPE.parse_key("5").unwrap(); +// cache +// .set(key5.clone(), EntityV::new(wallet5.clone(), 5)) +// .unwrap(); + +// // For the new entity, we can retrieve it with either scope +// let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); +// assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); +// let act5 = cache.get(&key5, 
GetScope::Store).unwrap(); +// assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); + +// // For an entity in the store, we can not get it `InBlock` but with +// // `Store` +// let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); +// assert_eq!(None, act1); +// let act1 = cache.get(&key1, GetScope::Store).unwrap(); +// assert_eq!( +// filter_vid(vec![wallet1.clone()]), +// vec![act1.as_ref().map(|e| e.as_ref()).unwrap().clone()] +// ); +// // Even after reading from the store, the entity is not visible with +// // `InBlock` +// let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); +// assert_eq!(None, act1); +// // But if it gets updated, it becomes visible with either scope +// let mut wallet1 = wallet1; +// wallet1.set("balance", 70).unwrap(); +// cache +// .set(key1.clone(), EntityV::new(wallet1.clone(), 1)) +// .unwrap(); +// let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); +// assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); +// let act1 = cache.get(&key1, GetScope::Store).unwrap(); +// assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); +// }) +// } /// Entities should never contain a `__typename` or `g$parent_id` field, if /// they do, that can cause PoI divergences, because entities will differ @@ -791,6 +803,6 @@ fn no_interface_mods() { let entity = entity! 
{ LOAD_RELATED_SUBGRAPH => id: "1", balance: 100 }; - cache.set(key, entity).unwrap_err(); + cache.set(key, EntityV::new(entity, 0)).unwrap_err(); }) } diff --git a/store/test-store/tests/graphql/query.rs b/store/test-store/tests/graphql/query.rs index e1fafe0f915..59126171bff 100644 --- a/store/test-store/tests/graphql/query.rs +++ b/store/test-store/tests/graphql/query.rs @@ -1,6 +1,7 @@ use graph::blockchain::{Block, BlockTime}; use graph::data::query::Trace; use graph::data::store::scalar::Timestamp; +use graph::data::store::EntityV; use graph::data::subgraph::schema::DeploymentCreate; use graph::data::subgraph::LATEST_VERSION; use graph::entity; @@ -424,9 +425,12 @@ async fn insert_test_entities( .into_iter() .map(|(typename, entities)| { let entity_type = schema.entity_type(typename).unwrap(); - entities.into_iter().map(move |data| EntityOperation::Set { - key: entity_type.key(data.id()), - data, + entities.into_iter().map(move |data| { + let vid = data.vid(); + EntityOperation::Set { + key: entity_type.key(data.id()), + data: EntityV::new(data, vid), + } }) }) .flatten() diff --git a/store/test-store/tests/postgres/aggregation.rs b/store/test-store/tests/postgres/aggregation.rs index 223d62c40ef..8db6138abbb 100644 --- a/store/test-store/tests/postgres/aggregation.rs +++ b/store/test-store/tests/postgres/aggregation.rs @@ -1,6 +1,7 @@ use std::fmt::Write; use std::{future::Future, sync::Arc}; +use graph::data::store::EntityV; use graph::{ blockchain::{block_stream::FirehoseCursor, BlockPtr, BlockTime}, components::{ @@ -82,7 +83,11 @@ pub async fn insert( .map(|data| { let data_type = schema.entity_type("Data").unwrap(); let key = data_type.key(data.id()); - EntityOperation::Set { data, key } + let vid = data.vid(); + EntityOperation::Set { + data: EntityV::new(data, vid), + key, + } }) .collect(); diff --git a/store/test-store/tests/postgres/graft.rs b/store/test-store/tests/postgres/graft.rs index 21fff97b62f..e52983f1663 100644 --- 
a/store/test-store/tests/postgres/graft.rs +++ b/store/test-store/tests/postgres/graft.rs @@ -9,7 +9,7 @@ use graph::components::store::{ DeploymentLocator, EntityOrder, EntityQuery, PruneReporter, PruneRequest, PruningStrategy, VersionStats, }; -use graph::data::store::{scalar, Id}; +use graph::data::store::{scalar, EntityV, Id}; use graph::data::subgraph::schema::*; use graph::data::subgraph::*; use graph::semver::Version; @@ -258,13 +258,12 @@ fn create_test_entity( weight: Value::BigDecimal(weight.into()), coffee: coffee, favorite_color: favorite_color, - vid: vid, }; let entity_type = TEST_SUBGRAPH_SCHEMA.entity_type(entity_type).unwrap(); EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), - data: test_entity, + data: EntityV::new(test_entity, vid), } } @@ -330,10 +329,10 @@ async fn check_graft( // Make our own entries for block 2 shaq.set("email", "shaq@gmail.com").unwrap(); - shaq.set("vid", 5i64).unwrap(); + // shaq.set("vid", 5i64).unwrap(); let op = EntityOperation::Set { key: user_type.parse_key("3").unwrap(), - data: shaq, + data: EntityV::new(shaq, 5), }; transact_and_wait(&store, &deployment, BLOCKS[2].clone(), vec![op]) .await diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index 7785ea11cdd..6cf68a17f10 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -2,6 +2,7 @@ use graph::blockchain::block_stream::FirehoseCursor; use graph::blockchain::BlockTime; use graph::data::graphql::ext::TypeDefinitionExt; use graph::data::query::QueryTarget; +use graph::data::store::EntityV; use graph::data::subgraph::schema::DeploymentCreate; use graph::data_source::common::MappingABI; use graph::futures01::{future, Stream}; @@ -289,12 +290,11 @@ fn create_test_entity( weight: Value::BigDecimal(weight.into()), coffee: coffee, favorite_color: favorite_color, - vid: vid, }; EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), - data: test_entity, 
+ data: EntityV::new(test_entity, vid), } } @@ -444,7 +444,7 @@ fn update_existing() { }; // Verify that the entity before updating is different from what we expect afterwards - assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data); + assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data.e); // Set test entity; as the entity already exists an update should be performed let count = get_entity_count(store.clone(), &deployment.hash); @@ -459,13 +459,16 @@ fn update_existing() { assert_eq!(count, get_entity_count(store.clone(), &deployment.hash)); // Verify that the entity in the store has changed to what we have set. - let bin_name = match new_data.get("bin_name") { + let bin_name = match new_data.e.get("bin_name") { Some(Value::Bytes(bytes)) => bytes.clone(), _ => unreachable!(), }; - new_data.insert("bin_name", Value::Bytes(bin_name)).unwrap(); - assert_eq!(writable.get(&entity_key).unwrap(), Some(new_data)); + new_data + .e + .insert("bin_name", Value::Bytes(bin_name)) + .unwrap(); + assert_eq!(writable.get(&entity_key).unwrap(), Some(new_data.e)); }) } @@ -475,8 +478,7 @@ fn partially_update_existing() { let entity_key = USER_TYPE.parse_key("1").unwrap(); let schema = writable.input_schema(); - let partial_entity = - entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 11i64 }; + let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; let original_entity = writable .get(&entity_key) @@ -490,7 +492,7 @@ fn partially_update_existing() { TEST_BLOCK_3_PTR.clone(), vec![EntityOperation::Set { key: entity_key.clone(), - data: partial_entity.clone(), + data: EntityV::new(partial_entity.clone(), 11), }], ) .await @@ -1086,8 +1088,7 @@ fn revert_block_with_partial_update() { let entity_key = USER_TYPE.parse_key("1").unwrap(); let schema = writable.input_schema(); - let partial_entity = - entity! 
{ schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 5i64 }; + let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; let original_entity = writable.get(&entity_key).unwrap().expect("missing entity"); @@ -1098,7 +1099,7 @@ fn revert_block_with_partial_update() { TEST_BLOCK_3_PTR.clone(), vec![EntityOperation::Set { key: entity_key.clone(), - data: partial_entity.clone(), + data: EntityV::new(partial_entity.clone(), 5), }], ) .await @@ -1182,8 +1183,7 @@ fn revert_block_with_dynamic_data_source_operations() { // Create operations to add a user let user_key = USER_TYPE.parse_key("1").unwrap(); - let partial_entity = - entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 5i64 }; + let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; // Get the original user for comparisons let original_user = writable.get(&user_key).unwrap().expect("missing entity"); @@ -1194,7 +1194,7 @@ fn revert_block_with_dynamic_data_source_operations() { let ops = vec![EntityOperation::Set { key: user_key.clone(), - data: partial_entity.clone(), + data: EntityV::new(partial_entity.clone(), 5), }]; // Add user and dynamic data source to the store @@ -1317,7 +1317,7 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { .iter() .map(|(id, data)| EntityOperation::Set { key: USER_TYPE.parse_key(id.as_str()).unwrap(), - data: data.clone(), + data: EntityV::new(data.clone(), data.vid()), }) .collect(), ) @@ -1325,10 +1325,10 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { .unwrap(); // Update an entity in the store - let updated_entity = entity! { schema => id: "1", name: "Johnny", vid: 7i64 }; + let updated_entity = entity! 
{ schema => id: "1", name: "Johnny" }; let update_op = EntityOperation::Set { key: USER_TYPE.parse_key("1").unwrap(), - data: updated_entity.clone(), + data: EntityV::new(updated_entity.clone(), 7), }; // Delete an entity in the store @@ -1832,12 +1832,11 @@ fn window() { age: i32, vid: i64, ) -> EntityOperation { - let entity = - entity! { TEST_SUBGRAPH_SCHEMA => id: id, age: age, favorite_color: color, vid: vid }; + let entity = entity! { TEST_SUBGRAPH_SCHEMA => id: id, age: age, favorite_color: color }; EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), - data: entity, + data: EntityV::new(entity, vid), } } diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index eeef513d8da..038b93f462f 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -143,7 +143,6 @@ async fn insert_count( let data = entity! { TEST_SUBGRAPH_SCHEMA => id: "1", count: count as i32, - vid: count as i64, }; let entity_op = if block != 3 && block != 5 && block != 7 { EntityOperation::Set { @@ -296,7 +295,7 @@ fn restart() { // Cause an error by leaving out the non-nullable `count` attribute let entity_ops = vec![EntityOperation::Set { key: count_key("1"), - data: entity! { schema => id: "1", vid: 0i64 }, + data: EntityV::new(entity! { schema => id: "1"}, 0), }]; transact_entity_operations( &subgraph_store, @@ -320,7 +319,7 @@ fn restart() { // Retry our write with correct data let entity_ops = vec![EntityOperation::Set { key: count_key("1"), - data: entity! { schema => id: "1", count: 1, vid: 0i64 }, + data: EntityV::new(entity! 
{ schema => id: "1", count: 1}, 0), }]; // `SubgraphStore` caches the correct writable so that this call // uses the restarted writable, and is equivalent to using From 63a3cb5b28902a24935dc793d84fbd1fc0bb4a44 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Wed, 6 Nov 2024 18:07:41 +0200 Subject: [PATCH 15/27] cleanup --- graph/src/components/store/entity_cache.rs | 22 ++--- graph/src/schema/input/mod.rs | 6 +- store/postgres/src/relational_queries.rs | 8 +- store/test-store/tests/graph/entity_cache.rs | 95 ++++++++++---------- store/test-store/tests/postgres/graft.rs | 5 +- store/test-store/tests/postgres/store.rs | 10 +-- 6 files changed, 71 insertions(+), 75 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 24962b6d8b6..6ea8b988bb2 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -459,16 +459,16 @@ impl EntityCache { // Entity was created (None, EntityOp::Update(mut updates)) | (None, EntityOp::Overwrite(mut updates)) => { + let vid = updates.vid; updates.e.remove_null_fields(); - let data = Arc::new(updates); - self.current - .insert(key.clone(), Some(data.e.clone().into())); + let data = Arc::new(updates.e.clone()); + self.current.insert(key.clone(), Some(data.cheap_clone())); Some(Insert { key, - data: data.e.clone().into(), + data, block, end: None, - vid: data.vid, + vid, }) } // Entity may have been changed @@ -493,16 +493,16 @@ impl EntityCache { } // Entity was removed and then updated, so it will be overwritten (Some(current), EntityOp::Overwrite(data)) => { - let data = Arc::new(data); - self.current - .insert(key.clone(), Some(data.e.clone().into())); - if current != data.e.clone().into() { + let vid = data.vid; + let data = Arc::new(data.e.clone()); + self.current.insert(key.clone(), Some(data.cheap_clone())); + if current != data { Some(Overwrite { key, - data: data.e.clone().into(), + data, block, end: None, - vid: 
data.vid, + vid, }) } else { None diff --git a/graph/src/schema/input/mod.rs b/graph/src/schema/input/mod.rs index a71be5a500f..222b55bea3a 100644 --- a/graph/src/schema/input/mod.rs +++ b/graph/src/schema/input/mod.rs @@ -1489,9 +1489,9 @@ impl InputSchema { pub fn has_field_with_name(&self, entity_type: &EntityType, field: &str) -> bool { // TODO: check if it is needed - if field == VID { - return true; - } + // if field == VID { + // return true; + // } let field = self.inner.pool.lookup(field); match field { diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 11405b62934..5bb462dc4bd 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -15,7 +15,7 @@ use graph::components::store::write::{EntityWrite, RowGroup, WriteChunk}; use graph::components::store::{Child as StoreChild, DerivedEntityQuery}; use graph::data::store::{EntityV, Id, IdType, NULL}; use graph::data::store::{IdList, IdRef, QueryObject}; -use graph::data::subgraph::schema::POI_TABLE; +// use graph::data::subgraph::schema::POI_TABLE; use graph::data::value::{Object, Word}; use graph::data_source::CausalityRegion; use graph::prelude::{ @@ -2559,7 +2559,8 @@ impl<'a> QueryFragment for InsertQuery<'a> { let out = &mut out; out.unsafe_to_cache_prepared(); - let not_poi = self.table.name.as_str() != POI_TABLE; + // let not_poi = self.table.name.as_str() != POI_TABLE; + let not_poi = !self.table.object.is_poi(); // Construct a query // insert into schema.table(column, ...) 
@@ -5107,7 +5108,8 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> { out.unsafe_to_cache_prepared(); - let not_poi = self.dst.name.as_str() != POI_TABLE; + // let not_poi = self.dst.name.as_str() != POI_TABLE; + let not_poi = !self.dst.object.is_poi(); // Construct a query // insert into {dst}({columns}) diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 49d696e2f0a..92050d6f07f 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -469,7 +469,7 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator fn create_account_entity(id: &str, name: &str, email: &str, age: i32, vid: i64) -> EntityOperation { let test_entity = - entity! { LOAD_RELATED_SUBGRAPH => id: id, name: name, email: email, age: age, vid: vid }; + entity! { LOAD_RELATED_SUBGRAPH => id: id, name: name, email: email, age: age }; EntityOperation::Set { key: ACCOUNT_TYPE.parse_key(id).unwrap(), @@ -479,8 +479,7 @@ fn create_account_entity(id: &str, name: &str, email: &str, age: i32, vid: i64) fn create_wallet_entity(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityV { let account_id = Value::from(account_id.clone()); - let e = - entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance, vid: vid}; + let e = entity! 
{ LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance}; EntityV::new(e, vid) } fn create_wallet_operation(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityOperation { @@ -716,53 +715,49 @@ fn check_for_delete_async_related() { }); } -// #[test] -// fn scoped_get() { -// run_store_test(|mut cache, _store, _deployment, _writable| async move { -// // Key for an existing entity that is in the store -// let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); -// let key1 = WALLET_TYPE.parse_key("1").unwrap(); -// let wallet1 = create_wallet_entity("1", &account1, 67, 1); - -// // Create a new entity that is not in the store -// let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); -// let wallet5 = create_wallet_entity("5", &account5, 100, 5); -// let key5 = WALLET_TYPE.parse_key("5").unwrap(); -// cache -// .set(key5.clone(), EntityV::new(wallet5.clone(), 5)) -// .unwrap(); - -// // For the new entity, we can retrieve it with either scope -// let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); -// assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); -// let act5 = cache.get(&key5, GetScope::Store).unwrap(); -// assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); - -// // For an entity in the store, we can not get it `InBlock` but with -// // `Store` -// let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); -// assert_eq!(None, act1); -// let act1 = cache.get(&key1, GetScope::Store).unwrap(); -// assert_eq!( -// filter_vid(vec![wallet1.clone()]), -// vec![act1.as_ref().map(|e| e.as_ref()).unwrap().clone()] -// ); -// // Even after reading from the store, the entity is not visible with -// // `InBlock` -// let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); -// assert_eq!(None, act1); -// // But if it gets updated, it becomes visible with either scope -// let mut wallet1 = wallet1; -// wallet1.set("balance", 70).unwrap(); -// cache -// .set(key1.clone(), EntityV::new(wallet1.clone(), 1)) -// .unwrap(); -// let 
act1 = cache.get(&key1, GetScope::InBlock).unwrap(); -// assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); -// let act1 = cache.get(&key1, GetScope::Store).unwrap(); -// assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); -// }) -// } +#[test] +fn scoped_get() { + run_store_test(|mut cache, _store, _deployment, _writable| async move { + // Key for an existing entity that is in the store + let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); + let key1 = WALLET_TYPE.parse_key("1").unwrap(); + let wallet1 = create_wallet_entity("1", &account1, 67, 1); + + // Create a new entity that is not in the store + let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); + let wallet5 = create_wallet_entity("5", &account5, 100, 5); + let key5 = WALLET_TYPE.parse_key("5").unwrap(); + cache.set(key5.clone(), wallet5.clone()).unwrap(); + + // For the new entity, we can retrieve it with either scope + let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); + assert_eq!(Some(&wallet5.e), act5.as_ref().map(|e| e.as_ref())); + let act5 = cache.get(&key5, GetScope::Store).unwrap(); + assert_eq!(Some(&wallet5.e), act5.as_ref().map(|e| e.as_ref())); + + // For an entity in the store, we can not get it `InBlock` but with + // `Store` + let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); + assert_eq!(None, act1); + let act1 = cache.get(&key1, GetScope::Store).unwrap(); + assert_eq!( + filter_vid(vec![wallet1.e.clone()]), + vec![act1.as_ref().map(|e| e.as_ref()).unwrap().clone()] + ); + // Even after reading from the store, the entity is not visible with + // `InBlock` + let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); + assert_eq!(None, act1); + // But if it gets updated, it becomes visible with either scope + let mut wallet1 = wallet1; + wallet1.e.set("balance", 70).unwrap(); + cache.set(key1.clone(), wallet1.clone()).unwrap(); + let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); + assert_eq!(Some(&wallet1.e), act1.as_ref().map(|e| e.as_ref())); + let 
act1 = cache.get(&key1, GetScope::Store).unwrap(); + assert_eq!(Some(&wallet1.e), act1.as_ref().map(|e| e.as_ref())); + }) +} /// Entities should never contain a `__typename` or `g$parent_id` field, if /// they do, that can cause PoI divergences, because entities will differ diff --git a/store/test-store/tests/postgres/graft.rs b/store/test-store/tests/postgres/graft.rs index e52983f1663..59cbe5e3f0a 100644 --- a/store/test-store/tests/postgres/graft.rs +++ b/store/test-store/tests/postgres/graft.rs @@ -257,7 +257,7 @@ fn create_test_entity( seconds_age: age * 31557600, weight: Value::BigDecimal(weight.into()), coffee: coffee, - favorite_color: favorite_color, + favorite_color: favorite_color }; let entity_type = TEST_SUBGRAPH_SCHEMA.entity_type(entity_type).unwrap(); @@ -329,10 +329,9 @@ async fn check_graft( // Make our own entries for block 2 shaq.set("email", "shaq@gmail.com").unwrap(); - // shaq.set("vid", 5i64).unwrap(); let op = EntityOperation::Set { key: user_type.parse_key("3").unwrap(), - data: EntityV::new(shaq, 5), + data: EntityV::new(shaq, 3), }; transact_and_wait(&store, &deployment, BLOCKS[2].clone(), vec![op]) .await diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index 6cf68a17f10..736afd68f5c 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -1523,7 +1523,7 @@ fn handle_large_string_with_index() { block: BlockNumber, vid: i64, ) -> EntityModification { - let data = entity! { schema => id: id, name: name, vid: vid }; + let data = entity! { schema => id: id, name: name }; let key = USER_TYPE.parse_key(id).unwrap(); @@ -1623,7 +1623,7 @@ fn handle_large_bytea_with_index() { block: BlockNumber, vid: i64, ) -> EntityModification { - let data = entity! { schema => id: id, bin_name: scalar::Bytes::from(name), vid: vid }; + let data = entity! 
{ schema => id: id, bin_name: scalar::Bytes::from(name) }; let key = USER_TYPE.parse_key(id).unwrap(); @@ -2158,15 +2158,15 @@ fn reorg_tracking() { check_state!(store, 2, 2, 2); // Forward to block 3 - update_john(&subgraph_store, &deployment, 70, &TEST_BLOCK_3_PTR, 30).await; + update_john(&subgraph_store, &deployment, 70, &TEST_BLOCK_3_PTR, 5).await; check_state!(store, 2, 2, 3); // Forward to block 4 - update_john(&subgraph_store, &deployment, 71, &TEST_BLOCK_4_PTR, 40).await; + update_john(&subgraph_store, &deployment, 71, &TEST_BLOCK_4_PTR, 6).await; check_state!(store, 2, 2, 4); // Forward to block 5 - update_john(&subgraph_store, &deployment, 72, &TEST_BLOCK_5_PTR, 50).await; + update_john(&subgraph_store, &deployment, 72, &TEST_BLOCK_5_PTR, 7).await; check_state!(store, 2, 2, 5); // Revert all the way back to block 2 From 9899b1369eb3dba8ba135ae689a2837f0b648439 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Mon, 18 Nov 2024 16:18:57 +0200 Subject: [PATCH 16/27] cleanup --- store/test-store/tests/graph/entity_cache.rs | 97 ++++++++++---------- 1 file changed, 46 insertions(+), 51 deletions(-) diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 92050d6f07f..0e64161f6ec 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -24,7 +24,6 @@ use std::sync::Arc; use web3::types::H256; use graph_store_postgres::SubgraphStore as DieselSubgraphStore; -use test_store::store::filter_vid; use test_store::*; lazy_static! { @@ -208,13 +207,13 @@ fn insert_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai"}; + let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai" }; let mogwai_key = make_band_key("mogwai"); cache .set(mogwai_key.clone(), EntityV::new(mogwai_data.clone(), 0)) .unwrap(); - let sigurros_data = entity! 
{ SCHEMA => id: "sigurros", name: "Sigur Ros"}; + let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }; let sigurros_key = make_band_key("sigurros"); cache .set(sigurros_key.clone(), EntityV::new(sigurros_data.clone(), 0)) @@ -254,7 +253,7 @@ fn overwrite_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995}; + let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }; let mogwai_key = make_band_key("mogwai"); cache .set(mogwai_key.clone(), EntityV::new(mogwai_data.clone(), 0)) @@ -281,9 +280,8 @@ fn consecutive_modifications() { // Pre-populate the store with data so that we can test setting a field to // `Value::Null`. let store = { - let entities = vec![ - entity! { SCHEMA => id: "mogwai", name: "Mogwai", label: "Chemikal Underground", vid: 0i64 }, - ]; + let entities = + vec![entity! { SCHEMA => id: "mogwai", name: "Mogwai", label: "Chemikal Underground" }]; MockStore::new(entity_version_map("Band", entities)) }; @@ -311,7 +309,7 @@ fn consecutive_modifications() { sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![EntityModification::overwrite( update_key, - entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995, vid: 0i64 }, + entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }, 0, 0 )]) @@ -477,13 +475,12 @@ fn create_account_entity(id: &str, name: &str, email: &str, age: i32, vid: i64) } } -fn create_wallet_entity(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityV { +fn create_wallet_entity(id: &str, account_id: &Id, balance: i32) -> Entity { let account_id = Value::from(account_id.clone()); - let e = entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance}; - EntityV::new(e, vid) + entity! 
{ LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance } } fn create_wallet_operation(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityOperation { - let test_wallet = create_wallet_entity(id, account_id, balance, vid); + let test_wallet = EntityV::new(create_wallet_entity(id, account_id, balance), vid); EntityOperation::Set { key: WALLET_TYPE.parse_key(id).unwrap(), data: test_wallet, @@ -501,12 +498,12 @@ fn check_for_account_with_multiple_wallets() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 0).e; - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 1).e; - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 2).e; + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(result, filter_vid(expeted_vec)); + assert_eq!(result, expeted_vec); }); } @@ -521,10 +518,10 @@ fn check_for_account_with_single_wallet() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 1).e; + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32); let expeted_vec = vec![wallet_1]; - assert_eq!(result, filter_vid(expeted_vec)); + assert_eq!(result, expeted_vec); }); } @@ -605,15 +602,14 @@ fn check_for_insert_async_store() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 21).e; - let wallet_2 = create_wallet_entity("5", &account_id, 79_i32, 22).e; - let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 23).e; + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32); + let 
wallet_2 = create_wallet_entity("5", &account_id, 79_i32); + let wallet_3 = create_wallet_entity("6", &account_id, 200_i32); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(filter_vid(result), filter_vid(expeted_vec)); + assert_eq!(result, expeted_vec); }); } - #[test] fn check_for_insert_async_not_related() { run_store_test(|mut cache, store, deployment, _writable| async move { @@ -638,12 +634,12 @@ fn check_for_insert_async_not_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 1).e; - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2).e; - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3).e; + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; - assert_eq!(result, filter_vid(expeted_vec)); + assert_eq!(result, expeted_vec); }); } @@ -657,9 +653,8 @@ fn check_for_update_async_related() { let new_data = match wallet_entity_update { EntityOperation::Set { ref data, .. 
} => data.clone(), _ => unreachable!(), - } - .e; - assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data); + }; + assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data.e); // insert a new wallet transact_entity_operations( &store, @@ -677,11 +672,11 @@ fn check_for_update_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 12).e; - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 13).e; - let expeted_vec = vec![new_data, wallet_2, wallet_3]; + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let expeted_vec = vec![new_data.e, wallet_2, wallet_3]; - assert_eq!(filter_vid(result), filter_vid(expeted_vec)); + assert_eq!(result, expeted_vec); }); } @@ -707,55 +702,55 @@ fn check_for_delete_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2).e; - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3).e; + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); let expeted_vec = vec![wallet_2, wallet_3]; - assert_eq!(result, filter_vid(expeted_vec)); + assert_eq!(result, expeted_vec); }); } - #[test] fn scoped_get() { run_store_test(|mut cache, _store, _deployment, _writable| async move { // Key for an existing entity that is in the store let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); let key1 = WALLET_TYPE.parse_key("1").unwrap(); - let wallet1 = create_wallet_entity("1", &account1, 67, 1); + let wallet1 = create_wallet_entity("1", &account1, 67); // Create a new entity that is not in the store let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); - let wallet5 = create_wallet_entity("5", &account5, 100, 5); + 
let wallet5 = create_wallet_entity("5", &account5, 100); let key5 = WALLET_TYPE.parse_key("5").unwrap(); - cache.set(key5.clone(), wallet5.clone()).unwrap(); + cache + .set(key5.clone(), EntityV::new(wallet5.clone(), 5)) + .unwrap(); // For the new entity, we can retrieve it with either scope let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); - assert_eq!(Some(&wallet5.e), act5.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); let act5 = cache.get(&key5, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet5.e), act5.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); // For an entity in the store, we can not get it `InBlock` but with // `Store` let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); assert_eq!(None, act1); let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!( - filter_vid(vec![wallet1.e.clone()]), - vec![act1.as_ref().map(|e| e.as_ref()).unwrap().clone()] - ); + assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); // Even after reading from the store, the entity is not visible with // `InBlock` let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); assert_eq!(None, act1); // But if it gets updated, it becomes visible with either scope let mut wallet1 = wallet1; - wallet1.e.set("balance", 70).unwrap(); - cache.set(key1.clone(), wallet1.clone()).unwrap(); + wallet1.set("balance", 70).unwrap(); + cache + .set(key1.clone(), EntityV::new(wallet1.clone(), 1)) + .unwrap(); let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); - assert_eq!(Some(&wallet1.e), act1.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet1.e), act1.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); }) } From d9e74f15bef1c68b9870b70d8cf60f0309226756 Mon Sep 17 00:00:00 2001 From: Zoran 
Cvetkov Date: Mon, 18 Nov 2024 17:27:59 +0200 Subject: [PATCH 17/27] cleanup --- core/src/subgraph/runner.rs | 2 +- graph/src/data/store/mod.rs | 6 ++++-- graph/src/schema/input/mod.rs | 8 ++------ runtime/wasm/src/host_exports.rs | 1 - store/test-store/src/store.rs | 10 ---------- store/test-store/tests/core/interfaces.rs | 8 ++++---- 6 files changed, 11 insertions(+), 24 deletions(-) diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index bc1ee5bbeba..7900fd15fa7 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -1618,7 +1618,7 @@ async fn update_proof_of_indexing( data.push((entity_cache.schema.poi_block_time(), block_time)); } let poi = entity_cache.make_entity(data)?; - // VOI is autogenerated for POI table and our input is ignored + // VID is autogenerated for POI table and our input is ignored entity_cache.set(key, EntityV::new(poi, 0)) } diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 5b9ece22657..68be101f719 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -913,10 +913,12 @@ impl Entity { Id::try_from(self.get("id").unwrap().clone()).expect("the id is set to a valid value") } - // TODO: only for tests! + // TODO: try to use it only for tests! + // #[cfg(debug_assertions)] pub fn vid(&self) -> i64 { self.get("vid") .expect("the vid is set") + // .unwrap_or(&Value::Int8(0)) .as_int8() .expect("the vid is set to a valid value") } @@ -1087,7 +1089,7 @@ impl std::fmt::Debug for Entity { } } -/// An entity is represented as a map of attribute names to values. +/// An entity wrapper that has VID too. 
#[derive(Debug, Clone, CacheWeight, PartialEq, Eq, Serialize)] pub struct EntityV { pub e: Entity, diff --git a/graph/src/schema/input/mod.rs b/graph/src/schema/input/mod.rs index 222b55bea3a..440873f47d6 100644 --- a/graph/src/schema/input/mod.rs +++ b/graph/src/schema/input/mod.rs @@ -35,7 +35,7 @@ pub(crate) const POI_OBJECT: &str = "Poi$"; const POI_DIGEST: &str = "digest"; /// The name of the PoI attribute for storing the block time const POI_BLOCK_TIME: &str = "blockTime"; -const VID: &str = "vid"; +const VID_FIELD: &str = "vid"; pub mod kw { pub const ENTITY: &str = "entity"; @@ -1488,10 +1488,6 @@ impl InputSchema { } pub fn has_field_with_name(&self, entity_type: &EntityType, field: &str) -> bool { - // TODO: check if it is needed - // if field == VID { - // return true; - // } let field = self.inner.pool.lookup(field); match field { @@ -1602,7 +1598,7 @@ fn atom_pool(document: &s::Document) -> AtomPool { pool.intern(POI_DIGEST); pool.intern(POI_BLOCK_TIME); - pool.intern(VID); + pool.intern(VID_FIELD); for definition in &document.definitions { match definition { diff --git a/runtime/wasm/src/host_exports.rs b/runtime/wasm/src/host_exports.rs index 3fafdc0e30a..f6d8d0e761c 100644 --- a/runtime/wasm/src/host_exports.rs +++ b/runtime/wasm/src/host_exports.rs @@ -315,7 +315,6 @@ impl HostExports { data.insert(store::ID.clone(), value); } } - // data.insert(store::VID.clone(), Value::Int8(vid)); self.check_invalid_fields( self.data_source.api_version.clone(), diff --git a/store/test-store/src/store.rs b/store/test-store/src/store.rs index 3ff2ec0dcd7..187521a3631 100644 --- a/store/test-store/src/store.rs +++ b/store/test-store/src/store.rs @@ -106,16 +106,6 @@ lazy_static! { }; } -pub fn filter_vid(arr: Vec) -> Vec { - arr.into_iter() - .map(|mut e| { - e.remove("vid"); - e.remove_null_fields(); - e - }) - .collect() -} - /// Run the `test` after performing `setup`. The result of `setup` is passed /// into `test`. 
All tests using `run_test_sequentially` are run in sequence, /// never in parallel. The `test` is passed a `Store`, but it is permissible diff --git a/store/test-store/tests/core/interfaces.rs b/store/test-store/tests/core/interfaces.rs index 7f3718e8563..8142adac79e 100644 --- a/store/test-store/tests/core/interfaces.rs +++ b/store/test-store/tests/core/interfaces.rs @@ -1114,11 +1114,11 @@ async fn enums() { let entities = vec![ ( "Trajectory", - entity! { schema => id: "1", direction: "EAST", meters: 10, vid: 0i64}, + entity! { schema => id: "1", direction: "EAST", meters: 10, vid: 0i64 }, ), ( "Trajectory", - entity! { schema => id: "2", direction: "NORTH", meters: 15, vid: 1i64}, + entity! { schema => id: "2", direction: "NORTH", meters: 15, vid: 1i64 }, ), ]; let query = "query { trajectories { id, direction, meters } }"; @@ -1365,11 +1365,11 @@ async fn derived_interface_bytes() { ("Pool", entity! { schema => id: b("0xf001"), vid: 0i64 }), ( "Sell", - entity! { schema => id: b("0xc0"), pool: "0xf001", vid: 0i64}, + entity! { schema => id: b("0xc0"), pool: "0xf001", vid: 0i64 }, ), ( "Buy", - entity! { schema => id: b("0xb0"), pool: "0xf001", vid: 0i64}, + entity! 
{ schema => id: b("0xb0"), pool: "0xf001", vid: 0i64 }, ), ]; From fd9f92b6561a03ed324fc8bba0c54b6a58d5e4ab Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 19 Nov 2024 14:08:32 +0200 Subject: [PATCH 18/27] correct vid --- chain/substreams/src/trigger.rs | 4 ++-- graph/src/components/subgraph/instance.rs | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/chain/substreams/src/trigger.rs b/chain/substreams/src/trigger.rs index 8ca187c77af..06c218aa11c 100644 --- a/chain/substreams/src/trigger.rs +++ b/chain/substreams/src/trigger.rs @@ -238,8 +238,8 @@ where logger, ); - // TODO: check if 0 is correct VID - state.entity_cache.set(key, EntityV::new(entity, 0))?; + let vid = state.next_vid(block.number); + state.entity_cache.set(key, EntityV::new(entity, vid))?; } ParsedChanges::Delete(entity_key) => { let entity_type = entity_key.entity_type.cheap_clone(); diff --git a/graph/src/components/subgraph/instance.rs b/graph/src/components/subgraph/instance.rs index 94487fc5bd2..2bcdf439c07 100644 --- a/graph/src/components/subgraph/instance.rs +++ b/graph/src/components/subgraph/instance.rs @@ -183,6 +183,7 @@ impl BlockState { pub fn persist_data_source(&mut self, ds: StoredDynamicDataSource) { self.persisted_data_sources.push(ds) } + pub fn next_vid(&mut self, block_number: BlockNumber) -> i64 { let vid = ((block_number as i64) << 32) + self.vid_seq as i64; self.vid_seq += 1; From d8425a1f7f5ca0635774e3bdf46fd161a4e49c27 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 19 Nov 2024 15:00:24 +0200 Subject: [PATCH 19/27] cleanup --- store/postgres/src/relational.rs | 9 +++++---- store/postgres/src/relational/prune.rs | 8 -------- store/postgres/src/relational_queries.rs | 22 +--------------------- 3 files changed, 6 insertions(+), 33 deletions(-) diff --git a/store/postgres/src/relational.rs b/store/postgres/src/relational.rs index e58f9cf8c09..3fff8c8dae7 100644 --- a/store/postgres/src/relational.rs +++ b/store/postgres/src/relational.rs @@ 
-693,13 +693,14 @@ impl Layout { for entity_data in inserts_or_updates.into_iter() { let entity_type = entity_data.entity_type(&self.input_schema); - let data: EntityV = entity_data.deserialize_with_layout(self, None)?; - let entity_id = data.e.id(); + let data: Entity = entity_data.deserialize_with_layout(self, None)?; + let entity_id = data.id(); processed_entities.insert((entity_type.clone(), entity_id.clone())); + let vid = data.vid(); changes.push(EntityOperation::Set { - key: entity_type.key_in(entity_id, CausalityRegion::from_entity(&data.e)), - data, + key: entity_type.key_in(entity_id, CausalityRegion::from_entity(&data)), + data: EntityV::new(data, vid), }); } diff --git a/store/postgres/src/relational/prune.rs b/store/postgres/src/relational/prune.rs index c72d5ddd774..39337d2a485 100644 --- a/store/postgres/src/relational/prune.rs +++ b/store/postgres/src/relational/prune.rs @@ -237,14 +237,6 @@ impl TablePair { "src" => src_nsp.as_str(), "error" => e.to_string()); } - // TODO: check if this is needed - // Make sure the vid sequence - // continues from where it was - // writeln!( - // query, - // "select setval('{dst_nsp}.{vid_seq}', nextval('{src_nsp}.{vid_seq}'));" - // )?; - writeln!(query, "drop table {src_qname};")?; writeln!(query, "alter table {dst_qname} set schema {src_nsp}")?; conn.transaction(|conn| conn.batch_execute(&query))?; diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 5bb462dc4bd..40f83e4ff92 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -13,9 +13,8 @@ use diesel::sql_types::Untyped; use diesel::sql_types::{Array, BigInt, Binary, Bool, Int8, Integer, Jsonb, Text, Timestamptz}; use graph::components::store::write::{EntityWrite, RowGroup, WriteChunk}; use graph::components::store::{Child as StoreChild, DerivedEntityQuery}; -use graph::data::store::{EntityV, Id, IdType, NULL}; +use graph::data::store::{Id, IdType, NULL}; use 
graph::data::store::{IdList, IdRef, QueryObject}; -// use graph::data::subgraph::schema::POI_TABLE; use graph::data::value::{Object, Word}; use graph::data_source::CausalityRegion; use graph::prelude::{ @@ -201,23 +200,6 @@ impl FromEntityData for Entity { } } -impl FromEntityData for EntityV { - const WITH_INTERNAL_KEYS: bool = false; - - type Value = graph::prelude::Value; - - fn from_data>>( - schema: &InputSchema, - parent_id: Option, - iter: I, - ) -> Result { - debug_assert_eq!(None, parent_id); - let e = schema.try_make_entity(iter).map_err(StoreError::from)?; - let vid = e.vid(); - Ok(EntityV::new(e, vid)) - } -} - impl FromEntityData for QueryObject { const WITH_INTERNAL_KEYS: bool = true; @@ -2559,7 +2541,6 @@ impl<'a> QueryFragment for InsertQuery<'a> { let out = &mut out; out.unsafe_to_cache_prepared(); - // let not_poi = self.table.name.as_str() != POI_TABLE; let not_poi = !self.table.object.is_poi(); // Construct a query @@ -5108,7 +5089,6 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> { out.unsafe_to_cache_prepared(); - // let not_poi = self.dst.name.as_str() != POI_TABLE; let not_poi = !self.dst.object.is_poi(); // Construct a query From d86699ec810a0c7a0bb9da7a70e64f71795c2de4 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 19 Nov 2024 16:43:26 +0200 Subject: [PATCH 20/27] cleanup --- graph/src/data/store/mod.rs | 15 ++- runtime/test/src/test.rs | 4 +- store/test-store/src/store.rs | 2 +- .../tests/chain/ethereum/manifest.rs | 4 +- store/test-store/tests/core/interfaces.rs | 94 +++++++------------ store/test-store/tests/graphql/query.rs | 2 +- .../test-store/tests/postgres/aggregation.rs | 2 +- store/test-store/tests/postgres/relational.rs | 2 - .../tests/postgres/relational_bytes.rs | 4 +- store/test-store/tests/postgres/store.rs | 2 +- store/test-store/tests/postgres/writable.rs | 2 +- 11 files changed, 55 insertions(+), 78 deletions(-) diff --git 
a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 68be101f719..eb54d44511a 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -913,12 +913,21 @@ impl Entity { Id::try_from(self.get("id").unwrap().clone()).expect("the id is set to a valid value") } - // TODO: try to use it only for tests! - // #[cfg(debug_assertions)] + /// Return the VID of this entity and if its missing or of a type different than + /// i64 it panics. pub fn vid(&self) -> i64 { self.get("vid") .expect("the vid is set") - // .unwrap_or(&Value::Int8(0)) + .as_int8() + .expect("the vid is set to a valid value") + } + + /// This version of the function returns 0 if the VID is not set. It should be + /// used only in the testing code for more lenient definition of entities. + #[cfg(debug_assertions)] + pub fn vid_or_default(&self) -> i64 { + self.get("vid") + .unwrap_or(&Value::Int8(0)) .as_int8() .expect("the vid is set to a valid value") } diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index 78fa6e49657..f9c04fb462e 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -1022,8 +1022,8 @@ async fn test_entity_store(api_version: Version) { let schema = store.input_schema(&deployment.hash).unwrap(); - let alex = entity! { schema => id: "alex", name: "Alex", vid: 0i64}; - let steve = entity! { schema => id: "steve", name: "Steve", vid: 1i64}; + let alex = entity! { schema => id: "alex", name: "Alex", vid: 0i64 }; + let steve = entity! 
{ schema => id: "steve", name: "Steve", vid: 1i64 }; let user_type = schema.entity_type("User").unwrap(); test_store::insert_entities( &deployment, diff --git a/store/test-store/src/store.rs b/store/test-store/src/store.rs index 187521a3631..30017db3d21 100644 --- a/store/test-store/src/store.rs +++ b/store/test-store/src/store.rs @@ -424,7 +424,7 @@ pub async fn insert_entities( entities: Vec<(EntityType, Entity)>, ) -> Result<(), StoreError> { let insert_ops = entities.into_iter().map(|(entity_type, data)| { - let vid = data.vid(); + let vid = data.vid_or_default(); EntityOperation::Set { key: entity_type.key(data.id()), data: EntityV::new(data, vid), diff --git a/store/test-store/tests/chain/ethereum/manifest.rs b/store/test-store/tests/chain/ethereum/manifest.rs index aa2a46b140b..23fdfae7c57 100644 --- a/store/test-store/tests/chain/ethereum/manifest.rs +++ b/store/test-store/tests/chain/ethereum/manifest.rs @@ -316,7 +316,7 @@ specVersion: 0.0.2 .unwrap(); // Adds an example entity. - let thing = entity! { schema => id: "datthing", vid : 0i64 }; + let thing = entity! { schema => id: "datthing" }; test_store::insert_entities( &deployment, vec![(schema.entity_type("Thing").unwrap(), thing)], @@ -416,7 +416,7 @@ specVersion: 0.0.2 msg ); - let thing = entity! { schema => id: "datthing", vid : 1i64 }; + let thing = entity! { schema => id: "datthing", vid: 1i64 }; test_store::insert_entities( &deployment, vec![(schema.entity_type("Thing").unwrap(), thing)], diff --git a/store/test-store/tests/core/interfaces.rs b/store/test-store/tests/core/interfaces.rs index 8142adac79e..3b8ca1724c2 100644 --- a/store/test-store/tests/core/interfaces.rs +++ b/store/test-store/tests/core/interfaces.rs @@ -69,7 +69,7 @@ async fn one_interface_one_entity() { type Animal implements Legged @entity { id: ID!, legs: Int }"; let schema = InputSchema::raw(document, subgraph_id); - let entity = ("Animal", entity! 
{ schema => id: "1", legs: 3, vid: 0i64 }); + let entity = ("Animal", entity! { schema => id: "1", legs: 3 }); // Collection query. let query = "query { leggeds(first: 100) { legs } }"; @@ -97,7 +97,7 @@ async fn one_interface_one_entity_typename() { type Animal implements Legged @entity { id: ID!, legs: Int }"; let schema = InputSchema::raw(document, subgraph_id); - let entity = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); + let entity = ("Animal", entity! { schema => id: "1", legs: 3 }); let query = "query { leggeds(first: 100) { __typename } }"; @@ -118,11 +118,8 @@ async fn one_interface_multiple_entities() { "; let schema = InputSchema::raw(document, subgraph_id); - let animal = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); - let furniture = ( - "Furniture", - entity! { schema => id: "2", legs: 4, vid: 0i64 }, - ); + let animal = ("Animal", entity! { schema => id: "1", legs: 3 }); + let furniture = ("Furniture", entity! { schema => id: "2", legs: 4 }); let query = "query { leggeds(first: 100, orderBy: legs) { legs } }"; @@ -153,8 +150,8 @@ async fn reference_interface() { let query = "query { leggeds(first: 100) { leg { id } } }"; - let leg = ("Leg", entity! { schema => id: "1", vid: 0i64 }); - let animal = ("Animal", entity! { schema => id: "1", leg: 1, vid: 0i64 }); + let leg = ("Leg", entity! { schema => id: "1" }); + let animal = ("Animal", entity! { schema => id: "1", leg: 1 }); let res = insert_and_query(subgraph_id, document, vec![leg, animal], query) .await @@ -209,11 +206,11 @@ async fn reference_interface_derived() { let sell2 = ("SellEvent", entity! { schema => id: "sell2", vid: 1i64 }); let gift = ( "GiftEvent", - entity! { schema => id: "gift", transaction: "txn", vid: 0i64 }, + entity! { schema => id: "gift", transaction: "txn" }, ); let txn = ( "Transaction", - entity! { schema => id: "txn", buyEvent: "buy", sellEvents: vec!["sell1", "sell2"], vid: 0i64 }, + entity! 
{ schema => id: "txn", buyEvent: "buy", sellEvents: vec!["sell1", "sell2"] }, ); let entities = vec![buy, sell1, sell2, gift, txn]; @@ -308,11 +305,8 @@ async fn conflicting_implementors_id() { "; let schema = InputSchema::raw(document, subgraph_id); - let animal = ("Animal", entity! { schema => id: "1", legs: 3, vid: 0i64 }); - let furniture = ( - "Furniture", - entity! { schema => id: "1", legs: 3, vid: 0i64 }, - ); + let animal = ("Animal", entity! { schema => id: "1", legs: 3 }); + let furniture = ("Furniture", entity! { schema => id: "1", legs: 3 }); let query = "query { leggeds(first: 100) { legs } }"; @@ -340,11 +334,8 @@ async fn derived_interface_relationship() { "; let schema = InputSchema::raw(document, subgraph_id); - let forest = ("Forest", entity! { schema => id: "1", vid: 0i64 }); - let animal = ( - "Animal", - entity! { schema => id: "1", forest: "1", vid: 0i64 }, - ); + let forest = ("Forest", entity! { schema => id: "1" }); + let animal = ("Animal", entity! { schema => id: "1", forest: "1" }); let query = "query { forests(first: 100) { dwellers(first: 100) { id } } }"; @@ -371,12 +362,9 @@ async fn two_interfaces() { "; let schema = InputSchema::raw(document, subgraph_id); - let a = ("A", entity! { schema => id: "1", foo: "bla", vid: 0i64 }); - let b = ("B", entity! { schema => id: "1", bar: 100, vid: 0i64 }); - let ab = ( - "AB", - entity! { schema => id: "2", foo: "ble", bar: 200, vid: 0i64 }, - ); + let a = ("A", entity! { schema => id: "1", foo: "bla" }); + let b = ("B", entity! { schema => id: "1", bar: 100 }); + let ab = ("AB", entity! { schema => id: "2", foo: "ble", bar: 200 }); let query = "query { ibars(first: 100, orderBy: bar) { bar } @@ -402,7 +390,7 @@ async fn interface_non_inline_fragment() { let entity = ( "Animal", - entity! { schema => id: "1", name: "cow", legs: 3, vid: 0i64 }, + entity! { schema => id: "1", name: "cow", legs: 3 }, ); // Query only the fragment. 
@@ -434,12 +422,9 @@ async fn interface_inline_fragment() { let animal = ( "Animal", - entity! { schema => id: "1", name: "cow", legs: 4, vid: 0i64 }, - ); - let bird = ( - "Bird", - entity! { schema => id: "2", airspeed: 24, legs: 2, vid: 0i64 }, + entity! { schema => id: "1", name: "cow", legs: 4 }, ); + let bird = ("Bird", entity! { schema => id: "2", airspeed: 24, legs: 2 }); let query = "query { leggeds(orderBy: legs) { ... on Animal { name } ...on Bird { airspeed } } }"; @@ -867,14 +852,8 @@ async fn merge_fields_not_in_interface() { } }"; - let animal = ( - "Animal", - entity! { schema => id: "cow", human: "fred", vid: 0i64 }, - ); - let human = ( - "Human", - entity! { schema => id: "fred", animal: "cow", vid: 0i64 }, - ); + let animal = ("Animal", entity! { schema => id: "cow", human: "fred" }); + let human = ("Human", entity! { schema => id: "fred", animal: "cow" }); let res = insert_and_query(subgraph_id, document, vec![animal, human], query) .await @@ -947,15 +926,15 @@ async fn nested_interface_fragments() { } }"; - let foo = ("Foo", entity! { schema => id: "foo", vid: 0i64 }); - let one = ("One", entity! { schema => id: "1", foo1: "foo", vid: 0i64 }); + let foo = ("Foo", entity! { schema => id: "foo" }); + let one = ("One", entity! { schema => id: "1", foo1: "foo" }); let two = ( "Two", - entity! { schema => id: "2", foo1: "foo", foo2: "foo", vid: 0i64 }, + entity! { schema => id: "2", foo1: "foo", foo2: "foo" }, ); let three = ( "Three", - entity! { schema => id: "3", foo1: "foo", foo2: "foo", foo3: "foo", vid: 0i64 }, + entity! { schema => id: "3", foo1: "foo", foo2: "foo", foo3: "foo" }, ); let res = insert_and_query(subgraph_id, document, vec![foo, one, two, three], query) @@ -1028,9 +1007,9 @@ async fn nested_interface_fragments_overlapping() { } }"; - let foo = ("Foo", entity! { schema => id: "foo", vid: 0i64 }); - let one = ("One", entity! { schema => id: "1", foo1: "foo", vid: 0i64 }); - let two = ("Two", entity! 
{ schema => id: "2", foo1: "foo", vid: 0i64 }); + let foo = ("Foo", entity! { schema => id: "foo" }); + let one = ("One", entity! { schema => id: "1", foo1: "foo" }); + let two = ("Two", entity! { schema => id: "2", foo1: "foo" }); let res = insert_and_query(subgraph_id, document, vec![foo, one, two], query) .await .unwrap(); @@ -1305,13 +1284,10 @@ async fn mixed_mutability() { let query = "query { events { id } }"; let entities = vec![ - ( - "Mutable", - entity! { schema => id: "mut0", name: "mut0", vid: 0i64 }, - ), + ("Mutable", entity! { schema => id: "mut0", name: "mut0" }), ( "Immutable", - entity! { schema => id: "immo0", name: "immo0", vid: 0i64 }, + entity! { schema => id: "immo0", name: "immo0" }, ), ]; @@ -1362,15 +1338,9 @@ async fn derived_interface_bytes() { let query = "query { pools { trades { id } } }"; let entities = vec![ - ("Pool", entity! { schema => id: b("0xf001"), vid: 0i64 }), - ( - "Sell", - entity! { schema => id: b("0xc0"), pool: "0xf001", vid: 0i64 }, - ), - ( - "Buy", - entity! { schema => id: b("0xb0"), pool: "0xf001", vid: 0i64 }, - ), + ("Pool", entity! { schema => id: b("0xf001") }), + ("Sell", entity! { schema => id: b("0xc0"), pool: "0xf001" }), + ("Buy", entity! 
{ schema => id: b("0xb0"), pool: "0xf001" }), ]; let res = insert_and_query(subgraph_id, document, entities, query) diff --git a/store/test-store/tests/graphql/query.rs b/store/test-store/tests/graphql/query.rs index 59126171bff..5358249040d 100644 --- a/store/test-store/tests/graphql/query.rs +++ b/store/test-store/tests/graphql/query.rs @@ -426,7 +426,7 @@ async fn insert_test_entities( .map(|(typename, entities)| { let entity_type = schema.entity_type(typename).unwrap(); entities.into_iter().map(move |data| { - let vid = data.vid(); + let vid = data.vid_or_default(); EntityOperation::Set { key: entity_type.key(data.id()), data: EntityV::new(data, vid), diff --git a/store/test-store/tests/postgres/aggregation.rs b/store/test-store/tests/postgres/aggregation.rs index 8db6138abbb..de47b88d6a1 100644 --- a/store/test-store/tests/postgres/aggregation.rs +++ b/store/test-store/tests/postgres/aggregation.rs @@ -83,7 +83,7 @@ pub async fn insert( .map(|data| { let data_type = schema.entity_type("Data").unwrap(); let key = data_type.key(data.id()); - let vid = data.vid(); + let vid = data.vid_or_default(); EntityOperation::Set { data: EntityV::new(data, vid), key, diff --git a/store/test-store/tests/postgres/relational.rs b/store/test-store/tests/postgres/relational.rs index 8c47c464fe0..8a2e5a2f3d9 100644 --- a/store/test-store/tests/postgres/relational.rs +++ b/store/test-store/tests/postgres/relational.rs @@ -205,13 +205,11 @@ lazy_static! { bigInt: big_int.clone(), bigIntArray: vec![big_int.clone(), (big_int + 1.into())], color: "yellow", - vid: 0i64, } }; static ref EMPTY_NULLABLESTRINGS_ENTITY: Entity = { entity! 
{ THINGS_SCHEMA => id: "one", - vid: 0i64, } }; static ref SCALAR_TYPE: EntityType = THINGS_SCHEMA.entity_type("Scalar").unwrap(); diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index 41aa79bf9b7..6897dc9e914 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -84,7 +84,7 @@ pub fn row_group_update( ) -> RowGroup { let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { - let vid = data.vid(); + let vid = data.vid_or_default(); group .push(EntityModification::overwrite(key, data, block, vid), block) .unwrap(); @@ -99,7 +99,7 @@ pub fn row_group_insert( ) -> RowGroup { let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { - let vid = data.vid(); + let vid = data.vid_or_default(); group .push(EntityModification::insert(key, data, block, vid), block) .unwrap(); diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index 736afd68f5c..22e473e3810 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -1317,7 +1317,7 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { .iter() .map(|(id, data)| EntityOperation::Set { key: USER_TYPE.parse_key(id.as_str()).unwrap(), - data: EntityV::new(data.clone(), data.vid()), + data: EntityV::new(data.clone(), data.vid_or_default()), }) .collect(), ) diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index 038b93f462f..50b744e62b7 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -142,7 +142,7 @@ async fn insert_count( let count_key_local = |counter_type: &EntityType, id: &str| counter_type.parse_key(id).unwrap(); let data = entity! 
{ TEST_SUBGRAPH_SCHEMA => id: "1", - count: count as i32, + count: count as i32 }; let entity_op = if block != 3 && block != 5 && block != 7 { EntityOperation::Set { From faa6e5dd08cc622b54c9e5a28b86f73cf326d236 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Fri, 22 Nov 2024 16:54:16 +0200 Subject: [PATCH 21/27] bump the spec version --- graph/src/components/store/entity_cache.rs | 1 - graph/src/data/subgraph/api_version.rs | 4 +++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 6ea8b988bb2..4a3bca8e2b1 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -453,7 +453,6 @@ impl EntityCache { for (key, update) in self.updates { use EntityModification::*; - // let is_poi = key.entity_type.is_poi(); let current = self.current.remove(&key).and_then(|entity| entity); let modification = match (current, update) { // Entity was created diff --git a/graph/src/data/subgraph/api_version.rs b/graph/src/data/subgraph/api_version.rs index 43ee639007c..dcfdb329476 100644 --- a/graph/src/data/subgraph/api_version.rs +++ b/graph/src/data/subgraph/api_version.rs @@ -55,10 +55,12 @@ pub const SPEC_VERSION_1_1_0: Version = Version::new(1, 1, 0); pub const SPEC_VERSION_1_2_0: Version = Version::new(1, 2, 0); // Enables subgraphs as datasource +// Change the way the VID field is generated. It used to be autoincrement. Now it's +// based on block number and the sequence of the entities in a block. 
pub const SPEC_VERSION_1_3_0: Version = Version::new(1, 3, 0); // The latest spec version available -pub const LATEST_VERSION: &Version = &SPEC_VERSION_1_2_0; +pub const LATEST_VERSION: &Version = &SPEC_VERSION_1_3_0; pub const MIN_SPEC_VERSION: Version = Version::new(0, 0, 2); From 15411a1704575002dc861e92e072c524392bbc1a Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Tue, 17 Dec 2024 17:47:19 +0200 Subject: [PATCH 22/27] simplify --- chain/substreams/src/trigger.rs | 4 +-- core/src/subgraph/runner.rs | 7 +++-- graph/src/components/store/entity_cache.rs | 20 +++++++++++-- graph/src/components/subgraph/instance.rs | 10 ------- .../subgraph/proof_of_indexing/online.rs | 4 +++ runtime/wasm/src/host_exports.rs | 5 ++-- store/test-store/tests/graph/entity_cache.rs | 28 ++++++++----------- 7 files changed, 40 insertions(+), 38 deletions(-) diff --git a/chain/substreams/src/trigger.rs b/chain/substreams/src/trigger.rs index 06c218aa11c..4dd8d5f74ac 100644 --- a/chain/substreams/src/trigger.rs +++ b/chain/substreams/src/trigger.rs @@ -8,7 +8,6 @@ use graph::{ subgraph::{MappingError, ProofOfIndexingEvent, SharedProofOfIndexing}, trigger_processor::HostedTrigger, }, - data::store::EntityV, prelude::{ anyhow, async_trait, BlockHash, BlockNumber, BlockState, CheapClone, RuntimeHostBuilder, }, @@ -238,8 +237,7 @@ where logger, ); - let vid = state.next_vid(block.number); - state.entity_cache.set(key, EntityV::new(entity, vid))?; + state.entity_cache.set(key, entity, block.number)?; } ParsedChanges::Delete(entity_key) => { let entity_type = entity_key.entity_type.cheap_clone(); diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 7900fd15fa7..daf1c4cf6da 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -18,7 +18,6 @@ use graph::components::{ subgraph::{MappingError, PoICausalityRegion, ProofOfIndexing, SharedProofOfIndexing}, }; use graph::data::store::scalar::Bytes; -use graph::data::store::EntityV; use 
graph::data::subgraph::{ schema::{SubgraphError, SubgraphHealth}, SubgraphFeature, @@ -1604,6 +1603,7 @@ async fn update_proof_of_indexing( key: EntityKey, digest: Bytes, block_time: BlockTime, + block: BlockNumber, ) -> Result<(), Error> { let digest_name = entity_cache.schema.poi_digest(); let mut data = vec![ @@ -1618,12 +1618,12 @@ async fn update_proof_of_indexing( data.push((entity_cache.schema.poi_block_time(), block_time)); } let poi = entity_cache.make_entity(data)?; - // VID is autogenerated for POI table and our input is ignored - entity_cache.set(key, EntityV::new(poi, 0)) + entity_cache.set(key, poi, block) } let _section_guard = stopwatch.start_section("update_proof_of_indexing"); + let block_number = proof_of_indexing.get_block(); let mut proof_of_indexing = proof_of_indexing.take(); for (causality_region, stream) in proof_of_indexing.drain() { @@ -1659,6 +1659,7 @@ async fn update_proof_of_indexing( entity_key, updated_proof_of_indexing, block_time, + block_number, )?; } diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 4a3bca8e2b1..b5ec2738ba4 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -105,6 +105,10 @@ pub struct EntityCache { /// generated IDs, the `EntityCache` needs to be newly instantiated for /// each block seq: u32, + + // Sequence number of the next VID value for this block. The value written + // in the database consist of a block number and this SEQ number. + pub vid_seq: i32, } impl Debug for EntityCache { @@ -132,6 +136,7 @@ impl EntityCache { schema: store.input_schema(), store, seq: 0, + vid_seq: 0, } } @@ -152,6 +157,7 @@ impl EntityCache { schema: store.input_schema(), store, seq: 0, + vid_seq: 0, } } @@ -349,9 +355,19 @@ impl EntityCache { /// with existing data. The entity will be validated against the /// subgraph schema, and any errors will result in an `Err` being /// returned. 
- pub fn set(&mut self, key: EntityKey, entity: EntityV) -> Result<(), anyhow::Error> { + pub fn set( + &mut self, + key: EntityKey, + entity: Entity, + block: BlockNumber, + ) -> Result<(), anyhow::Error> { // check the validate for derived fields - let is_valid = entity.e.validate(&key).is_ok(); + let is_valid = entity.validate(&key).is_ok(); + + //The next VID is based on a block number and a sequence withing the block + let vid = ((block as i64) << 32) + self.vid_seq as i64; + self.vid_seq += 1; + let entity = EntityV::new(entity, vid); self.entity_op(key.clone(), EntityOp::Update(entity)); diff --git a/graph/src/components/subgraph/instance.rs b/graph/src/components/subgraph/instance.rs index 2bcdf439c07..889690c3916 100644 --- a/graph/src/components/subgraph/instance.rs +++ b/graph/src/components/subgraph/instance.rs @@ -78,8 +78,6 @@ pub struct BlockState { // data source that have been processed. pub processed_data_sources: Vec, - pub vid_seq: i32, - // Marks whether a handler is currently executing. 
in_handler: bool, @@ -95,7 +93,6 @@ impl BlockState { persisted_data_sources: Vec::new(), handler_created_data_sources: Vec::new(), processed_data_sources: Vec::new(), - vid_seq: 0, in_handler: false, metrics: BlockStateMetrics::new(), } @@ -113,7 +110,6 @@ impl BlockState { persisted_data_sources, handler_created_data_sources, processed_data_sources, - vid_seq: _, in_handler, metrics, } = self; @@ -183,10 +179,4 @@ impl BlockState { pub fn persist_data_source(&mut self, ds: StoredDynamicDataSource) { self.persisted_data_sources.push(ds) } - - pub fn next_vid(&mut self, block_number: BlockNumber) -> i64 { - let vid = ((block_number as i64) << 32) + self.vid_seq as i64; - self.vid_seq += 1; - vid - } } diff --git a/graph/src/components/subgraph/proof_of_indexing/online.rs b/graph/src/components/subgraph/proof_of_indexing/online.rs index f90fac969cf..d47f08b0a8f 100644 --- a/graph/src/components/subgraph/proof_of_indexing/online.rs +++ b/graph/src/components/subgraph/proof_of_indexing/online.rs @@ -242,6 +242,10 @@ impl ProofOfIndexing { pub fn take(self) -> HashMap { self.per_causality_region } + + pub fn get_block(&self) -> BlockNumber { + self.block_number + } } pub struct ProofOfIndexingFinisher { diff --git a/runtime/wasm/src/host_exports.rs b/runtime/wasm/src/host_exports.rs index f6d8d0e761c..3de534b9069 100644 --- a/runtime/wasm/src/host_exports.rs +++ b/runtime/wasm/src/host_exports.rs @@ -18,7 +18,7 @@ use graph::components::store::{EnsLookup, GetScope, LoadRelatedRequest}; use graph::components::subgraph::{ InstanceDSTemplate, PoICausalityRegion, ProofOfIndexingEvent, SharedProofOfIndexing, }; -use graph::data::store::{self, EntityV}; +use graph::data::store::{self}; use graph::data_source::{CausalityRegion, DataSource, EntityTypeAccess}; use graph::ensure; use graph::prelude::ethabi::param_type::Reader; @@ -248,7 +248,6 @@ impl HostExports { gas: &GasCounter, ) -> Result<(), HostExportError> { let entity_type = 
state.entity_cache.schema.entity_type(&entity_type)?; - let vid = state.next_vid(block); Self::expect_object_type(&entity_type, "set")?; @@ -351,7 +350,7 @@ impl HostExports { state.metrics.track_entity_write(&entity_type, &entity); - state.entity_cache.set(key, EntityV::new(entity, vid))?; + state.entity_cache.set(key, entity, block)?; Ok(()) } diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 0e64161f6ec..1df4b9adac9 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -210,13 +210,13 @@ fn insert_modifications() { let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai" }; let mogwai_key = make_band_key("mogwai"); cache - .set(mogwai_key.clone(), EntityV::new(mogwai_data.clone(), 0)) + .set(mogwai_key.clone(), mogwai_data.clone(), 0) .unwrap(); let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }; let sigurros_key = make_band_key("sigurros"); cache - .set(sigurros_key.clone(), EntityV::new(sigurros_data.clone(), 0)) + .set(sigurros_key.clone(), sigurros_data.clone(), 0) .unwrap(); let result = cache.as_modifications(0); @@ -224,7 +224,7 @@ fn insert_modifications() { sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ EntityModification::insert(mogwai_key, mogwai_data, 0, 0), - EntityModification::insert(sigurros_key, sigurros_data, 0, 0) + EntityModification::insert(sigurros_key, sigurros_data, 0, 1) ]) ); } @@ -256,13 +256,13 @@ fn overwrite_modifications() { let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }; let mogwai_key = make_band_key("mogwai"); cache - .set(mogwai_key.clone(), EntityV::new(mogwai_data.clone(), 0)) + .set(mogwai_key.clone(), mogwai_data.clone(), 0) .unwrap(); let sigurros_data = entity! 
{ SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994 }; let sigurros_key = make_band_key("sigurros"); cache - .set(sigurros_key.clone(), EntityV::new(sigurros_data.clone(), 0)) + .set(sigurros_key.clone(), sigurros_data.clone(), 0) .unwrap(); let result = cache.as_modifications(0); @@ -270,7 +270,7 @@ fn overwrite_modifications() { sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ EntityModification::overwrite(mogwai_key, mogwai_data, 0, 0), - EntityModification::overwrite(sigurros_key, sigurros_data, 0, 0) + EntityModification::overwrite(sigurros_key, sigurros_data, 0, 1) ]) ); } @@ -293,14 +293,12 @@ fn consecutive_modifications() { let update_data = entity! { SCHEMA => id: "mogwai", founded: 1995, label: "Rock Action Records" }; let update_key = make_band_key("mogwai"); - cache.set(update_key, EntityV::new(update_data, 0)).unwrap(); + cache.set(update_key, update_data, 0).unwrap(); // Then, just reset the "label". let update_data = entity! { SCHEMA => id: "mogwai", label: Value::Null }; let update_key = make_band_key("mogwai"); - cache - .set(update_key.clone(), EntityV::new(update_data, 0)) - .unwrap(); + cache.set(update_key.clone(), update_data, 0).unwrap(); // We expect a single overwrite modification for the above that leaves "id" // and "name" untouched, sets "founded" and removes the "label" field. 
@@ -721,9 +719,7 @@ fn scoped_get() { let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); let wallet5 = create_wallet_entity("5", &account5, 100); let key5 = WALLET_TYPE.parse_key("5").unwrap(); - cache - .set(key5.clone(), EntityV::new(wallet5.clone(), 5)) - .unwrap(); + cache.set(key5.clone(), wallet5.clone(), 0).unwrap(); // For the new entity, we can retrieve it with either scope let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); @@ -744,9 +740,7 @@ fn scoped_get() { // But if it gets updated, it becomes visible with either scope let mut wallet1 = wallet1; wallet1.set("balance", 70).unwrap(); - cache - .set(key1.clone(), EntityV::new(wallet1.clone(), 1)) - .unwrap(); + cache.set(key1.clone(), wallet1.clone(), 0).unwrap(); let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); let act1 = cache.get(&key1, GetScope::Store).unwrap(); @@ -793,6 +787,6 @@ fn no_interface_mods() { let entity = entity! { LOAD_RELATED_SUBGRAPH => id: "1", balance: 100 }; - cache.set(key, EntityV::new(entity, 0)).unwrap_err(); + cache.set(key, entity, 0).unwrap_err(); }) } From 62d1e2b874fbd5769cd3e5118ba59657c82efd35 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Wed, 18 Dec 2024 17:59:31 +0200 Subject: [PATCH 23/27] fix tests --- graph/src/components/store/entity_cache.rs | 2 +- store/test-store/tests/postgres/writable.rs | 13 +++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index b5ec2738ba4..fce0b5cc6d3 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -364,7 +364,7 @@ impl EntityCache { // check the validate for derived fields let is_valid = entity.validate(&key).is_ok(); - //The next VID is based on a block number and a sequence withing the block + // The next VID is based on a block number and a sequence within the block let vid = 
((block as i64) << 32) + self.vid_seq as i64; self.vid_seq += 1; let entity = EntityV::new(entity, vid); diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index 50b744e62b7..dbc66e5e401 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -1,4 +1,5 @@ use graph::blockchain::block_stream::{EntitySourceOperation, FirehoseCursor}; +use graph::data::store::EntityV; use graph::data::subgraph::schema::DeploymentCreate; use graph::data::value::Word; use graph::data_source::CausalityRegion; @@ -147,7 +148,7 @@ async fn insert_count( let entity_op = if block != 3 && block != 5 && block != 7 { EntityOperation::Set { key: count_key_local(&COUNTER_TYPE, &data.get("id").unwrap().to_string()), - data, + data: EntityV::new(data, block.into()), } } else { EntityOperation::Remove { @@ -166,7 +167,7 @@ async fn insert_count( }; let entity_op = EntityOperation::Set { key: count_key_local(&COUNTER2_TYPE, &data.get("id").unwrap().to_string()), - data, + data: EntityV::new(data, block.into()), }; ops.push(entity_op); } @@ -344,10 +345,10 @@ fn read_range_test() { r#"(1, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(2), id: String("1") }, vid: 1 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(2), id: String("1") }, vid: 1 }])"#, r#"(2, [EntitySourceOperation { entity_op: Modify, entity_type: EntityType(Counter), entity: Entity { count: Int(4), id: String("1") }, vid: 2 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(4), id: String("2") }, vid: 2 }])"#, r#"(3, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(4), id: String("1") }, vid: 2 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: 
Int(6), id: String("3") }, vid: 3 }])"#, - r#"(4, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1") }, vid: 3 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(8), id: String("4") }, vid: 4 }])"#, - r#"(5, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1") }, vid: 3 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(10), id: String("5") }, vid: 5 }])"#, - r#"(6, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1") }, vid: 4 }])"#, - r#"(7, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1") }, vid: 4 }])"#, + r#"(4, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1") }, vid: 4 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(8), id: String("4") }, vid: 4 }])"#, + r#"(5, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1") }, vid: 4 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(10), id: String("5") }, vid: 5 }])"#, + r#"(6, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1") }, vid: 6 }])"#, + r#"(7, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1") }, vid: 6 }])"#, ]; let subgraph_store = store.subgraph_store(); writable.deployment_synced().unwrap(); From 714c9316b87f25794019b41e08b3831abe9e8bf1 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov 
Date: Thu, 9 Jan 2025 18:07:03 +0200 Subject: [PATCH 24/27] addressing review remarks --- graph/src/components/store/entity_cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index fce0b5cc6d3..f7a9a8fe88b 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -108,7 +108,7 @@ pub struct EntityCache { // Sequence number of the next VID value for this block. The value written // in the database consist of a block number and this SEQ number. - pub vid_seq: i32, + pub vid_seq: u32, } impl Debug for EntityCache { From 6da9287e0bb4a85530bda680fd67da18b7942acc Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Mon, 13 Jan 2025 19:32:22 +0100 Subject: [PATCH 25/27] generate POI VIDs too --- graph/src/components/store/entity_cache.rs | 8 +++++-- graph/src/data/store/mod.rs | 2 +- runtime/test/src/test.rs | 4 ++-- store/postgres/src/relational/ddl.rs | 2 +- store/postgres/src/relational_queries.rs | 22 +++++--------------- store/test-store/tests/graph/entity_cache.rs | 10 ++++----- 6 files changed, 20 insertions(+), 28 deletions(-) diff --git a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index f7a9a8fe88b..6a539203b45 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -17,6 +17,10 @@ use super::{BlockNumber, DerivedEntityQuery, LoadRelatedRequest, StoreError}; pub type EntityLfuCache = LfuCache>>; +// Number of VIDs that are reserved outside of the generated ones here. +// Currently only 1 for POIs is used, but let's reserve a few more. 
+const RESERVED_VIDS: u32 = 100; + /// The scope in which the `EntityCache` should perform a `get` operation pub enum GetScope { /// Get from all previously stored entities in the store @@ -136,7 +140,7 @@ impl EntityCache { schema: store.input_schema(), store, seq: 0, - vid_seq: 0, + vid_seq: RESERVED_VIDS, } } @@ -157,7 +161,7 @@ impl EntityCache { schema: store.input_schema(), store, seq: 0, - vid_seq: 0, + vid_seq: RESERVED_VIDS, } } diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index eb54d44511a..86e255b23e7 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -927,7 +927,7 @@ impl Entity { #[cfg(debug_assertions)] pub fn vid_or_default(&self) -> i64 { self.get("vid") - .unwrap_or(&Value::Int8(0)) + .unwrap_or(&Value::Int8(100)) .as_int8() .expect("the vid is set to a valid value") } diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index f9c04fb462e..8acbf48b5e8 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -552,8 +552,8 @@ async fn test_ipfs_map(api_version: Version, json_error_msg: &str) { let subgraph_id = "ipfsMap"; // Try it with two valid objects - let (str1, thing1) = make_thing("one", "eins", 0); - let (str2, thing2) = make_thing("two", "zwei", 0); + let (str1, thing1) = make_thing("one", "eins", 100); + let (str2, thing2) = make_thing("two", "zwei", 100); let ops = run_ipfs_map( ipfs.clone(), subgraph_id, diff --git a/store/postgres/src/relational/ddl.rs b/store/postgres/src/relational/ddl.rs index dd459735e0f..38a7ac13ff7 100644 --- a/store/postgres/src/relational/ddl.rs +++ b/store/postgres/src/relational/ddl.rs @@ -116,7 +116,7 @@ impl Table { Ok(cols) } - let vid_type = if self.object.is_poi() || !self.object.is_object_type() { + let vid_type = if !self.object.is_object_type() { "bigserial" } else { "bigint" diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 40f83e4ff92..681266f6a0e 100644 --- 
a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -2541,8 +2541,6 @@ impl<'a> QueryFragment for InsertQuery<'a> { let out = &mut out; out.unsafe_to_cache_prepared(); - let not_poi = !self.table.object.is_poi(); - // Construct a query // insert into schema.table(column, ...) // values @@ -2568,9 +2566,7 @@ impl<'a> QueryFragment for InsertQuery<'a> { out.push_sql(CAUSALITY_REGION_COLUMN); }; - if not_poi { - out.push_sql(", vid"); - } + out.push_sql(", vid"); out.push_sql(") values\n"); for (i, row) in self.rows.iter().enumerate() { @@ -2588,10 +2584,8 @@ impl<'a> QueryFragment for InsertQuery<'a> { out.push_sql(", "); out.push_bind_param::(&row.causality_region)?; }; - if not_poi { - out.push_sql(", "); - out.push_bind_param::(&row.vid)?; - } + out.push_sql(", "); + out.push_bind_param::(&row.vid)?; out.push_sql(")"); } @@ -5089,8 +5083,6 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> { out.unsafe_to_cache_prepared(); - let not_poi = !self.dst.object.is_poi(); - // Construct a query // insert into {dst}({columns}) // select {columns} from {src} @@ -5111,9 +5103,7 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { out.push_sql(", "); out.push_sql(CAUSALITY_REGION_COLUMN); }; - if not_poi { - out.push_sql(", vid"); - } + out.push_sql(", vid"); out.push_sql(")\nselect "); for column in &self.columns { @@ -5179,9 +5169,7 @@ impl<'a> QueryFragment for CopyEntityBatchQuery<'a> { )); } } - if not_poi { - out.push_sql(", vid"); - } + out.push_sql(", vid"); out.push_sql(" from "); out.push_sql(self.src.qualified_name.as_str()); diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 1df4b9adac9..2d210ea25ce 100644 --- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -223,8 +223,8 @@ fn insert_modifications() { assert_eq!( 
sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ - EntityModification::insert(mogwai_key, mogwai_data, 0, 0), - EntityModification::insert(sigurros_key, sigurros_data, 0, 1) + EntityModification::insert(mogwai_key, mogwai_data, 0, 100), + EntityModification::insert(sigurros_key, sigurros_data, 0, 101) ]) ); } @@ -269,8 +269,8 @@ fn overwrite_modifications() { assert_eq!( sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ - EntityModification::overwrite(mogwai_key, mogwai_data, 0, 0), - EntityModification::overwrite(sigurros_key, sigurros_data, 0, 1) + EntityModification::overwrite(mogwai_key, mogwai_data, 0, 100), + EntityModification::overwrite(sigurros_key, sigurros_data, 0, 101) ]) ); } @@ -309,7 +309,7 @@ fn consecutive_modifications() { update_key, entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }, 0, - 0 + 100 )]) ); } From d178175ae7058edf7edb65e652a3f4a3af9bb549 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Thu, 16 Jan 2025 16:12:08 +0100 Subject: [PATCH 26/27] remove VID wrapping --- graph/src/components/store/entity_cache.rs | 31 +++---- graph/src/components/store/mod.rs | 4 +- graph/src/components/store/write.rs | 29 ++---- graph/src/data/store/mod.rs | 38 +++----- runtime/test/src/test.rs | 4 +- server/index-node/src/resolver.rs | 2 +- store/postgres/src/relational.rs | 5 +- store/postgres/src/relational_queries.rs | 11 ++- store/test-store/src/store.rs | 7 +- store/test-store/tests/graph/entity_cache.rs | 91 +++++++++++-------- store/test-store/tests/graphql/query.rs | 7 +- .../test-store/tests/postgres/aggregation.rs | 18 ++-- store/test-store/tests/postgres/graft.rs | 10 +- store/test-store/tests/postgres/relational.rs | 5 +- .../tests/postgres/relational_bytes.rs | 7 +- store/test-store/tests/postgres/store.rs | 60 ++++++------ store/test-store/tests/postgres/writable.rs | 27 +++--- 17 files changed, 175 insertions(+), 181 deletions(-) diff --git 
a/graph/src/components/store/entity_cache.rs b/graph/src/components/store/entity_cache.rs index 6a539203b45..2bb4c7b1791 100644 --- a/graph/src/components/store/entity_cache.rs +++ b/graph/src/components/store/entity_cache.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use crate::cheap_clone::CheapClone; use crate::components::store::write::EntityModification; use crate::components::store::{self as s, Entity, EntityOperation}; -use crate::data::store::{EntityV, EntityValidationError, Id, IdType, IntoEntityIterator}; +use crate::data::store::{EntityValidationError, Id, IdType, IntoEntityIterator}; use crate::prelude::ENV_VARS; use crate::schema::{EntityKey, InputSchema}; use crate::util::intern::Error as InternError; @@ -33,8 +33,8 @@ pub enum GetScope { #[derive(Debug, Clone)] enum EntityOp { Remove, - Update(EntityV), - Overwrite(EntityV), + Update(Entity), + Overwrite(Entity), } impl EntityOp { @@ -45,7 +45,7 @@ impl EntityOp { use EntityOp::*; match (self, entity) { (Remove, _) => Ok(None), - (Overwrite(new), _) | (Update(new), None) => Ok(Some(new.e)), + (Overwrite(new), _) | (Update(new), None) => Ok(Some(new)), (Update(updates), Some(entity)) => { let mut e = entity.borrow().clone(); e.merge_remove_null_fields(updates)?; @@ -69,7 +69,7 @@ impl EntityOp { match self { // This is how `Overwrite` is constructed, by accumulating `Update` onto `Remove`. 
Remove => *self = Overwrite(update), - Update(current) | Overwrite(current) => current.e.merge(update.e), + Update(current) | Overwrite(current) => current.merge(update), } } } @@ -288,9 +288,9 @@ impl EntityCache { ) -> Result, anyhow::Error> { match op { EntityOp::Update(entity) | EntityOp::Overwrite(entity) - if query.matches(key, &entity.e) => + if query.matches(key, &entity) => { - Ok(Some(entity.e.clone())) + Ok(Some(entity.clone())) } EntityOp::Remove => Ok(None), _ => Ok(None), @@ -371,7 +371,10 @@ impl EntityCache { // The next VID is based on a block number and a sequence within the block let vid = ((block as i64) << 32) + self.vid_seq as i64; self.vid_seq += 1; - let entity = EntityV::new(entity, vid); + let mut entity = entity; + let old_vid = entity.set_vid(vid).expect("the vid should be set"); + // Make sure that there was no VID previously set for this entity. + assert!(old_vid.is_none()); self.entity_op(key.clone(), EntityOp::Update(entity)); @@ -478,22 +481,19 @@ impl EntityCache { // Entity was created (None, EntityOp::Update(mut updates)) | (None, EntityOp::Overwrite(mut updates)) => { - let vid = updates.vid; - updates.e.remove_null_fields(); - let data = Arc::new(updates.e.clone()); + updates.remove_null_fields(); + let data = Arc::new(updates); self.current.insert(key.clone(), Some(data.cheap_clone())); Some(Insert { key, data, block, end: None, - vid, }) } // Entity may have been changed (Some(current), EntityOp::Update(updates)) => { let mut data = current.as_ref().clone(); - let vid = updates.vid; data.merge_remove_null_fields(updates) .map_err(|e| key.unknown_attribute(e))?; let data = Arc::new(data); @@ -504,7 +504,6 @@ impl EntityCache { data, block, end: None, - vid, }) } else { None @@ -512,8 +511,7 @@ impl EntityCache { } // Entity was removed and then updated, so it will be overwritten (Some(current), EntityOp::Overwrite(data)) => { - let vid = data.vid; - let data = Arc::new(data.e.clone()); + let data = Arc::new(data); 
self.current.insert(key.clone(), Some(data.cheap_clone())); if current != data { Some(Overwrite { @@ -521,7 +519,6 @@ impl EntityCache { data, block, end: None, - vid, }) } else { None diff --git a/graph/src/components/store/mod.rs b/graph/src/components/store/mod.rs index 9713b78c150..31b0e62cfae 100644 --- a/graph/src/components/store/mod.rs +++ b/graph/src/components/store/mod.rs @@ -30,7 +30,7 @@ use crate::cheap_clone::CheapClone; use crate::components::store::write::EntityModification; use crate::constraint_violation; use crate::data::store::scalar::Bytes; -use crate::data::store::{EntityV, Id, IdList, Value}; +use crate::data::store::{Id, IdList, Value}; use crate::data::value::Word; use crate::data_source::CausalityRegion; use crate::derive::CheapClone; @@ -829,7 +829,7 @@ where pub enum EntityOperation { /// Locates the entity specified by `key` and sets its attributes according to the contents of /// `data`. If no entity exists with this key, creates a new entity. - Set { key: EntityKey, data: EntityV }, + Set { key: EntityKey, data: Entity }, /// Removes an entity with the specified key, if one exists. Remove { key: EntityKey }, diff --git a/graph/src/components/store/write.rs b/graph/src/components/store/write.rs index 6dd2cda472b..d4632c8410f 100644 --- a/graph/src/components/store/write.rs +++ b/graph/src/components/store/write.rs @@ -45,7 +45,6 @@ pub enum EntityModification { data: Arc, block: BlockNumber, end: Option, - vid: i64, }, /// Update the entity by overwriting it Overwrite { @@ -53,7 +52,6 @@ pub enum EntityModification { data: Arc, block: BlockNumber, end: Option, - vid: i64, }, /// Remove the entity Remove { key: EntityKey, block: BlockNumber }, @@ -69,7 +67,6 @@ pub struct EntityWrite<'a> { // The end of the block range for which this write is valid. 
The value // of `end` itself is not included in the range pub end: Option, - pub vid: i64, } impl std::fmt::Display for EntityWrite<'_> { @@ -92,28 +89,24 @@ impl<'a> TryFrom<&'a EntityModification> for EntityWrite<'a> { data, block, end, - vid, } => Ok(EntityWrite { id: &key.entity_id, entity: data, causality_region: key.causality_region, block: *block, end: *end, - vid: *vid, }), EntityModification::Overwrite { key, data, block, end, - vid, } => Ok(EntityWrite { id: &key.entity_id, entity: &data, causality_region: key.causality_region, block: *block, end: *end, - vid: *vid, }), EntityModification::Remove { .. } => Err(()), @@ -220,13 +213,11 @@ impl EntityModification { data, block, end, - vid, } => Ok(Insert { key, data, block, end, - vid, }), Remove { key, .. } => { return Err(constraint_violation!( @@ -280,23 +271,21 @@ impl EntityModification { } impl EntityModification { - pub fn insert(key: EntityKey, data: Entity, block: BlockNumber, vid: i64) -> Self { + pub fn insert(key: EntityKey, data: Entity, block: BlockNumber) -> Self { EntityModification::Insert { key, data: Arc::new(data), block, end: None, - vid, } } - pub fn overwrite(key: EntityKey, data: Entity, block: BlockNumber, vid: i64) -> Self { + pub fn overwrite(key: EntityKey, data: Entity, block: BlockNumber) -> Self { EntityModification::Overwrite { key, data: Arc::new(data), block, end: None, - vid, } } @@ -1028,36 +1017,32 @@ mod test { let value = value.clone(); let key = THING_TYPE.parse_key("one").unwrap(); - let vid = 0; + let vid = 0i64; match value { Ins(block) => EntityModification::Insert { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block }), + data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), block, end: None, - vid, }, Ovw(block) => EntityModification::Overwrite { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block }), + data: Arc::new(entity! 
{ SCHEMA => id: "one", count: block, vid: vid }), block, end: None, - vid, }, Rem(block) => EntityModification::Remove { key, block }, InsC(block, end) => EntityModification::Insert { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block }), + data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), block, end: Some(end), - vid, }, OvwC(block, end) => EntityModification::Overwrite { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block }), + data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), block, end: Some(end), - vid, }, } } diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 86e255b23e7..cd16c8ee5e7 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -735,9 +735,6 @@ where lazy_static! { /// The name of the id attribute, `"id"` pub static ref ID: Word = Word::from("id"); - - /// The name of the vid attribute, `"vid"` - pub static ref VID: Word = Word::from("vid"); } /// An entity is represented as a map of attribute names to values. @@ -922,14 +919,18 @@ impl Entity { .expect("the vid is set to a valid value") } - /// This version of the function returns 0 if the VID is not set. It should be - /// used only in the testing code for more lenient definition of entities. + /// Sets the VID of the entity. The previous one is returned. + pub fn set_vid(&mut self, value: i64) -> Result, InternError> { + self.0.insert("vid", value.into()) + } + + /// Sets the VID if not set. Should be used only for tests. #[cfg(debug_assertions)] - pub fn vid_or_default(&self) -> i64 { - self.get("vid") - .unwrap_or(&Value::Int8(100)) - .as_int8() - .expect("the vid is set to a valid value") + pub fn set_vid_if_empty(&mut self) { + let vid = self.get("vid"); + if vid.is_none() { + let _ = self.set_vid(100).expect("the vid should be set"); + } } /// Merges an entity update `update` into this entity. 
@@ -946,8 +947,8 @@ impl Entity { /// If a key exists in both entities, the value from `update` is chosen. /// If a key only exists on one entity, the value from that entity is chosen. /// If a key is set to `Value::Null` in `update`, the key/value pair is removed. - pub fn merge_remove_null_fields(&mut self, update: EntityV) -> Result<(), InternError> { - for (key, value) in update.e.0.into_iter() { + pub fn merge_remove_null_fields(&mut self, update: Entity) -> Result<(), InternError> { + for (key, value) in update.into_iter() { match value { Value::Null => self.0.remove(&key), _ => self.0.insert(&key, value)?, @@ -1098,19 +1099,6 @@ impl std::fmt::Debug for Entity { } } -/// An entity wrapper that has VID too. -#[derive(Debug, Clone, CacheWeight, PartialEq, Eq, Serialize)] -pub struct EntityV { - pub e: Entity, - pub vid: i64, -} - -impl EntityV { - pub fn new(e: Entity, vid: i64) -> Self { - Self { e, vid } - } -} - /// An object that is returned from a query. It's a an `r::Value` which /// carries the attributes of the object (`__typename`, `id` etc.) and /// possibly a pointer to its parent if the query that constructed it is one diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index 8acbf48b5e8..b926562e0d4 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -482,11 +482,11 @@ fn make_thing(id: &str, value: &str, vid: i64) -> (String, EntityModification) { static ref SCHEMA: InputSchema = InputSchema::raw(DOCUMENT, "doesntmatter"); static ref THING_TYPE: EntityType = SCHEMA.entity_type("Thing").unwrap(); } - let data = entity! { SCHEMA => id: id, value: value, extra: USER_DATA}; + let data = entity! 
{ SCHEMA => id: id, value: value, extra: USER_DATA, vid: vid }; let key = THING_TYPE.parse_key(id).unwrap(); ( format!("{{ \"id\": \"{}\", \"value\": \"{}\"}}", id, value), - EntityModification::insert(key, data, 0, vid), + EntityModification::insert(key, data, 0), ) } diff --git a/server/index-node/src/resolver.rs b/server/index-node/src/resolver.rs index 870d319dcf1..fb3937afdc2 100644 --- a/server/index-node/src/resolver.rs +++ b/server/index-node/src/resolver.rs @@ -768,7 +768,7 @@ fn entity_changes_to_graphql(entity_changes: Vec) -> r::Value { .push(key.entity_id); } EntityOperation::Set { key, data } => { - updates.entry(key.entity_type).or_default().push(data.e); + updates.entry(key.entity_type).or_default().push(data); } } } diff --git a/store/postgres/src/relational.rs b/store/postgres/src/relational.rs index 3fff8c8dae7..f6a14c3a5fa 100644 --- a/store/postgres/src/relational.rs +++ b/store/postgres/src/relational.rs @@ -64,7 +64,7 @@ use crate::{ }, }; use graph::components::store::DerivedEntityQuery; -use graph::data::store::{EntityV, Id, IdList, IdType, BYTES_SCALAR}; +use graph::data::store::{Id, IdList, IdType, BYTES_SCALAR}; use graph::data::subgraph::schema::POI_TABLE; use graph::prelude::{ anyhow, info, BlockNumber, DeploymentHash, Entity, EntityChange, EntityOperation, Logger, @@ -697,10 +697,9 @@ impl Layout { let entity_id = data.id(); processed_entities.insert((entity_type.clone(), entity_id.clone())); - let vid = data.vid(); changes.push(EntityOperation::Set { key: entity_type.key_in(entity_id, CausalityRegion::from_entity(&data)), - data: EntityV::new(data, vid), + data, }); } diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index 681266f6a0e..b9fb1088924 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -539,7 +539,14 @@ impl EntityData { // table column; those will be things like the // block_range that `select *` pulls in but that we // 
don't care about here - if let Some(column) = table.column(&SqlName::verbatim(key)) { + if key == "vid" { + // VID is not in the input schema but we need it so deserialize it too + match T::Value::from_column_value(&ColumnType::Int8, json) { + Ok(value) if value.is_null() => None, + Ok(value) => Some(Ok((Word::from("vid"), value))), + Err(e) => Some(Err(e)), + } + } else if let Some(column) = table.column(&SqlName::verbatim(key)) { match T::Value::from_column_value(&column.column_type, json) { Ok(value) if value.is_null() => None, Ok(value) => Some(Ok((Word::from(column.field.to_string()), value))), @@ -2450,7 +2457,7 @@ impl<'a> InsertRow<'a> { } let br_value = BlockRangeValue::new(table, row.block, row.end); let causality_region = row.causality_region; - let vid = row.vid; + let vid = row.entity.vid(); Ok(Self { values, br_value, diff --git a/store/test-store/src/store.rs b/store/test-store/src/store.rs index 30017db3d21..5475a29db2c 100644 --- a/store/test-store/src/store.rs +++ b/store/test-store/src/store.rs @@ -6,7 +6,6 @@ use graph::components::store::BlockStore; use graph::data::graphql::load_manager::LoadManager; use graph::data::query::QueryResults; use graph::data::query::QueryTarget; -use graph::data::store::EntityV; use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError}; use graph::data::subgraph::SubgraphFeature; use graph::data_source::DataSource; @@ -423,11 +422,11 @@ pub async fn insert_entities( deployment: &DeploymentLocator, entities: Vec<(EntityType, Entity)>, ) -> Result<(), StoreError> { - let insert_ops = entities.into_iter().map(|(entity_type, data)| { - let vid = data.vid_or_default(); + let insert_ops = entities.into_iter().map(|(entity_type, mut data)| { + data.set_vid_if_empty(); EntityOperation::Set { key: entity_type.key(data.id()), - data: EntityV::new(data, vid), + data, } }); diff --git a/store/test-store/tests/graph/entity_cache.rs b/store/test-store/tests/graph/entity_cache.rs index 2d210ea25ce..ab3cc5ed02e 100644 
--- a/store/test-store/tests/graph/entity_cache.rs +++ b/store/test-store/tests/graph/entity_cache.rs @@ -4,7 +4,7 @@ use graph::components::store::{ DeploymentCursorTracker, DerivedEntityQuery, GetScope, LoadRelatedRequest, ReadStore, StoredDynamicDataSource, WritableStore, }; -use graph::data::store::{EntityV, Id}; +use graph::data::store::Id; use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError, SubgraphHealth}; use graph::data_source::CausalityRegion; use graph::schema::{EntityKey, EntityType, InputSchema}; @@ -207,24 +207,27 @@ fn insert_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai" }; + let mut mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai" }; let mogwai_key = make_band_key("mogwai"); cache .set(mogwai_key.clone(), mogwai_data.clone(), 0) .unwrap(); - let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }; + let mut sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros" }; let sigurros_key = make_band_key("sigurros"); cache .set(sigurros_key.clone(), sigurros_data.clone(), 0) .unwrap(); + mogwai_data.set_vid(100).unwrap(); + sigurros_data.set_vid(101).unwrap(); + let result = cache.as_modifications(0); assert_eq!( sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ - EntityModification::insert(mogwai_key, mogwai_data, 0, 100), - EntityModification::insert(sigurros_key, sigurros_data, 0, 101) + EntityModification::insert(mogwai_key, mogwai_data, 0), + EntityModification::insert(sigurros_key, sigurros_data, 0) ]) ); } @@ -253,24 +256,27 @@ fn overwrite_modifications() { let store = Arc::new(store); let mut cache = EntityCache::new(store); - let mogwai_data = entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }; + let mut mogwai_data = entity! 
{ SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }; let mogwai_key = make_band_key("mogwai"); cache .set(mogwai_key.clone(), mogwai_data.clone(), 0) .unwrap(); - let sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994 }; + let mut sigurros_data = entity! { SCHEMA => id: "sigurros", name: "Sigur Ros", founded: 1994}; let sigurros_key = make_band_key("sigurros"); cache .set(sigurros_key.clone(), sigurros_data.clone(), 0) .unwrap(); + mogwai_data.set_vid(100).unwrap(); + sigurros_data.set_vid(101).unwrap(); + let result = cache.as_modifications(0); assert_eq!( sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![ - EntityModification::overwrite(mogwai_key, mogwai_data, 0, 100), - EntityModification::overwrite(sigurros_key, sigurros_data, 0, 101) + EntityModification::overwrite(mogwai_key, mogwai_data, 0), + EntityModification::overwrite(sigurros_key, sigurros_data, 0) ]) ); } @@ -307,9 +313,8 @@ fn consecutive_modifications() { sort_by_entity_key(result.unwrap().modifications), sort_by_entity_key(vec![EntityModification::overwrite( update_key, - entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 }, + entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995, vid: 101i64 }, 0, - 100 )]) ); } @@ -465,20 +470,26 @@ async fn insert_test_data(store: Arc) -> DeploymentLocator fn create_account_entity(id: &str, name: &str, email: &str, age: i32, vid: i64) -> EntityOperation { let test_entity = - entity! { LOAD_RELATED_SUBGRAPH => id: id, name: name, email: email, age: age }; + entity! 
{ LOAD_RELATED_SUBGRAPH => id: id, name: name, email: email, age: age, vid: vid}; EntityOperation::Set { key: ACCOUNT_TYPE.parse_key(id).unwrap(), - data: EntityV::new(test_entity, vid), + data: test_entity, } } -fn create_wallet_entity(id: &str, account_id: &Id, balance: i32) -> Entity { +fn create_wallet_entity(id: &str, account_id: &Id, balance: i32, vid: i64) -> Entity { let account_id = Value::from(account_id.clone()); - entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance } + entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance, vid: vid} } + +fn create_wallet_entity_no_vid(id: &str, account_id: &Id, balance: i32) -> Entity { + let account_id = Value::from(account_id.clone()); + entity! { LOAD_RELATED_SUBGRAPH => id: id, account: account_id, balance: balance} +} + fn create_wallet_operation(id: &str, account_id: &Id, balance: i32, vid: i64) -> EntityOperation { - let test_wallet = EntityV::new(create_wallet_entity(id, account_id, balance), vid); + let test_wallet = create_wallet_entity(id, account_id, balance, vid); EntityOperation::Set { key: WALLET_TYPE.parse_key(id).unwrap(), data: test_wallet, @@ -496,9 +507,9 @@ fn check_for_account_with_multiple_wallets() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 1); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; assert_eq!(result, expeted_vec); @@ -516,7 +527,7 @@ fn check_for_account_with_single_wallet() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let 
wallet_1 = create_wallet_entity("4", &account_id, 32_i32); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 4); let expeted_vec = vec![wallet_1]; assert_eq!(result, expeted_vec); @@ -600,9 +611,9 @@ fn check_for_insert_async_store() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("4", &account_id, 32_i32); - let wallet_2 = create_wallet_entity("5", &account_id, 79_i32); - let wallet_3 = create_wallet_entity("6", &account_id, 200_i32); + let wallet_1 = create_wallet_entity("4", &account_id, 32_i32, 4); + let wallet_2 = create_wallet_entity("5", &account_id, 79_i32, 12); + let wallet_3 = create_wallet_entity("6", &account_id, 200_i32, 13); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; assert_eq!(result, expeted_vec); @@ -632,9 +643,9 @@ fn check_for_insert_async_not_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_1 = create_wallet_entity("1", &account_id, 67_i32); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_1 = create_wallet_entity("1", &account_id, 67_i32, 1); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); let expeted_vec = vec![wallet_1, wallet_2, wallet_3]; assert_eq!(result, expeted_vec); @@ -652,7 +663,7 @@ fn check_for_update_async_related() { EntityOperation::Set { ref data, .. 
} => data.clone(), _ => unreachable!(), }; - assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data.e); + assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data); // insert a new wallet transact_entity_operations( &store, @@ -670,9 +681,9 @@ fn check_for_update_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); - let expeted_vec = vec![new_data.e, wallet_2, wallet_3]; + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); + let expeted_vec = vec![new_data, wallet_2, wallet_3]; assert_eq!(result, expeted_vec); }); @@ -700,8 +711,8 @@ fn check_for_delete_async_related() { causality_region: CausalityRegion::ONCHAIN, }; let result = cache.load_related(&request).unwrap(); - let wallet_2 = create_wallet_entity("2", &account_id, 92_i32); - let wallet_3 = create_wallet_entity("3", &account_id, 192_i32); + let wallet_2 = create_wallet_entity("2", &account_id, 92_i32, 2); + let wallet_3 = create_wallet_entity("3", &account_id, 192_i32, 3); let expeted_vec = vec![wallet_2, wallet_3]; assert_eq!(result, expeted_vec); @@ -713,26 +724,30 @@ fn scoped_get() { // Key for an existing entity that is in the store let account1 = ACCOUNT_TYPE.parse_id("1").unwrap(); let key1 = WALLET_TYPE.parse_key("1").unwrap(); - let wallet1 = create_wallet_entity("1", &account1, 67); + let wallet1 = create_wallet_entity_no_vid("1", &account1, 67); // Create a new entity that is not in the store let account5 = ACCOUNT_TYPE.parse_id("5").unwrap(); - let wallet5 = create_wallet_entity("5", &account5, 100); + let mut wallet5 = create_wallet_entity_no_vid("5", &account5, 100); let key5 = WALLET_TYPE.parse_key("5").unwrap(); cache.set(key5.clone(), wallet5.clone(), 0).unwrap(); + wallet5.set_vid(100).unwrap(); 
// For the new entity, we can retrieve it with either scope let act5 = cache.get(&key5, GetScope::InBlock).unwrap(); assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); let act5 = cache.get(&key5, GetScope::Store).unwrap(); assert_eq!(Some(&wallet5), act5.as_ref().map(|e| e.as_ref())); + let mut wallet1a = wallet1.clone(); + wallet1a.set_vid(1).unwrap(); // For an entity in the store, we can not get it `InBlock` but with // `Store` let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); assert_eq!(None, act1); let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet1a), act1.as_ref().map(|e| e.as_ref())); + // Even after reading from the store, the entity is not visible with // `InBlock` let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); @@ -741,10 +756,12 @@ fn scoped_get() { let mut wallet1 = wallet1; wallet1.set("balance", 70).unwrap(); cache.set(key1.clone(), wallet1.clone(), 0).unwrap(); + wallet1a = wallet1; + wallet1a.set_vid(101).unwrap(); let act1 = cache.get(&key1, GetScope::InBlock).unwrap(); - assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet1a), act1.as_ref().map(|e| e.as_ref())); let act1 = cache.get(&key1, GetScope::Store).unwrap(); - assert_eq!(Some(&wallet1), act1.as_ref().map(|e| e.as_ref())); + assert_eq!(Some(&wallet1a), act1.as_ref().map(|e| e.as_ref())); }) } diff --git a/store/test-store/tests/graphql/query.rs b/store/test-store/tests/graphql/query.rs index 5358249040d..d7e7dec8f55 100644 --- a/store/test-store/tests/graphql/query.rs +++ b/store/test-store/tests/graphql/query.rs @@ -1,7 +1,6 @@ use graph::blockchain::{Block, BlockTime}; use graph::data::query::Trace; use graph::data::store::scalar::Timestamp; -use graph::data::store::EntityV; use graph::data::subgraph::schema::DeploymentCreate; use graph::data::subgraph::LATEST_VERSION; use graph::entity; @@ -425,11 +424,11 @@ async fn 
insert_test_entities( .into_iter() .map(|(typename, entities)| { let entity_type = schema.entity_type(typename).unwrap(); - entities.into_iter().map(move |data| { - let vid = data.vid_or_default(); + entities.into_iter().map(move |mut data| { + data.set_vid_if_empty(); EntityOperation::Set { key: entity_type.key(data.id()), - data: EntityV::new(data, vid), + data, } }) }) diff --git a/store/test-store/tests/postgres/aggregation.rs b/store/test-store/tests/postgres/aggregation.rs index de47b88d6a1..5c0420a495a 100644 --- a/store/test-store/tests/postgres/aggregation.rs +++ b/store/test-store/tests/postgres/aggregation.rs @@ -1,7 +1,6 @@ use std::fmt::Write; use std::{future::Future, sync::Arc}; -use graph::data::store::EntityV; use graph::{ blockchain::{block_stream::FirehoseCursor, BlockPtr, BlockTime}, components::{ @@ -80,14 +79,11 @@ pub async fn insert( let schema = ReadStore::input_schema(store); let ops = entities .into_iter() - .map(|data| { + .map(|mut data| { let data_type = schema.entity_type("Data").unwrap(); let key = data_type.key(data.id()); - let vid = data.vid_or_default(); - EntityOperation::Set { - data: EntityV::new(data, vid), - key, - } + let _ = data.set_vid_if_empty(); + EntityOperation::Set { data, key } }) .collect(); @@ -178,10 +174,10 @@ fn stats_hour(schema: &InputSchema) -> Vec> { let block2 = vec![ entity! { schema => id: 11i64, timestamp: ts2, token: TOKEN1.clone(), sum: bd(3), sum_sq: bd(5), max: bd(10), first: bd(10), last: bd(2), - value: bd(14), totalValue: bd(14) }, + value: bd(14), totalValue: bd(14), vid: 1i64 }, entity! { schema => id: 12i64, timestamp: ts2, token: TOKEN2.clone(), sum: bd(3), sum_sq: bd(5), max: bd(20), first: bd(1), last: bd(20), - value: bd(41), totalValue: bd(41) }, + value: bd(41), totalValue: bd(41), vid: 2i64 }, ]; let ts3 = BlockTime::since_epoch(3600, 0); @@ -191,10 +187,10 @@ fn stats_hour(schema: &InputSchema) -> Vec> { let mut v2 = vec![ entity! 
{ schema => id: 21i64, timestamp: ts3, token: TOKEN1.clone(), sum: bd(3), sum_sq: bd(9), max: bd(30), first: bd(30), last: bd(30), - value: bd(90), totalValue: bd(104) }, + value: bd(90), totalValue: bd(104), vid: 3i64 }, entity! { schema => id: 22i64, timestamp: ts3, token: TOKEN2.clone(), sum: bd(3), sum_sq: bd(9), max: bd(3), first: bd(3), last: bd(3), - value: bd(9), totalValue: bd(50)}, + value: bd(9), totalValue: bd(50), vid: 4i64 }, ]; v1.append(&mut v2); v1 diff --git a/store/test-store/tests/postgres/graft.rs b/store/test-store/tests/postgres/graft.rs index 59cbe5e3f0a..0d741b1b26c 100644 --- a/store/test-store/tests/postgres/graft.rs +++ b/store/test-store/tests/postgres/graft.rs @@ -9,7 +9,7 @@ use graph::components::store::{ DeploymentLocator, EntityOrder, EntityQuery, PruneReporter, PruneRequest, PruningStrategy, VersionStats, }; -use graph::data::store::{scalar, EntityV, Id}; +use graph::data::store::{scalar, Id}; use graph::data::subgraph::schema::*; use graph::data::subgraph::*; use graph::semver::Version; @@ -257,13 +257,14 @@ fn create_test_entity( seconds_age: age * 31557600, weight: Value::BigDecimal(weight.into()), coffee: coffee, - favorite_color: favorite_color + favorite_color: favorite_color, + vid: vid, }; let entity_type = TEST_SUBGRAPH_SCHEMA.entity_type(entity_type).unwrap(); EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), - data: EntityV::new(test_entity, vid), + data: test_entity, } } @@ -329,9 +330,10 @@ async fn check_graft( // Make our own entries for block 2 shaq.set("email", "shaq@gmail.com").unwrap(); + let _ = shaq.set_vid(3); let op = EntityOperation::Set { key: user_type.parse_key("3").unwrap(), - data: EntityV::new(shaq, 3), + data: shaq, }; transact_and_wait(&store, &deployment, BLOCKS[2].clone(), vec![op]) .await diff --git a/store/test-store/tests/postgres/relational.rs b/store/test-store/tests/postgres/relational.rs index 8a2e5a2f3d9..a02778df675 100644 --- a/store/test-store/tests/postgres/relational.rs 
+++ b/store/test-store/tests/postgres/relational.rs @@ -205,11 +205,13 @@ lazy_static! { bigInt: big_int.clone(), bigIntArray: vec![big_int.clone(), (big_int + 1.into())], color: "yellow", + vid: 0i64, } }; static ref EMPTY_NULLABLESTRINGS_ENTITY: Entity = { entity! { THINGS_SCHEMA => id: "one", + vid: 0i64, } }; static ref SCALAR_TYPE: EntityType = THINGS_SCHEMA.entity_type("Scalar").unwrap(); @@ -495,7 +497,6 @@ fn create_schema(conn: &mut PgConnection) -> Layout { fn scrub(entity: &Entity) -> Entity { let mut scrubbed = entity.clone(); scrubbed.remove_null_fields(); - scrubbed.remove("vid"); scrubbed } @@ -756,7 +757,6 @@ fn serialize_bigdecimal() { ) .expect("Failed to read Scalar[one]") .unwrap(); - entity.remove("vid"); assert_entity_eq!(entity, actual); } }); @@ -918,7 +918,6 @@ fn conflicting_entity() { data: fred, block: 2, end: None, - vid: 0, }, 2, ) diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index 6897dc9e914..1d24bbcd8a2 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -84,9 +84,8 @@ pub fn row_group_update( ) -> RowGroup { let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { - let vid = data.vid_or_default(); group - .push(EntityModification::overwrite(key, data, block, vid), block) + .push(EntityModification::overwrite(key, data, block), block) .unwrap(); } group @@ -99,9 +98,8 @@ pub fn row_group_insert( ) -> RowGroup { let mut group = RowGroup::new(entity_type.clone(), false); for (key, data) in data { - let vid = data.vid_or_default(); group - .push(EntityModification::insert(key, data, block, vid), block) + .push(EntityModification::insert(key, data, block), block) .unwrap(); } group @@ -162,7 +160,6 @@ fn create_schema(conn: &mut PgConnection) -> Layout { fn scrub(entity: &Entity) -> Entity { let mut scrubbed = entity.clone(); scrubbed.remove_null_fields(); - 
scrubbed.remove("vid"); scrubbed } diff --git a/store/test-store/tests/postgres/store.rs b/store/test-store/tests/postgres/store.rs index 22e473e3810..be7f3cf550b 100644 --- a/store/test-store/tests/postgres/store.rs +++ b/store/test-store/tests/postgres/store.rs @@ -2,7 +2,6 @@ use graph::blockchain::block_stream::FirehoseCursor; use graph::blockchain::BlockTime; use graph::data::graphql::ext::TypeDefinitionExt; use graph::data::query::QueryTarget; -use graph::data::store::EntityV; use graph::data::subgraph::schema::DeploymentCreate; use graph::data_source::common::MappingABI; use graph::futures01::{future, Stream}; @@ -290,11 +289,12 @@ fn create_test_entity( weight: Value::BigDecimal(weight.into()), coffee: coffee, favorite_color: favorite_color, + vid: vid, }; EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), - data: EntityV::new(test_entity, vid), + data: test_entity, } } @@ -358,6 +358,7 @@ fn get_entity_1() { seconds_age: Value::BigInt(BigInt::from(2114359200)), weight: Value::BigDecimal(184.4.into()), coffee: false, + vid: 0i64 }; // "favorite_color" was set to `Null` earlier and should be absent @@ -383,6 +384,7 @@ fn get_entity_3() { seconds_age: Value::BigInt(BigInt::from(883612800)), weight: Value::BigDecimal(111.7.into()), coffee: false, + vid: 3_i64, }; // "favorite_color" was set to `Null` earlier and should be absent @@ -444,7 +446,7 @@ fn update_existing() { }; // Verify that the entity before updating is different from what we expect afterwards - assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data.e); + assert_ne!(writable.get(&entity_key).unwrap().unwrap(), new_data); // Set test entity; as the entity already exists an update should be performed let count = get_entity_count(store.clone(), &deployment.hash); @@ -459,16 +461,13 @@ fn update_existing() { assert_eq!(count, get_entity_count(store.clone(), &deployment.hash)); // Verify that the entity in the store has changed to what we have set. 
- let bin_name = match new_data.e.get("bin_name") { + let bin_name = match new_data.get("bin_name") { Some(Value::Bytes(bytes)) => bytes.clone(), _ => unreachable!(), }; - new_data - .e - .insert("bin_name", Value::Bytes(bin_name)) - .unwrap(); - assert_eq!(writable.get(&entity_key).unwrap(), Some(new_data.e)); + new_data.insert("bin_name", Value::Bytes(bin_name)).unwrap(); + assert_eq!(writable.get(&entity_key).unwrap(), Some(new_data)); }) } @@ -478,7 +477,8 @@ fn partially_update_existing() { let entity_key = USER_TYPE.parse_key("1").unwrap(); let schema = writable.input_schema(); - let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; + let partial_entity = + entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 11i64 }; let original_entity = writable .get(&entity_key) @@ -492,7 +492,7 @@ fn partially_update_existing() { TEST_BLOCK_3_PTR.clone(), vec![EntityOperation::Set { key: entity_key.clone(), - data: EntityV::new(partial_entity.clone(), 11), + data: partial_entity.clone(), }], ) .await @@ -1088,7 +1088,8 @@ fn revert_block_with_partial_update() { let entity_key = USER_TYPE.parse_key("1").unwrap(); let schema = writable.input_schema(); - let partial_entity = entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null }; + let partial_entity = + entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 5i64 }; let original_entity = writable.get(&entity_key).unwrap().expect("missing entity"); @@ -1099,7 +1100,7 @@ fn revert_block_with_partial_update() { TEST_BLOCK_3_PTR.clone(), vec![EntityOperation::Set { key: entity_key.clone(), - data: EntityV::new(partial_entity.clone(), 5), + data: partial_entity, }], ) .await @@ -1183,7 +1184,8 @@ fn revert_block_with_dynamic_data_source_operations() { // Create operations to add a user let user_key = USER_TYPE.parse_key("1").unwrap(); - let partial_entity = entity! 
{ schema => id: "1", name: "Johnny Boy", email: Value::Null }; + let partial_entity = + entity! { schema => id: "1", name: "Johnny Boy", email: Value::Null, vid: 5i64 }; // Get the original user for comparisons let original_user = writable.get(&user_key).unwrap().expect("missing entity"); @@ -1194,7 +1196,7 @@ fn revert_block_with_dynamic_data_source_operations() { let ops = vec![EntityOperation::Set { key: user_key.clone(), - data: EntityV::new(partial_entity.clone(), 5), + data: partial_entity.clone(), }]; // Add user and dynamic data source to the store @@ -1315,9 +1317,13 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { TEST_BLOCK_1_PTR.clone(), added_entities .iter() - .map(|(id, data)| EntityOperation::Set { - key: USER_TYPE.parse_key(id.as_str()).unwrap(), - data: EntityV::new(data.clone(), data.vid_or_default()), + .map(|(id, data)| { + let mut data = data.clone(); + data.set_vid_if_empty(); + EntityOperation::Set { + key: USER_TYPE.parse_key(id.as_str()).unwrap(), + data, + } }) .collect(), ) @@ -1325,10 +1331,10 @@ fn entity_changes_are_fired_and_forwarded_to_subscriptions() { .unwrap(); // Update an entity in the store - let updated_entity = entity! { schema => id: "1", name: "Johnny" }; + let updated_entity = entity! { schema => id: "1", name: "Johnny", vid: 7i64 }; let update_op = EntityOperation::Set { key: USER_TYPE.parse_key("1").unwrap(), - data: EntityV::new(updated_entity.clone(), 7), + data: updated_entity.clone(), }; // Delete an entity in the store @@ -1523,11 +1529,11 @@ fn handle_large_string_with_index() { block: BlockNumber, vid: i64, ) -> EntityModification { - let data = entity! { schema => id: id, name: name }; + let data = entity! 
{ schema => id: id, name: name, vid: vid }; let key = USER_TYPE.parse_key(id).unwrap(); - EntityModification::insert(key, data, block, vid) + EntityModification::insert(key, data, block) } run_test(|store, writable, deployment| async move { @@ -1568,6 +1574,7 @@ fn handle_large_string_with_index() { ) .await .expect("Failed to insert large text"); + writable.flush().await.unwrap(); let query = user_query() @@ -1623,11 +1630,11 @@ fn handle_large_bytea_with_index() { block: BlockNumber, vid: i64, ) -> EntityModification { - let data = entity! { schema => id: id, bin_name: scalar::Bytes::from(name) }; + let data = entity! { schema => id: id, bin_name: scalar::Bytes::from(name), vid: vid }; let key = USER_TYPE.parse_key(id).unwrap(); - EntityModification::insert(key, data, block, vid) + EntityModification::insert(key, data, block) } run_test(|store, writable, deployment| async move { @@ -1832,11 +1839,12 @@ fn window() { age: i32, vid: i64, ) -> EntityOperation { - let entity = entity! { TEST_SUBGRAPH_SCHEMA => id: id, age: age, favorite_color: color }; + let entity = + entity! { TEST_SUBGRAPH_SCHEMA => id: id, age: age, favorite_color: color, vid: vid }; EntityOperation::Set { key: entity_type.parse_key(id).unwrap(), - data: EntityV::new(entity, vid), + data: entity, } } diff --git a/store/test-store/tests/postgres/writable.rs b/store/test-store/tests/postgres/writable.rs index dbc66e5e401..2228c148d25 100644 --- a/store/test-store/tests/postgres/writable.rs +++ b/store/test-store/tests/postgres/writable.rs @@ -1,5 +1,4 @@ use graph::blockchain::block_stream::{EntitySourceOperation, FirehoseCursor}; -use graph::data::store::EntityV; use graph::data::subgraph::schema::DeploymentCreate; use graph::data::value::Word; use graph::data_source::CausalityRegion; @@ -143,12 +142,13 @@ async fn insert_count( let count_key_local = |counter_type: &EntityType, id: &str| counter_type.parse_key(id).unwrap(); let data = entity! 
{ TEST_SUBGRAPH_SCHEMA => id: "1", - count: count as i32 + count: count as i32, + vid: block as i64, }; let entity_op = if block != 3 && block != 5 && block != 7 { EntityOperation::Set { key: count_key_local(&COUNTER_TYPE, &data.get("id").unwrap().to_string()), - data: EntityV::new(data, block.into()), + data, } } else { EntityOperation::Remove { @@ -164,10 +164,11 @@ async fn insert_count( let data = entity! { TEST_SUBGRAPH_SCHEMA => id: &block.to_string(), count :count as i32, + vid: block as i64, }; let entity_op = EntityOperation::Set { key: count_key_local(&COUNTER2_TYPE, &data.get("id").unwrap().to_string()), - data: EntityV::new(data, block.into()), + data, }; ops.push(entity_op); } @@ -296,7 +297,7 @@ fn restart() { // Cause an error by leaving out the non-nullable `count` attribute let entity_ops = vec![EntityOperation::Set { key: count_key("1"), - data: EntityV::new(entity! { schema => id: "1"}, 0), + data: entity! { schema => id: "1", vid: 0i64}, }]; transact_entity_operations( &subgraph_store, @@ -320,7 +321,7 @@ fn restart() { // Retry our write with correct data let entity_ops = vec![EntityOperation::Set { key: count_key("1"), - data: EntityV::new(entity! { schema => id: "1", count: 1}, 0), + data: entity! 
{ schema => id: "1", count: 1, vid: 0i64}, }]; // `SubgraphStore` caches the correct writable so that this call // uses the restarted writable, and is equivalent to using @@ -342,13 +343,13 @@ fn restart() { fn read_range_test() { run_test(|store, writable, sourceable, deployment| async move { let result_entities = vec![ - r#"(1, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(2), id: String("1") }, vid: 1 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(2), id: String("1") }, vid: 1 }])"#, - r#"(2, [EntitySourceOperation { entity_op: Modify, entity_type: EntityType(Counter), entity: Entity { count: Int(4), id: String("1") }, vid: 2 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(4), id: String("2") }, vid: 2 }])"#, - r#"(3, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(4), id: String("1") }, vid: 2 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(6), id: String("3") }, vid: 3 }])"#, - r#"(4, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1") }, vid: 4 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(8), id: String("4") }, vid: 4 }])"#, - r#"(5, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1") }, vid: 4 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(10), id: String("5") }, vid: 5 }])"#, - r#"(6, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1") }, vid: 6 }])"#, - r#"(7, [EntitySourceOperation { entity_op: Delete, entity_type: 
EntityType(Counter), entity: Entity { count: Int(12), id: String("1") }, vid: 6 }])"#, + r#"(1, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(2), id: String("1"), vid: Int8(1) }, vid: 1 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(2), id: String("1"), vid: Int8(1) }, vid: 1 }])"#, + r#"(2, [EntitySourceOperation { entity_op: Modify, entity_type: EntityType(Counter), entity: Entity { count: Int(4), id: String("1"), vid: Int8(2) }, vid: 2 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(4), id: String("2"), vid: Int8(2) }, vid: 2 }])"#, + r#"(3, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(4), id: String("1"), vid: Int8(2) }, vid: 2 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(6), id: String("3"), vid: Int8(3) }, vid: 3 }])"#, + r#"(4, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1"), vid: Int8(4) }, vid: 4 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(8), id: String("4"), vid: Int8(4) }, vid: 4 }])"#, + r#"(5, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(8), id: String("1"), vid: Int8(4) }, vid: 4 }, EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter2), entity: Entity { count: Int(10), id: String("5"), vid: Int8(5) }, vid: 5 }])"#, + r#"(6, [EntitySourceOperation { entity_op: Create, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1"), vid: Int8(6) }, vid: 6 }])"#, + r#"(7, [EntitySourceOperation { entity_op: Delete, entity_type: EntityType(Counter), entity: Entity { count: Int(12), id: String("1"), vid: Int8(6) 
}, vid: 6 }])"#, ]; let subgraph_store = store.subgraph_store(); writable.deployment_synced().unwrap(); From 640e7d19c84488963e000056b450cc928a3bd402 Mon Sep 17 00:00:00 2001 From: Zoran Cvetkov Date: Sun, 19 Jan 2025 18:16:52 +0200 Subject: [PATCH 27/27] cleanup --- graph/src/components/store/write.rs | 9 ++++----- graph/src/data/store/mod.rs | 4 ++-- store/postgres/src/relational_queries.rs | 2 +- store/test-store/tests/chain/ethereum/manifest.rs | 2 +- store/test-store/tests/core/interfaces.rs | 4 ++-- store/test-store/tests/postgres/aggregation.rs | 2 +- store/test-store/tests/postgres/relational_bytes.rs | 10 ++-------- 7 files changed, 13 insertions(+), 20 deletions(-) diff --git a/graph/src/components/store/write.rs b/graph/src/components/store/write.rs index d4632c8410f..721e3d80bc1 100644 --- a/graph/src/components/store/write.rs +++ b/graph/src/components/store/write.rs @@ -1017,30 +1017,29 @@ mod test { let value = value.clone(); let key = THING_TYPE.parse_key("one").unwrap(); - let vid = 0i64; match value { Ins(block) => EntityModification::Insert { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), + data: Arc::new(entity! { SCHEMA => id: "one", count: block }), block, end: None, }, Ovw(block) => EntityModification::Overwrite { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), + data: Arc::new(entity! { SCHEMA => id: "one", count: block }), block, end: None, }, Rem(block) => EntityModification::Remove { key, block }, InsC(block, end) => EntityModification::Insert { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), + data: Arc::new(entity! { SCHEMA => id: "one", count: block }), block, end: Some(end), }, OvwC(block, end) => EntityModification::Overwrite { key, - data: Arc::new(entity! { SCHEMA => id: "one", count: block, vid: vid }), + data: Arc::new(entity! 
{ SCHEMA => id: "one", count: block }), block, end: Some(end), }, diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index cd16c8ee5e7..b16f6a2866d 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -924,7 +924,7 @@ impl Entity { self.0.insert("vid", value.into()) } - /// Sets the VID if not set. Should be used only for tests. + /// Sets the VID if it's not already set. Should be used only for tests. #[cfg(debug_assertions)] pub fn set_vid_if_empty(&mut self) { let vid = self.get("vid"); @@ -948,7 +948,7 @@ impl Entity { /// If a key only exists on one entity, the value from that entity is chosen. /// If a key is set to `Value::Null` in `update`, the key/value pair is removed. pub fn merge_remove_null_fields(&mut self, update: Entity) -> Result<(), InternError> { - for (key, value) in update.into_iter() { + for (key, value) in update.0.into_iter() { match value { Value::Null => self.0.remove(&key), _ => self.0.insert(&key, value)?, diff --git a/store/postgres/src/relational_queries.rs b/store/postgres/src/relational_queries.rs index b9fb1088924..08127dff2c5 100644 --- a/store/postgres/src/relational_queries.rs +++ b/store/postgres/src/relational_queries.rs @@ -540,7 +540,7 @@ impl EntityData { // block_range that `select *` pulls in but that we // don't care about here if key == "vid" { - // VID is not in the input schema but we need it so deserialize it too + // VID is not in the input schema but we need it, so deserialize it too match T::Value::from_column_value(&ColumnType::Int8, json) { Ok(value) if value.is_null() => None, Ok(value) => Some(Ok((Word::from("vid"), value))), diff --git a/store/test-store/tests/chain/ethereum/manifest.rs b/store/test-store/tests/chain/ethereum/manifest.rs index 23fdfae7c57..c750adb7b72 100644 --- a/store/test-store/tests/chain/ethereum/manifest.rs +++ b/store/test-store/tests/chain/ethereum/manifest.rs @@ -416,7 +416,7 @@ specVersion: 0.0.2 msg ); - let thing = entity! 
{ schema => id: "datthing", vid: 1i64 }; + let thing = entity! { schema => id: "datthing" }; test_store::insert_entities( &deployment, vec![(schema.entity_type("Thing").unwrap(), thing)], diff --git a/store/test-store/tests/core/interfaces.rs b/store/test-store/tests/core/interfaces.rs index 3b8ca1724c2..a4fc8314665 100644 --- a/store/test-store/tests/core/interfaces.rs +++ b/store/test-store/tests/core/interfaces.rs @@ -202,8 +202,8 @@ async fn reference_interface_derived() { let query = "query { events { id transaction { id } } }"; let buy = ("BuyEvent", entity! { schema => id: "buy", vid: 0i64 }); - let sell1 = ("SellEvent", entity! { schema => id: "sell1", vid: 0i64 }); - let sell2 = ("SellEvent", entity! { schema => id: "sell2", vid: 1i64 }); + let sell1 = ("SellEvent", entity! { schema => id: "sell1", vid: 1i64 }); + let sell2 = ("SellEvent", entity! { schema => id: "sell2", vid: 2i64 }); let gift = ( "GiftEvent", entity! { schema => id: "gift", transaction: "txn" }, diff --git a/store/test-store/tests/postgres/aggregation.rs b/store/test-store/tests/postgres/aggregation.rs index 5c0420a495a..b131cb4a323 100644 --- a/store/test-store/tests/postgres/aggregation.rs +++ b/store/test-store/tests/postgres/aggregation.rs @@ -82,7 +82,7 @@ pub async fn insert( .map(|mut data| { let data_type = schema.entity_type("Data").unwrap(); let key = data_type.key(data.id()); - let _ = data.set_vid_if_empty(); + data.set_vid_if_empty(); EntityOperation::Set { data, key } }) .collect(); diff --git a/store/test-store/tests/postgres/relational_bytes.rs b/store/test-store/tests/postgres/relational_bytes.rs index 1d24bbcd8a2..3f4bd88c8d8 100644 --- a/store/test-store/tests/postgres/relational_bytes.rs +++ b/store/test-store/tests/postgres/relational_bytes.rs @@ -157,12 +157,6 @@ fn create_schema(conn: &mut PgConnection) -> Layout { .expect("Failed to create relational schema") } -fn scrub(entity: &Entity) -> Entity { - let mut scrubbed = entity.clone(); - 
scrubbed.remove_null_fields(); - scrubbed -} - macro_rules! assert_entity_eq { ($left:expr, $right:expr) => {{ let (left, right) = (&($left), &($right)); @@ -271,7 +265,7 @@ fn find() { // Happy path: find existing entity let entity = find_entity(conn, layout, ID).unwrap(); - assert_entity_eq!(scrub(&BEEF_ENTITY), entity); + assert_entity_eq!(BEEF_ENTITY.clone(), entity); assert!(CausalityRegion::from_entity(&entity) == CausalityRegion::ONCHAIN); // Find non-existing entity @@ -336,7 +330,7 @@ fn update() { .expect("Failed to read Thing[deadbeef]") .unwrap(); - assert_entity_eq!(scrub(&entity), actual); + assert_entity_eq!(entity, actual); }); }