diff --git a/benchmarks/src/btreemap.rs b/benchmarks/src/btreemap.rs index a186d3ee..5f7edc0b 100644 --- a/benchmarks/src/btreemap.rs +++ b/benchmarks/src/btreemap.rs @@ -47,6 +47,150 @@ bench_tests! { btreemap_insert_blob_512_1024, insert_helper_v1, Blob512, Blob1024; btreemap_insert_blob_512_1024_v2, insert_helper_v2, Blob512, Blob1024; + // 4 x V + btreemap_insert_blob_4_4, insert_helper_v1, Blob4, Blob4; + btreemap_insert_blob_4_4_v2, insert_helper_v2, Blob4, Blob4; + btreemap_insert_blob_4_8, insert_helper_v1, Blob4, Blob8; + btreemap_insert_blob_4_8_v2, insert_helper_v2, Blob4, Blob8; + btreemap_insert_blob_4_16, insert_helper_v1, Blob4, Blob16; + btreemap_insert_blob_4_16_v2, insert_helper_v2, Blob4, Blob16; + btreemap_insert_blob_4_32, insert_helper_v1, Blob4, Blob32; + btreemap_insert_blob_4_32_v2, insert_helper_v2, Blob4, Blob32; + btreemap_insert_blob_4_64, insert_helper_v1, Blob4, Blob64; + btreemap_insert_blob_4_64_v2, insert_helper_v2, Blob4, Blob64; + btreemap_insert_blob_4_128, insert_helper_v1, Blob4, Blob128; + btreemap_insert_blob_4_128_v2, insert_helper_v2, Blob4, Blob128; + btreemap_insert_blob_4_256, insert_helper_v1, Blob4, Blob256; + btreemap_insert_blob_4_256_v2, insert_helper_v2, Blob4, Blob256; + btreemap_insert_blob_4_512, insert_helper_v1, Blob4, Blob512; + btreemap_insert_blob_4_512_v2, insert_helper_v2, Blob4, Blob512; + + // 8 x V + btreemap_insert_blob_8_4, insert_helper_v1, Blob8, Blob4; + btreemap_insert_blob_8_4_v2, insert_helper_v2, Blob8, Blob4; + btreemap_insert_blob_8_8, insert_helper_v1, Blob8, Blob8; + btreemap_insert_blob_8_8_v2, insert_helper_v2, Blob8, Blob8; + btreemap_insert_blob_8_16, insert_helper_v1, Blob8, Blob16; + btreemap_insert_blob_8_16_v2, insert_helper_v2, Blob8, Blob16; + btreemap_insert_blob_8_32, insert_helper_v1, Blob8, Blob32; + btreemap_insert_blob_8_32_v2, insert_helper_v2, Blob8, Blob32; + btreemap_insert_blob_8_64, insert_helper_v1, Blob8, Blob64; + btreemap_insert_blob_8_64_v2, insert_helper_v2, Blob8, Blob64; + btreemap_insert_blob_8_128, insert_helper_v1, Blob8, Blob128; + btreemap_insert_blob_8_128_v2, insert_helper_v2, Blob8, Blob128; + btreemap_insert_blob_8_256, insert_helper_v1, Blob8, Blob256; + btreemap_insert_blob_8_256_v2, insert_helper_v2, Blob8, Blob256; + btreemap_insert_blob_8_512, insert_helper_v1, Blob8, Blob512; + btreemap_insert_blob_8_512_v2, insert_helper_v2, Blob8, Blob512; + + // 16 x V + btreemap_insert_blob_16_4, insert_helper_v1, Blob16, Blob4; + btreemap_insert_blob_16_4_v2, insert_helper_v2, Blob16, Blob4; + btreemap_insert_blob_16_8, insert_helper_v1, Blob16, Blob8; + btreemap_insert_blob_16_8_v2, insert_helper_v2, Blob16, Blob8; + btreemap_insert_blob_16_16, insert_helper_v1, Blob16, Blob16; + btreemap_insert_blob_16_16_v2, insert_helper_v2, Blob16, Blob16; + btreemap_insert_blob_16_32, insert_helper_v1, Blob16, Blob32; + btreemap_insert_blob_16_32_v2, insert_helper_v2, Blob16, Blob32; + btreemap_insert_blob_16_64, insert_helper_v1, Blob16, Blob64; + btreemap_insert_blob_16_64_v2, insert_helper_v2, Blob16, Blob64; + btreemap_insert_blob_16_128, insert_helper_v1, Blob16, Blob128; + btreemap_insert_blob_16_128_v2, insert_helper_v2, Blob16, Blob128; + btreemap_insert_blob_16_256, insert_helper_v1, Blob16, Blob256; + btreemap_insert_blob_16_256_v2, insert_helper_v2, Blob16, Blob256; + btreemap_insert_blob_16_512, insert_helper_v1, Blob16, Blob512; + btreemap_insert_blob_16_512_v2, insert_helper_v2, Blob16, Blob512; + + // 32 x V + btreemap_insert_blob_32_4, insert_helper_v1, Blob32, Blob4; + 
btreemap_insert_blob_32_4_v2, insert_helper_v2, Blob32, Blob4; + btreemap_insert_blob_32_8, insert_helper_v1, Blob32, Blob8; + btreemap_insert_blob_32_8_v2, insert_helper_v2, Blob32, Blob8; + btreemap_insert_blob_32_16, insert_helper_v1, Blob32, Blob16; + btreemap_insert_blob_32_16_v2, insert_helper_v2, Blob32, Blob16; + btreemap_insert_blob_32_32, insert_helper_v1, Blob32, Blob32; + btreemap_insert_blob_32_32_v2, insert_helper_v2, Blob32, Blob32; + btreemap_insert_blob_32_64, insert_helper_v1, Blob32, Blob64; + btreemap_insert_blob_32_64_v2, insert_helper_v2, Blob32, Blob64; + btreemap_insert_blob_32_128, insert_helper_v1, Blob32, Blob128; + btreemap_insert_blob_32_128_v2, insert_helper_v2, Blob32, Blob128; + btreemap_insert_blob_32_256, insert_helper_v1, Blob32, Blob256; + btreemap_insert_blob_32_256_v2, insert_helper_v2, Blob32, Blob256; + btreemap_insert_blob_32_512, insert_helper_v1, Blob32, Blob512; + btreemap_insert_blob_32_512_v2, insert_helper_v2, Blob32, Blob512; + + // 64 x V + btreemap_insert_blob_64_4, insert_helper_v1, Blob64, Blob4; + btreemap_insert_blob_64_4_v2, insert_helper_v2, Blob64, Blob4; + btreemap_insert_blob_64_8, insert_helper_v1, Blob64, Blob8; + btreemap_insert_blob_64_8_v2, insert_helper_v2, Blob64, Blob8; + btreemap_insert_blob_64_16, insert_helper_v1, Blob64, Blob16; + btreemap_insert_blob_64_16_v2, insert_helper_v2, Blob64, Blob16; + btreemap_insert_blob_64_32, insert_helper_v1, Blob64, Blob32; + btreemap_insert_blob_64_32_v2, insert_helper_v2, Blob64, Blob32; + btreemap_insert_blob_64_64, insert_helper_v1, Blob64, Blob64; + btreemap_insert_blob_64_64_v2, insert_helper_v2, Blob64, Blob64; + btreemap_insert_blob_64_128, insert_helper_v1, Blob64, Blob128; + btreemap_insert_blob_64_128_v2, insert_helper_v2, Blob64, Blob128; + btreemap_insert_blob_64_256, insert_helper_v1, Blob64, Blob256; + btreemap_insert_blob_64_256_v2, insert_helper_v2, Blob64, Blob256; + btreemap_insert_blob_64_512, insert_helper_v1, Blob64, Blob512; + btreemap_insert_blob_64_512_v2, insert_helper_v2, Blob64, Blob512; + + // 128 x V + btreemap_insert_blob_128_4, insert_helper_v1, Blob128, Blob4; + btreemap_insert_blob_128_4_v2, insert_helper_v2, Blob128, Blob4; + btreemap_insert_blob_128_8, insert_helper_v1, Blob128, Blob8; + btreemap_insert_blob_128_8_v2, insert_helper_v2, Blob128, Blob8; + btreemap_insert_blob_128_16, insert_helper_v1, Blob128, Blob16; + btreemap_insert_blob_128_16_v2, insert_helper_v2, Blob128, Blob16; + btreemap_insert_blob_128_32, insert_helper_v1, Blob128, Blob32; + btreemap_insert_blob_128_32_v2, insert_helper_v2, Blob128, Blob32; + btreemap_insert_blob_128_64, insert_helper_v1, Blob128, Blob64; + btreemap_insert_blob_128_64_v2, insert_helper_v2, Blob128, Blob64; + btreemap_insert_blob_128_128, insert_helper_v1, Blob128, Blob128; + btreemap_insert_blob_128_128_v2, insert_helper_v2, Blob128, Blob128; + btreemap_insert_blob_128_256, insert_helper_v1, Blob128, Blob256; + btreemap_insert_blob_128_256_v2, insert_helper_v2, Blob128, Blob256; + btreemap_insert_blob_128_512, insert_helper_v1, Blob128, Blob512; + btreemap_insert_blob_128_512_v2, insert_helper_v2, Blob128, Blob512; + + // 256 x V + btreemap_insert_blob_256_4, insert_helper_v1, Blob256, Blob4; + btreemap_insert_blob_256_4_v2, insert_helper_v2, Blob256, Blob4; + btreemap_insert_blob_256_8, insert_helper_v1, Blob256, Blob8; + btreemap_insert_blob_256_8_v2, insert_helper_v2, Blob256, Blob8; + btreemap_insert_blob_256_16, insert_helper_v1, Blob256, Blob16; + btreemap_insert_blob_256_16_v2, insert_helper_v2, 
Blob256, Blob16; + btreemap_insert_blob_256_32, insert_helper_v1, Blob256, Blob32; + btreemap_insert_blob_256_32_v2, insert_helper_v2, Blob256, Blob32; + btreemap_insert_blob_256_64, insert_helper_v1, Blob256, Blob64; + btreemap_insert_blob_256_64_v2, insert_helper_v2, Blob256, Blob64; + btreemap_insert_blob_256_128, insert_helper_v1, Blob256, Blob128; + btreemap_insert_blob_256_128_v2, insert_helper_v2, Blob256, Blob128; + btreemap_insert_blob_256_256, insert_helper_v1, Blob256, Blob256; + btreemap_insert_blob_256_256_v2, insert_helper_v2, Blob256, Blob256; + btreemap_insert_blob_256_512, insert_helper_v1, Blob256, Blob512; + btreemap_insert_blob_256_512_v2, insert_helper_v2, Blob256, Blob512; + + // 512 x V + btreemap_insert_blob_512_4, insert_helper_v1, Blob512, Blob4; + btreemap_insert_blob_512_4_v2, insert_helper_v2, Blob512, Blob4; + btreemap_insert_blob_512_8, insert_helper_v1, Blob512, Blob8; + btreemap_insert_blob_512_8_v2, insert_helper_v2, Blob512, Blob8; + btreemap_insert_blob_512_16, insert_helper_v1, Blob512, Blob16; + btreemap_insert_blob_512_16_v2, insert_helper_v2, Blob512, Blob16; + btreemap_insert_blob_512_32, insert_helper_v1, Blob512, Blob32; + btreemap_insert_blob_512_32_v2, insert_helper_v2, Blob512, Blob32; + btreemap_insert_blob_512_64, insert_helper_v1, Blob512, Blob64; + btreemap_insert_blob_512_64_v2, insert_helper_v2, Blob512, Blob64; + btreemap_insert_blob_512_128, insert_helper_v1, Blob512, Blob128; + btreemap_insert_blob_512_128_v2, insert_helper_v2, Blob512, Blob128; + btreemap_insert_blob_512_256, insert_helper_v1, Blob512, Blob256; + btreemap_insert_blob_512_256_v2, insert_helper_v2, Blob512, Blob256; + btreemap_insert_blob_512_512, insert_helper_v1, Blob512, Blob512; + btreemap_insert_blob_512_512_v2, insert_helper_v2, Blob512, Blob512; + // 1024 x V btreemap_insert_blob_1024_4, insert_helper_v1, Blob1024, Blob4; btreemap_insert_blob_1024_4_v2, insert_helper_v2, Blob1024, Blob4; @@ -64,16 +208,19 @@ bench_tests! { btreemap_insert_blob_1024_256_v2, insert_helper_v2, Blob1024, Blob256; btreemap_insert_blob_1024_512, insert_helper_v1, Blob1024, Blob512; btreemap_insert_blob_1024_512_v2, insert_helper_v2, Blob1024, Blob512; - btreemap_insert_blob_1024_512_v2_mem_manager, insert_helper_v2_mem_manager, Blob1024, Blob512; + //btreemap_insert_blob_1024_512_v2_mem_manager, insert_helper_v2_mem_manager, Blob1024, Blob512; + + btreemap_insert_blob_1024_1024, insert_helper_v1, Blob1024, Blob1024; + btreemap_insert_blob_1024_1024_v2, insert_helper_v2, Blob1024, Blob1024; - btreemap_insert_u64_u64, insert_helper_v1, u64, u64; - btreemap_insert_u64_u64_v2, insert_helper_v2, u64, u64; - btreemap_insert_u64_u64_v2_mem_manager, insert_helper_v2_mem_manager, u64, u64; + // btreemap_insert_u64_u64, insert_helper_v1, u64, u64; + // btreemap_insert_u64_u64_v2, insert_helper_v2, u64, u64; + // btreemap_insert_u64_u64_v2_mem_manager, insert_helper_v2_mem_manager, u64, u64; - btreemap_insert_u64_blob_8, insert_helper_v1, u64, Blob8; - btreemap_insert_u64_blob_8_v2, insert_helper_v2, u64, Blob8; - btreemap_insert_blob_8_u64, insert_helper_v1, Blob8, u64; - btreemap_insert_blob_8_u64_v2, insert_helper_v2, Blob8, u64; + // btreemap_insert_u64_blob_8, insert_helper_v1, u64, Blob8; + // btreemap_insert_u64_blob_8_v2, insert_helper_v2, u64, Blob8; + // btreemap_insert_blob_8_u64, insert_helper_v1, Blob8, u64; + // btreemap_insert_blob_8_u64_v2, insert_helper_v2, Blob8, u64; } // Benchmarks removing keys from a BTreeMap. @@ -96,13 +243,178 @@ bench_tests! 
{ btreemap_remove_blob_512_1024, remove_helper_v1, Blob512, Blob1024; btreemap_remove_blob_512_1024_v2, remove_helper_v2, Blob512, Blob1024; - btreemap_remove_u64_u64, remove_helper_v1, u64, u64; - btreemap_remove_u64_u64_v2, remove_helper_v2, u64, u64; - - btreemap_remove_u64_blob_8, remove_helper_v1, u64, Blob8; - btreemap_remove_u64_blob_8_v2, remove_helper_v2, u64, Blob8; - btreemap_remove_blob_8_u64, remove_helper_v1, Blob8, u64; - btreemap_remove_blob_8_u64_v2, remove_helper_v2, Blob8, u64; + // // 4 x V + // btreemap_remove_blob_4_4, remove_helper_v1, Blob4, Blob4; + // btreemap_remove_blob_4_4_v2, remove_helper_v2, Blob4, Blob4; + // btreemap_remove_blob_4_8, remove_helper_v1, Blob4, Blob8; + // btreemap_remove_blob_4_8_v2, remove_helper_v2, Blob4, Blob8; + // btreemap_remove_blob_4_16, remove_helper_v1, Blob4, Blob16; + // btreemap_remove_blob_4_16_v2, remove_helper_v2, Blob4, Blob16; + // btreemap_remove_blob_4_32, remove_helper_v1, Blob4, Blob32; + // btreemap_remove_blob_4_32_v2, remove_helper_v2, Blob4, Blob32; + // btreemap_remove_blob_4_64, remove_helper_v1, Blob4, Blob64; + // btreemap_remove_blob_4_64_v2, remove_helper_v2, Blob4, Blob64; + // btreemap_remove_blob_4_128, remove_helper_v1, Blob4, Blob128; + // btreemap_remove_blob_4_128_v2, remove_helper_v2, Blob4, Blob128; + // btreemap_remove_blob_4_256, remove_helper_v1, Blob4, Blob256; + // btreemap_remove_blob_4_256_v2, remove_helper_v2, Blob4, Blob256; + // btreemap_remove_blob_4_512, remove_helper_v1, Blob4, Blob512; + // btreemap_remove_blob_4_512_v2, remove_helper_v2, Blob4, Blob512; + + // // 8 x V + // btreemap_remove_blob_8_4, remove_helper_v1, Blob8, Blob4; + // btreemap_remove_blob_8_4_v2, remove_helper_v2, Blob8, Blob4; + // btreemap_remove_blob_8_8, remove_helper_v1, Blob8, Blob8; + // btreemap_remove_blob_8_8_v2, remove_helper_v2, Blob8, Blob8; + // btreemap_remove_blob_8_16, remove_helper_v1, Blob8, Blob16; + // btreemap_remove_blob_8_16_v2, remove_helper_v2, Blob8, Blob16; + // btreemap_remove_blob_8_32, remove_helper_v1, Blob8, Blob32; + // btreemap_remove_blob_8_32_v2, remove_helper_v2, Blob8, Blob32; + // btreemap_remove_blob_8_64, remove_helper_v1, Blob8, Blob64; + // btreemap_remove_blob_8_64_v2, remove_helper_v2, Blob8, Blob64; + // btreemap_remove_blob_8_128, remove_helper_v1, Blob8, Blob128; + // btreemap_remove_blob_8_128_v2, remove_helper_v2, Blob8, Blob128; + // btreemap_remove_blob_8_256, remove_helper_v1, Blob8, Blob256; + // btreemap_remove_blob_8_256_v2, remove_helper_v2, Blob8, Blob256; + // btreemap_remove_blob_8_512, remove_helper_v1, Blob8, Blob512; + // btreemap_remove_blob_8_512_v2, remove_helper_v2, Blob8, Blob512; + + // // 16 x V + // btreemap_remove_blob_16_4, remove_helper_v1, Blob16, Blob4; + // btreemap_remove_blob_16_4_v2, remove_helper_v2, Blob16, Blob4; + // btreemap_remove_blob_16_8, remove_helper_v1, Blob16, Blob8; + // btreemap_remove_blob_16_8_v2, remove_helper_v2, Blob16, Blob8; + // btreemap_remove_blob_16_16, remove_helper_v1, Blob16, Blob16; + // btreemap_remove_blob_16_16_v2, remove_helper_v2, Blob16, Blob16; + // btreemap_remove_blob_16_32, remove_helper_v1, Blob16, Blob32; + // btreemap_remove_blob_16_32_v2, remove_helper_v2, Blob16, Blob32; + // btreemap_remove_blob_16_64, remove_helper_v1, Blob16, Blob64; + // btreemap_remove_blob_16_64_v2, remove_helper_v2, Blob16, Blob64; + // btreemap_remove_blob_16_128, remove_helper_v1, Blob16, Blob128; + // btreemap_remove_blob_16_128_v2, remove_helper_v2, Blob16, Blob128; + // btreemap_remove_blob_16_256, remove_helper_v1, 
Blob16, Blob256; + // btreemap_remove_blob_16_256_v2, remove_helper_v2, Blob16, Blob256; + // btreemap_remove_blob_16_512, remove_helper_v1, Blob16, Blob512; + // btreemap_remove_blob_16_512_v2, remove_helper_v2, Blob16, Blob512; + + // // 32 x V + // btreemap_remove_blob_32_4, remove_helper_v1, Blob32, Blob4; + // btreemap_remove_blob_32_4_v2, remove_helper_v2, Blob32, Blob4; + // btreemap_remove_blob_32_8, remove_helper_v1, Blob32, Blob8; + // btreemap_remove_blob_32_8_v2, remove_helper_v2, Blob32, Blob8; + // btreemap_remove_blob_32_16, remove_helper_v1, Blob32, Blob16; + // btreemap_remove_blob_32_16_v2, remove_helper_v2, Blob32, Blob16; + // btreemap_remove_blob_32_32, remove_helper_v1, Blob32, Blob32; + // btreemap_remove_blob_32_32_v2, remove_helper_v2, Blob32, Blob32; + // btreemap_remove_blob_32_64, remove_helper_v1, Blob32, Blob64; + // btreemap_remove_blob_32_64_v2, remove_helper_v2, Blob32, Blob64; + // btreemap_remove_blob_32_128, remove_helper_v1, Blob32, Blob128; + // btreemap_remove_blob_32_128_v2, remove_helper_v2, Blob32, Blob128; + // btreemap_remove_blob_32_256, remove_helper_v1, Blob32, Blob256; + // btreemap_remove_blob_32_256_v2, remove_helper_v2, Blob32, Blob256; + // btreemap_remove_blob_32_512, remove_helper_v1, Blob32, Blob512; + // btreemap_remove_blob_32_512_v2, remove_helper_v2, Blob32, Blob512; + + // // 64 x V + // btreemap_remove_blob_64_4, remove_helper_v1, Blob64, Blob4; + // btreemap_remove_blob_64_4_v2, remove_helper_v2, Blob64, Blob4; + // btreemap_remove_blob_64_8, remove_helper_v1, Blob64, Blob8; + // btreemap_remove_blob_64_8_v2, remove_helper_v2, Blob64, Blob8; + // btreemap_remove_blob_64_16, remove_helper_v1, Blob64, Blob16; + // btreemap_remove_blob_64_16_v2, remove_helper_v2, Blob64, Blob16; + // btreemap_remove_blob_64_32, remove_helper_v1, Blob64, Blob32; + // btreemap_remove_blob_64_32_v2, remove_helper_v2, Blob64, Blob32; + // btreemap_remove_blob_64_64, remove_helper_v1, Blob64, Blob64; + // btreemap_remove_blob_64_64_v2, remove_helper_v2, Blob64, Blob64; + // btreemap_remove_blob_64_128, remove_helper_v1, Blob64, Blob128; + // btreemap_remove_blob_64_128_v2, remove_helper_v2, Blob64, Blob128; + // btreemap_remove_blob_64_256, remove_helper_v1, Blob64, Blob256; + // btreemap_remove_blob_64_256_v2, remove_helper_v2, Blob64, Blob256; + // btreemap_remove_blob_64_512, remove_helper_v1, Blob64, Blob512; + // btreemap_remove_blob_64_512_v2, remove_helper_v2, Blob64, Blob512; + + // // 128 x V + // btreemap_remove_blob_128_4, remove_helper_v1, Blob128, Blob4; + // btreemap_remove_blob_128_4_v2, remove_helper_v2, Blob128, Blob4; + // btreemap_remove_blob_128_8, remove_helper_v1, Blob128, Blob8; + // btreemap_remove_blob_128_8_v2, remove_helper_v2, Blob128, Blob8; + // btreemap_remove_blob_128_16, remove_helper_v1, Blob128, Blob16; + // btreemap_remove_blob_128_16_v2, remove_helper_v2, Blob128, Blob16; + // btreemap_remove_blob_128_32, remove_helper_v1, Blob128, Blob32; + // btreemap_remove_blob_128_32_v2, remove_helper_v2, Blob128, Blob32; + // btreemap_remove_blob_128_64, remove_helper_v1, Blob128, Blob64; + // btreemap_remove_blob_128_64_v2, remove_helper_v2, Blob128, Blob64; + // btreemap_remove_blob_128_128, remove_helper_v1, Blob128, Blob128; + // btreemap_remove_blob_128_128_v2, remove_helper_v2, Blob128, Blob128; + // btreemap_remove_blob_128_256, remove_helper_v1, Blob128, Blob256; + // btreemap_remove_blob_128_256_v2, remove_helper_v2, Blob128, Blob256; + // btreemap_remove_blob_128_512, remove_helper_v1, Blob128, Blob512; + // 
btreemap_remove_blob_128_512_v2, remove_helper_v2, Blob128, Blob512; + + // // 256 x V + // btreemap_remove_blob_256_4, remove_helper_v1, Blob256, Blob4; + // btreemap_remove_blob_256_4_v2, remove_helper_v2, Blob256, Blob4; + // btreemap_remove_blob_256_8, remove_helper_v1, Blob256, Blob8; + // btreemap_remove_blob_256_8_v2, remove_helper_v2, Blob256, Blob8; + // btreemap_remove_blob_256_16, remove_helper_v1, Blob256, Blob16; + // btreemap_remove_blob_256_16_v2, remove_helper_v2, Blob256, Blob16; + // btreemap_remove_blob_256_32, remove_helper_v1, Blob256, Blob32; + // btreemap_remove_blob_256_32_v2, remove_helper_v2, Blob256, Blob32; + // btreemap_remove_blob_256_64, remove_helper_v1, Blob256, Blob64; + // btreemap_remove_blob_256_64_v2, remove_helper_v2, Blob256, Blob64; + // btreemap_remove_blob_256_128, remove_helper_v1, Blob256, Blob128; + // btreemap_remove_blob_256_128_v2, remove_helper_v2, Blob256, Blob128; + // btreemap_remove_blob_256_256, remove_helper_v1, Blob256, Blob256; + // btreemap_remove_blob_256_256_v2, remove_helper_v2, Blob256, Blob256; + // btreemap_remove_blob_256_512, remove_helper_v1, Blob256, Blob512; + // btreemap_remove_blob_256_512_v2, remove_helper_v2, Blob256, Blob512; + + // // 512 x V + // btreemap_remove_blob_512_4, remove_helper_v1, Blob512, Blob4; + // btreemap_remove_blob_512_4_v2, remove_helper_v2, Blob512, Blob4; + // btreemap_remove_blob_512_8, remove_helper_v1, Blob512, Blob8; + // btreemap_remove_blob_512_8_v2, remove_helper_v2, Blob512, Blob8; + // btreemap_remove_blob_512_16, remove_helper_v1, Blob512, Blob16; + // btreemap_remove_blob_512_16_v2, remove_helper_v2, Blob512, Blob16; + // btreemap_remove_blob_512_32, remove_helper_v1, Blob512, Blob32; + // btreemap_remove_blob_512_32_v2, remove_helper_v2, Blob512, Blob32; + // btreemap_remove_blob_512_64, remove_helper_v1, Blob512, Blob64; + // btreemap_remove_blob_512_64_v2, remove_helper_v2, Blob512, Blob64; + // btreemap_remove_blob_512_128, remove_helper_v1, Blob512, Blob128; + // btreemap_remove_blob_512_128_v2, remove_helper_v2, Blob512, Blob128; + // btreemap_remove_blob_512_256, remove_helper_v1, Blob512, Blob256; + // btreemap_remove_blob_512_256_v2, remove_helper_v2, Blob512, Blob256; + // btreemap_remove_blob_512_512, remove_helper_v1, Blob512, Blob512; + // btreemap_remove_blob_512_512_v2, remove_helper_v2, Blob512, Blob512; + + // // 1024 x V + // btreemap_remove_blob_1024_4, remove_helper_v1, Blob1024, Blob4; + // btreemap_remove_blob_1024_4_v2, remove_helper_v2, Blob1024, Blob4; + // btreemap_remove_blob_1024_8, remove_helper_v1, Blob1024, Blob8; + // btreemap_remove_blob_1024_8_v2, remove_helper_v2, Blob1024, Blob8; + // btreemap_remove_blob_1024_16, remove_helper_v1, Blob1024, Blob16; + // btreemap_remove_blob_1024_16_v2, remove_helper_v2, Blob1024, Blob16; + // btreemap_remove_blob_1024_32, remove_helper_v1, Blob1024, Blob32; + // btreemap_remove_blob_1024_32_v2, remove_helper_v2, Blob1024, Blob32; + // btreemap_remove_blob_1024_64, remove_helper_v1, Blob1024, Blob64; + // btreemap_remove_blob_1024_64_v2, remove_helper_v2, Blob1024, Blob64; + // btreemap_remove_blob_1024_128, remove_helper_v1, Blob1024, Blob128; + // btreemap_remove_blob_1024_128_v2, remove_helper_v2, Blob1024, Blob128; + // btreemap_remove_blob_1024_256, remove_helper_v1, Blob1024, Blob256; + // btreemap_remove_blob_1024_256_v2, remove_helper_v2, Blob1024, Blob256; + // btreemap_remove_blob_1024_512, remove_helper_v1, Blob1024, Blob512; + // btreemap_remove_blob_1024_512_v2, remove_helper_v2, Blob1024, Blob512; + 
+ // btreemap_remove_blob_1024_1024, remove_helper_v1, Blob1024, Blob1024; + // btreemap_remove_blob_1024_1024_v2, remove_helper_v2, Blob1024, Blob1024; + + // btreemap_remove_u64_u64, remove_helper_v1, u64, u64; + // btreemap_remove_u64_u64_v2, remove_helper_v2, u64, u64; + + // btreemap_remove_u64_blob_8, remove_helper_v1, u64, Blob8; + // btreemap_remove_u64_blob_8_v2, remove_helper_v2, u64, Blob8; + // btreemap_remove_blob_8_u64, remove_helper_v1, Blob8, u64; + // btreemap_remove_blob_8_u64_v2, remove_helper_v2, Blob8, u64; } // Benchmarks getting keys from a BTreeMap. @@ -124,16 +436,181 @@ bench_tests! { btreemap_get_blob_256_1024_v2, get_helper_v2, Blob256, Blob1024; btreemap_get_blob_512_1024, get_helper_v1, Blob512, Blob1024; btreemap_get_blob_512_1024_v2, get_helper_v2, Blob512, Blob1024; - btreemap_get_blob_512_1024_v2_mem_manager, get_helper_v2_mem_manager, Blob512, Blob1024; + //btreemap_get_blob_512_1024_v2_mem_manager, get_helper_v2_mem_manager, Blob512, Blob1024; + + // 4 x V + btreemap_get_blob_4_4, get_helper_v1, Blob4, Blob4; + btreemap_get_blob_4_4_v2, get_helper_v2, Blob4, Blob4; + btreemap_get_blob_4_8, get_helper_v1, Blob4, Blob8; + btreemap_get_blob_4_8_v2, get_helper_v2, Blob4, Blob8; + btreemap_get_blob_4_16, get_helper_v1, Blob4, Blob16; + btreemap_get_blob_4_16_v2, get_helper_v2, Blob4, Blob16; + btreemap_get_blob_4_32, get_helper_v1, Blob4, Blob32; + btreemap_get_blob_4_32_v2, get_helper_v2, Blob4, Blob32; + btreemap_get_blob_4_64, get_helper_v1, Blob4, Blob64; + btreemap_get_blob_4_64_v2, get_helper_v2, Blob4, Blob64; + btreemap_get_blob_4_128, get_helper_v1, Blob4, Blob128; + btreemap_get_blob_4_128_v2, get_helper_v2, Blob4, Blob128; + btreemap_get_blob_4_256, get_helper_v1, Blob4, Blob256; + btreemap_get_blob_4_256_v2, get_helper_v2, Blob4, Blob256; + btreemap_get_blob_4_512, get_helper_v1, Blob4, Blob512; + btreemap_get_blob_4_512_v2, get_helper_v2, Blob4, Blob512; + + // 8 x V + btreemap_get_blob_8_4, get_helper_v1, Blob8, Blob4; + btreemap_get_blob_8_4_v2, get_helper_v2, Blob8, Blob4; + btreemap_get_blob_8_8, get_helper_v1, Blob8, Blob8; + btreemap_get_blob_8_8_v2, get_helper_v2, Blob8, Blob8; + btreemap_get_blob_8_16, get_helper_v1, Blob8, Blob16; + btreemap_get_blob_8_16_v2, get_helper_v2, Blob8, Blob16; + btreemap_get_blob_8_32, get_helper_v1, Blob8, Blob32; + btreemap_get_blob_8_32_v2, get_helper_v2, Blob8, Blob32; + btreemap_get_blob_8_64, get_helper_v1, Blob8, Blob64; + btreemap_get_blob_8_64_v2, get_helper_v2, Blob8, Blob64; + btreemap_get_blob_8_128, get_helper_v1, Blob8, Blob128; + btreemap_get_blob_8_128_v2, get_helper_v2, Blob8, Blob128; + btreemap_get_blob_8_256, get_helper_v1, Blob8, Blob256; + btreemap_get_blob_8_256_v2, get_helper_v2, Blob8, Blob256; + btreemap_get_blob_8_512, get_helper_v1, Blob8, Blob512; + btreemap_get_blob_8_512_v2, get_helper_v2, Blob8, Blob512; + + // 16 x V + btreemap_get_blob_16_4, get_helper_v1, Blob16, Blob4; + btreemap_get_blob_16_4_v2, get_helper_v2, Blob16, Blob4; + btreemap_get_blob_16_8, get_helper_v1, Blob16, Blob8; + btreemap_get_blob_16_8_v2, get_helper_v2, Blob16, Blob8; + btreemap_get_blob_16_16, get_helper_v1, Blob16, Blob16; + btreemap_get_blob_16_16_v2, get_helper_v2, Blob16, Blob16; + btreemap_get_blob_16_32, get_helper_v1, Blob16, Blob32; + btreemap_get_blob_16_32_v2, get_helper_v2, Blob16, Blob32; + btreemap_get_blob_16_64, get_helper_v1, Blob16, Blob64; + btreemap_get_blob_16_64_v2, get_helper_v2, Blob16, Blob64; + btreemap_get_blob_16_128, get_helper_v1, Blob16, Blob128; + 
btreemap_get_blob_16_128_v2, get_helper_v2, Blob16, Blob128; + btreemap_get_blob_16_256, get_helper_v1, Blob16, Blob256; + btreemap_get_blob_16_256_v2, get_helper_v2, Blob16, Blob256; + btreemap_get_blob_16_512, get_helper_v1, Blob16, Blob512; + btreemap_get_blob_16_512_v2, get_helper_v2, Blob16, Blob512; + + // 32 x V + btreemap_get_blob_32_4, get_helper_v1, Blob32, Blob4; + btreemap_get_blob_32_4_v2, get_helper_v2, Blob32, Blob4; + btreemap_get_blob_32_8, get_helper_v1, Blob32, Blob8; + btreemap_get_blob_32_8_v2, get_helper_v2, Blob32, Blob8; + btreemap_get_blob_32_16, get_helper_v1, Blob32, Blob16; + btreemap_get_blob_32_16_v2, get_helper_v2, Blob32, Blob16; + btreemap_get_blob_32_32, get_helper_v1, Blob32, Blob32; + btreemap_get_blob_32_32_v2, get_helper_v2, Blob32, Blob32; + btreemap_get_blob_32_64, get_helper_v1, Blob32, Blob64; + btreemap_get_blob_32_64_v2, get_helper_v2, Blob32, Blob64; + btreemap_get_blob_32_128, get_helper_v1, Blob32, Blob128; + btreemap_get_blob_32_128_v2, get_helper_v2, Blob32, Blob128; + btreemap_get_blob_32_256, get_helper_v1, Blob32, Blob256; + btreemap_get_blob_32_256_v2, get_helper_v2, Blob32, Blob256; + btreemap_get_blob_32_512, get_helper_v1, Blob32, Blob512; + btreemap_get_blob_32_512_v2, get_helper_v2, Blob32, Blob512; + + // 64 x V + btreemap_get_blob_64_4, get_helper_v1, Blob64, Blob4; + btreemap_get_blob_64_4_v2, get_helper_v2, Blob64, Blob4; + btreemap_get_blob_64_8, get_helper_v1, Blob64, Blob8; + btreemap_get_blob_64_8_v2, get_helper_v2, Blob64, Blob8; + btreemap_get_blob_64_16, get_helper_v1, Blob64, Blob16; + btreemap_get_blob_64_16_v2, get_helper_v2, Blob64, Blob16; + btreemap_get_blob_64_32, get_helper_v1, Blob64, Blob32; + btreemap_get_blob_64_32_v2, get_helper_v2, Blob64, Blob32; + btreemap_get_blob_64_64, get_helper_v1, Blob64, Blob64; + btreemap_get_blob_64_64_v2, get_helper_v2, Blob64, Blob64; + btreemap_get_blob_64_128, get_helper_v1, Blob64, Blob128; + btreemap_get_blob_64_128_v2, get_helper_v2, Blob64, Blob128; + btreemap_get_blob_64_256, get_helper_v1, Blob64, Blob256; + btreemap_get_blob_64_256_v2, get_helper_v2, Blob64, Blob256; + btreemap_get_blob_64_512, get_helper_v1, Blob64, Blob512; + btreemap_get_blob_64_512_v2, get_helper_v2, Blob64, Blob512; + + // 128 x V + btreemap_get_blob_128_4, get_helper_v1, Blob128, Blob4; + btreemap_get_blob_128_4_v2, get_helper_v2, Blob128, Blob4; + btreemap_get_blob_128_8, get_helper_v1, Blob128, Blob8; + btreemap_get_blob_128_8_v2, get_helper_v2, Blob128, Blob8; + btreemap_get_blob_128_16, get_helper_v1, Blob128, Blob16; + btreemap_get_blob_128_16_v2, get_helper_v2, Blob128, Blob16; + btreemap_get_blob_128_32, get_helper_v1, Blob128, Blob32; + btreemap_get_blob_128_32_v2, get_helper_v2, Blob128, Blob32; + btreemap_get_blob_128_64, get_helper_v1, Blob128, Blob64; + btreemap_get_blob_128_64_v2, get_helper_v2, Blob128, Blob64; + btreemap_get_blob_128_128, get_helper_v1, Blob128, Blob128; + btreemap_get_blob_128_128_v2, get_helper_v2, Blob128, Blob128; + btreemap_get_blob_128_256, get_helper_v1, Blob128, Blob256; + btreemap_get_blob_128_256_v2, get_helper_v2, Blob128, Blob256; + btreemap_get_blob_128_512, get_helper_v1, Blob128, Blob512; + btreemap_get_blob_128_512_v2, get_helper_v2, Blob128, Blob512; + + // 256 x V + btreemap_get_blob_256_4, get_helper_v1, Blob256, Blob4; + btreemap_get_blob_256_4_v2, get_helper_v2, Blob256, Blob4; + btreemap_get_blob_256_8, get_helper_v1, Blob256, Blob8; + btreemap_get_blob_256_8_v2, get_helper_v2, Blob256, Blob8; + btreemap_get_blob_256_16, get_helper_v1, Blob256, 
Blob16; + btreemap_get_blob_256_16_v2, get_helper_v2, Blob256, Blob16; + btreemap_get_blob_256_32, get_helper_v1, Blob256, Blob32; + btreemap_get_blob_256_32_v2, get_helper_v2, Blob256, Blob32; + btreemap_get_blob_256_64, get_helper_v1, Blob256, Blob64; + btreemap_get_blob_256_64_v2, get_helper_v2, Blob256, Blob64; + btreemap_get_blob_256_128, get_helper_v1, Blob256, Blob128; + btreemap_get_blob_256_128_v2, get_helper_v2, Blob256, Blob128; + btreemap_get_blob_256_256, get_helper_v1, Blob256, Blob256; + btreemap_get_blob_256_256_v2, get_helper_v2, Blob256, Blob256; + btreemap_get_blob_256_512, get_helper_v1, Blob256, Blob512; + btreemap_get_blob_256_512_v2, get_helper_v2, Blob256, Blob512; + + // 512 x V + btreemap_get_blob_512_4, get_helper_v1, Blob512, Blob4; + btreemap_get_blob_512_4_v2, get_helper_v2, Blob512, Blob4; + btreemap_get_blob_512_8, get_helper_v1, Blob512, Blob8; + btreemap_get_blob_512_8_v2, get_helper_v2, Blob512, Blob8; + btreemap_get_blob_512_16, get_helper_v1, Blob512, Blob16; + btreemap_get_blob_512_16_v2, get_helper_v2, Blob512, Blob16; + btreemap_get_blob_512_32, get_helper_v1, Blob512, Blob32; + btreemap_get_blob_512_32_v2, get_helper_v2, Blob512, Blob32; + btreemap_get_blob_512_64, get_helper_v1, Blob512, Blob64; + btreemap_get_blob_512_64_v2, get_helper_v2, Blob512, Blob64; + btreemap_get_blob_512_128, get_helper_v1, Blob512, Blob128; + btreemap_get_blob_512_128_v2, get_helper_v2, Blob512, Blob128; + btreemap_get_blob_512_256, get_helper_v1, Blob512, Blob256; + btreemap_get_blob_512_256_v2, get_helper_v2, Blob512, Blob256; + btreemap_get_blob_512_512, get_helper_v1, Blob512, Blob512; + btreemap_get_blob_512_512_v2, get_helper_v2, Blob512, Blob512; - btreemap_get_u64_u64, get_helper_v1, u64, u64; - btreemap_get_u64_u64_v2, get_helper_v2, u64, u64; - btreemap_get_u64_u64_v2_mem_manager, get_helper_v2_mem_manager, u64, u64; - - btreemap_get_u64_blob_8, get_helper_v1, u64, Blob8; - btreemap_get_u64_blob_8_v2, get_helper_v2, u64, Blob8; - btreemap_get_blob_8_u64, get_helper_v1, Blob8, u64; - btreemap_get_blob_8_u64_v2, get_helper_v2, Blob8, u64; + // 1024 x V + btreemap_get_blob_1024_4, get_helper_v1, Blob1024, Blob4; + btreemap_get_blob_1024_4_v2, get_helper_v2, Blob1024, Blob4; + btreemap_get_blob_1024_8, get_helper_v1, Blob1024, Blob8; + btreemap_get_blob_1024_8_v2, get_helper_v2, Blob1024, Blob8; + btreemap_get_blob_1024_16, get_helper_v1, Blob1024, Blob16; + btreemap_get_blob_1024_16_v2, get_helper_v2, Blob1024, Blob16; + btreemap_get_blob_1024_32, get_helper_v1, Blob1024, Blob32; + btreemap_get_blob_1024_32_v2, get_helper_v2, Blob1024, Blob32; + btreemap_get_blob_1024_64, get_helper_v1, Blob1024, Blob64; + btreemap_get_blob_1024_64_v2, get_helper_v2, Blob1024, Blob64; + btreemap_get_blob_1024_128, get_helper_v1, Blob1024, Blob128; + btreemap_get_blob_1024_128_v2, get_helper_v2, Blob1024, Blob128; + btreemap_get_blob_1024_256, get_helper_v1, Blob1024, Blob256; + btreemap_get_blob_1024_256_v2, get_helper_v2, Blob1024, Blob256; + btreemap_get_blob_1024_512, get_helper_v1, Blob1024, Blob512; + btreemap_get_blob_1024_512_v2, get_helper_v2, Blob1024, Blob512; + + btreemap_get_blob_1024_1024, get_helper_v1, Blob1024, Blob1024; + btreemap_get_blob_1024_1024_v2, get_helper_v2, Blob1024, Blob1024; + + // btreemap_get_u64_u64, get_helper_v1, u64, u64; + // btreemap_get_u64_u64_v2, get_helper_v2, u64, u64; + // btreemap_get_u64_u64_v2_mem_manager, get_helper_v2_mem_manager, u64, u64; + + // btreemap_get_u64_blob_8, get_helper_v1, u64, Blob8; + // btreemap_get_u64_blob_8_v2, 
get_helper_v2, u64, Blob8; + // btreemap_get_blob_8_u64, get_helper_v1, Blob8, u64; + // btreemap_get_blob_8_u64_v2, get_helper_v2, Blob8, u64; } // Benchmarks `contains_key` of a BTreeMap. diff --git a/src/btreemap.rs b/src/btreemap.rs index 7d7caa3a..e7e4c457 100644 --- a/src/btreemap.rs +++ b/src/btreemap.rs @@ -366,7 +366,7 @@ where let mut root = self.load_node(self.root_addr); // Check if the key already exists in the root. - if let Ok(idx) = root.search(&key) { + if let Ok(idx) = root.search(&key, self.memory()) { // The key exists. Overwrite it and return the previous value. let (_, previous_value) = root.swap_entry(idx, (key, value), self.memory()); self.save_node(&mut root); @@ -409,7 +409,7 @@ where assert!(!node.is_full()); // Look for the key in the node. - match node.search(&key) { + match node.search(&key, self.memory()) { Ok(idx) => { // The key is already in the node. // Overwrite it and return the previous value. @@ -442,7 +442,7 @@ where if child.is_full() { // Check if the key already exists in the child. - if let Ok(idx) = child.search(&key) { + if let Ok(idx) = child.search(&key, self.memory()) { // The key exists. Overwrite it and return the previous value. let (_, previous_value) = child.swap_entry(idx, (key, value), self.memory()); @@ -455,7 +455,7 @@ where // The children have now changed. Search again for // the child where we need to store the entry in. - let idx = node.search(&key).unwrap_or_else(|idx| idx); + let idx = node.search(&key, self.memory()).unwrap_or_else(|idx| idx); child = self.load_node(node.child(idx)); } @@ -469,7 +469,7 @@ where } } - /// Takes as input a nonfull internal `node` and index to its full child, then + /// Takes as input a non-full internal `node` and index to its full child, then /// splits this child into two, adding an additional child to `node`. /// /// Example: @@ -535,7 +535,7 @@ where { let node = self.load_node(node_addr); // Look for the key in the current node. - match node.search(key) { + match node.search(key, self.memory()) { Ok(idx) => Some(f(node, idx)), // Key found: apply `f`. Err(idx) => match node.node_type() { NodeType::Leaf => None, // At a leaf: key not present. @@ -652,7 +652,7 @@ where match node.node_type() { NodeType::Leaf => { - match node.search(key) { + match node.search(key, self.memory()) { Ok(idx) => { // Case 1: The node is a leaf node and the key exists in it. // This is the simplest case. The key is removed from the leaf. @@ -679,7 +679,7 @@ where } } NodeType::Internal => { - match node.search(key) { + match node.search(key, self.memory()) { Ok(idx) => { // Case 2: The node is an internal node and the key exists in it. @@ -1310,10 +1310,40 @@ mod test { } } - macro_rules! verify_and_run { - ($runner:ident, $K:ty, $V:ty) => {{ + /// Asserts that the associated `BOUND` for `$ty` is _not_ `Unbounded`. + macro_rules! assert_bounded { + ($ty:ty) => { + assert_ne!(<$ty>::BOUND, StorableBound::Unbounded, "Must be Bounded"); + }; + } + + /// Asserts that the associated `BOUND` for `$ty` _is_ `Unbounded`. + macro_rules! assert_unbounded { + ($ty:ty) => { + assert_eq!(<$ty>::BOUND, StorableBound::Unbounded, "Must be Unbounded"); + }; + } + + macro_rules! run_with_key { + ($runner:ident, $K:ty) => {{ verify_monotonic::<$K>(); - $runner::<$K, $V>(); + + // Empty value. + $runner::<$K, ()>(); + + // Bounded values. + assert_bounded!(u32); + $runner::<$K, u32>(); + + assert_bounded!(Blob<20>); + $runner::<$K, Blob<20>>(); + + // Unbounded values. 
+ assert_unbounded!(MonotonicVec32); + $runner::<$K, MonotonicVec32>(); + + assert_unbounded!(MonotonicString32); + $runner::<$K, MonotonicString32>(); }}; } @@ -1322,37 +1352,19 @@ mod test { ($name:ident, $runner:ident) => { #[test] fn $name() { - use StorableBound::Unbounded; - - // Set, empty value, bounded. - { - type Value = (); - assert_ne!(::BOUND, Unbounded, "Must be Bounded"); - verify_and_run!($runner, u32, Value); - verify_and_run!($runner, Blob<10>, Value); - verify_and_run!($runner, MonotonicVec32, Value); - verify_and_run!($runner, MonotonicString32, Value); - } + // Bounded keys. + assert_bounded!(u32); + run_with_key!($runner, u32); - // Map, bounded value. - { - type Value = u32; - assert_ne!(Value::BOUND, Unbounded, "Must be Bounded"); - verify_and_run!($runner, u32, Value); - verify_and_run!($runner, Blob<10>, Value); - verify_and_run!($runner, MonotonicVec32, Value); - verify_and_run!($runner, MonotonicString32, Value); - } + assert_bounded!(Blob<10>); + run_with_key!($runner, Blob<10>); - // Map, unbounded value. - { - type Value = MonotonicVec32; - assert_eq!(Value::BOUND, Unbounded, "Must be Unbounded"); - verify_and_run!($runner, u32, Value); - verify_and_run!($runner, Blob<10>, Value); - verify_and_run!($runner, MonotonicVec32, Value); - verify_and_run!($runner, MonotonicString32, Value); - } + // Unbounded keys. + assert_unbounded!(MonotonicVec32); + run_with_key!($runner, MonotonicVec32); + + assert_unbounded!(MonotonicString32); + run_with_key!($runner, MonotonicString32); } }; } @@ -1440,7 +1452,7 @@ mod test { assert!(right_child.is_full()); let median_index = right_child.entries_len() / 2; let median_key = key(12); - assert_eq!(right_child.key(median_index), &median_key); + assert_eq!(right_child.key(median_index, btree.memory()), &median_key); // Overwrite the value of the median key. assert_eq!(btree.insert(median_key.clone(), value(123)), Some(value(0))); @@ -3089,7 +3101,7 @@ mod test { // [0, 1, 2, 3, 4, 5] [7, 8, 9, 10, 11] let root = btree.load_node(btree.root_addr); assert_eq!(root.node_type(), NodeType::Internal); - assert_eq!(root.keys(), vec![vec![6; 10_000]]); + assert_eq!(root.keys(btree.memory()), vec![vec![6; 10_000]]); assert_eq!(root.children_len(), 2); // Remove the element in the root. @@ -3101,7 +3113,7 @@ mod test { // [0, 1, 2, 3, 4] [7, 8, 9, 10, 11] let root = btree.load_node(btree.root_addr); assert_eq!(root.node_type(), NodeType::Internal); - assert_eq!(root.keys(), vec![vec![5; 10_000]]); + assert_eq!(root.keys(btree.memory()), vec![vec![5; 10_000]]); assert_eq!(root.children_len(), 2); // Remove the element in the root. This triggers the case where the root @@ -3113,7 +3125,7 @@ mod test { let root = btree.load_node(btree.root_addr); assert_eq!(root.node_type(), NodeType::Leaf); assert_eq!( - root.keys(), + root.keys(btree.memory()), vec![ vec![0; 10_000], vec![1; 10_000], diff --git a/src/btreemap/iter.rs b/src/btreemap/iter.rs index 82869508..1e653e16 100644 --- a/src/btreemap/iter.rs +++ b/src/btreemap/iter.rs @@ -94,7 +94,7 @@ where Bound::Included(key) | Bound::Excluded(key) => { let mut node = self.map.load_node(self.map.root_addr); loop { - match node.search(key) { + match node.search(key, self.map.memory()) { Ok(idx) => { if let Bound::Included(_) = self.range.start_bound() { // We found the key exactly matching the left bound. 
@@ -115,7 +115,7 @@ where }; if idx + 1 < node.entries_len() - && self.range.contains(node.key(idx + 1)) + && self.range.contains(node.key(idx + 1, self.map.memory())) { self.forward_cursors.push(Cursor::Node { node, @@ -152,7 +152,9 @@ where NodeType::Leaf => None, }; - if idx < node.entries_len() && self.range.contains(node.key(idx)) { + if idx < node.entries_len() + && self.range.contains(node.key(idx, self.map.memory())) + { self.forward_cursors.push(Cursor::Node { node, next: Index::Entry(idx), @@ -188,7 +190,7 @@ where Bound::Included(key) | Bound::Excluded(key) => { let mut node = self.map.load_node(self.map.root_addr); loop { - match node.search(key) { + match node.search(key, self.map.memory()) { Ok(idx) => { if let Bound::Included(_) = self.range.end_bound() { // We found the key exactly matching the right bound. @@ -208,7 +210,9 @@ where NodeType::Leaf => None, }; - if idx > 0 && self.range.contains(node.key(idx - 1)) { + if idx > 0 + && self.range.contains(node.key(idx - 1, self.map.memory())) + { self.backward_cursors.push(Cursor::Node { node, next: Index::Entry(idx - 1), @@ -243,7 +247,8 @@ where NodeType::Leaf => None, }; - if idx > 0 && self.range.contains(node.key(idx - 1)) { + if idx > 0 && self.range.contains(node.key(idx - 1, self.map.memory())) + { self.backward_cursors.push(Cursor::Node { node, next: Index::Entry(idx - 1), @@ -320,7 +325,7 @@ where next: Index::Entry(entry_idx), } => { // If the key does not belong to the range, iteration stops. - if !self.range.contains(node.key(entry_idx)) { + if !self.range.contains(node.key(entry_idx, self.map.memory())) { // Clear all cursors to avoid needless work in subsequent calls. self.forward_cursors = vec![]; self.backward_cursors = vec![]; @@ -328,7 +333,7 @@ where } let res = map(&node, entry_idx); - self.range.0 = Bound::Excluded(node.key(entry_idx).clone()); + self.range.0 = Bound::Excluded(node.key(entry_idx, self.map.memory()).clone()); let next = match node.node_type() { // If this is an internal node, add the next child to the cursors. @@ -403,7 +408,7 @@ where next: Index::Entry(entry_idx), } => { // If the key does not belong to the range, iteration stops. - if !self.range.contains(node.key(entry_idx)) { + if !self.range.contains(node.key(entry_idx, self.map.memory())) { // Clear all cursors to avoid needless work in subsequent calls. self.forward_cursors = vec![]; self.backward_cursors = vec![]; @@ -411,7 +416,7 @@ where } let res = map(&node, entry_idx); - self.range.1 = Bound::Excluded(node.key(entry_idx).clone()); + self.range.1 = Bound::Excluded(node.key(entry_idx, self.map.memory()).clone()); if let Some(next) = match node.node_type() { // If this is an internal node, add the previous child to the cursors. @@ -497,7 +502,7 @@ where fn next(&mut self) -> Option { self.0 - .next_map(|node, entry_idx| node.key(entry_idx).clone()) + .next_map(|node, entry_idx| node.key(entry_idx, self.0.map.memory()).clone()) } fn count(mut self) -> usize @@ -516,7 +521,7 @@ where { fn next_back(&mut self) -> Option { self.0 - .next_back_map(|node, entry_idx| node.key(entry_idx).clone()) + .next_back_map(|node, entry_idx| node.key(entry_idx, self.0.map.memory()).clone()) } } diff --git a/src/btreemap/node.rs b/src/btreemap/node.rs index 50af80e0..7079d7f0 100644 --- a/src/btreemap/node.rs +++ b/src/btreemap/node.rs @@ -39,6 +39,8 @@ pub enum NodeType { pub type Entry = (K, Vec); pub type EntryRef<'a, K> = (&'a K, &'a [u8]); +type LazyEntry = (LazyKey, LazyValue); + /// A node of a B-Tree. 
/// /// There are two versions of a `Node`: @@ -52,7 +54,7 @@ pub struct Node { address: Address, // List of tuples consisting of a key and the encoded value. // INVARIANT: the list is sorted by key. - keys_and_encoded_values: Vec<(K, Value)>, + keys_and_encoded_values: Vec<LazyEntry<K>>, // For the key at position I, children[I] points to the left // child of this key and children[I + 1] points to the right child. children: Vec<Address>
, @@ -106,14 +108,13 @@ impl Node { pub fn get_max(&self, memory: &M) -> Entry { match self.node_type { NodeType::Leaf => { - let last_idx = self.keys_and_encoded_values.len() - 1; + let entry = self + .keys_and_encoded_values + .last() + .expect("A node can never be empty"); ( - self.keys_and_encoded_values - .last() - .expect("A node can never be empty") - .0 - .clone(), - self.value(last_idx, memory).to_vec(), + self.get_key(entry, memory).clone(), + self.get_value(entry, memory).to_vec(), ) } NodeType::Internal => { @@ -164,32 +165,94 @@ impl Node { ) -> Entry { let (old_key, old_value) = core::mem::replace( &mut self.keys_and_encoded_values[idx], - (key, Value::by_value(value)), + (LazyKey::by_value(key), LazyValue::by_value(value)), ); - (old_key, self.extract_value(old_value, memory)) + ( + self.extract_key(old_key, memory), + self.extract_value(old_value, memory), + ) } /// Returns a reference to the entry at the specified index. pub fn entry(&self, idx: usize, memory: &M) -> EntryRef { - ( - &self.keys_and_encoded_values[idx].0, - self.value(idx, memory), - ) + (self.key(idx, memory), self.value(idx, memory)) + } + + /// Returns a reference to the cached key and loads it from memory if needed. + #[inline] + fn get_key<'a, M: Memory>(&'a self, (k, _): &'a LazyEntry, memory: &M) -> &'a K { + k.get_or_load(|offset| self.load_key_from_memory(offset, memory)) + } + + /// Returns a reference to the cached value and loads it from memory if needed. + #[inline] + fn get_value<'a, M: Memory>(&'a self, (_, v): &'a LazyEntry, memory: &M) -> &'a [u8] { + v.get_or_load(|offset| self.load_value_from_memory(offset, memory)) + } + + /// Returns a reference to the key at the specified index. + pub fn key(&self, idx: usize, memory: &M) -> &K { + self.get_key(&self.keys_and_encoded_values[idx], memory) } /// Returns a reference to the encoded value at the specified index. pub fn value(&self, idx: usize, memory: &M) -> &[u8] { - // Load and cache the value from the underlying memory if needed. - self.keys_and_encoded_values[idx] - .1 - .get_or_load(|offset| self.load_value_from_memory(offset, memory)) + self.get_value(&self.keys_and_encoded_values[idx], memory) + } + + /// Extracts the contents of key (by loading it first if it's not loaded yet). + fn extract_key(&self, key: LazyKey, memory: &M) -> K { + key.take_or_load(|offset| self.load_key_from_memory(offset, memory)) } /// Extracts the contents of value (by loading it first if it's not loaded yet). - fn extract_value(&self, value: Value, memory: &M) -> Vec { + fn extract_value(&self, value: LazyValue, memory: &M) -> Vec { value.take_or_load(|offset| self.load_value_from_memory(offset, memory)) } + /// Loads a key from stable memory at the given offset of this node. + fn load_key_from_memory(&self, mut offset: Bytes, memory: &M) -> K { + let reader = NodeReader { + address: self.address, + overflows: &self.overflows, + page_size: self.page_size(), + memory, + }; + + // Retrieve the key's size. + let key_offset = Address::from(offset.get()); + let key_size = match self.version { + Version::V1(_) => { + // In V1, the key's size is always stored in memory. + offset += U32_SIZE; + read_u32(&reader, key_offset) + } + Version::V2(_) => { + // In V2, if the key is fixed-size, use the maximum bound; + // otherwise, read its size from memory. 
+ if K::BOUND.is_fixed_size() { + K::BOUND.max_size() + } else { + offset += U32_SIZE; + read_u32(&reader, key_offset) + } + } + }; + + let mut bytes = vec![]; + read_to_vec( + &reader, + Address::from((offset).get()), + &mut bytes, + key_size as usize, + ); + //println!("ABC load key_size: {key_size:?}"); + //println!("ABC load key: {bytes:?}"); + let key = K::from_bytes(Cow::Borrowed(&bytes)); + + key + } + /// Loads a value from stable memory at the given offset of this node. fn load_value_from_memory(&self, offset: Bytes, memory: &M) -> Vec { let reader = NodeReader { @@ -199,13 +262,13 @@ impl Node { memory, }; - let value_len = read_u32(&reader, Address::from(offset.get())) as usize; + let value_size = read_u32(&reader, Address::from(offset.get())) as usize; let mut bytes = vec![]; read_to_vec( &reader, Address::from((offset + U32_SIZE).get()), &mut bytes, - value_len, + value_size, ); bytes @@ -215,11 +278,6 @@ impl Node { self.version.page_size() } - /// Returns a reference to the key at the specified index. - pub fn key(&self, idx: usize) -> &K { - &self.keys_and_encoded_values[idx].0 - } - /// Returns the child's address at the given index. pub fn child(&self, idx: usize) -> Address { self.children[idx] @@ -251,28 +309,41 @@ impl Node { } /// Inserts a new entry at the specified index. - pub fn insert_entry(&mut self, idx: usize, (key, encoded_value): Entry) { + pub fn insert_entry(&mut self, idx: usize, (key, value): Entry) { self.keys_and_encoded_values - .insert(idx, (key, Value::by_value(encoded_value))); + .insert(idx, (LazyKey::by_value(key), LazyValue::by_value(value))); + // let keys: Vec<_> = self + // .keys_and_encoded_values + // .iter() + // .map(|(key, _)| format!("{key:?}")) + // .collect(); + //println!("ABC insert_entry: {:?}", keys); } /// Returns the entry at the specified index while consuming this node. pub fn into_entry(mut self, idx: usize, memory: &M) -> Entry { let keys_and_encoded_values = core::mem::take(&mut self.keys_and_encoded_values); let (key, value) = keys_and_encoded_values.into_iter().nth(idx).unwrap(); - (key, self.extract_value(value, memory)) + ( + self.extract_key(key, memory), + self.extract_value(value, memory), + ) } /// Removes the entry at the specified index. pub fn remove_entry(&mut self, idx: usize, memory: &M) -> Entry { let (key, value) = self.keys_and_encoded_values.remove(idx); - (key, self.extract_value(value, memory)) + //println!("ABC remove_entry: {:?}", key); + ( + self.extract_key(key, memory), + self.extract_value(value, memory), + ) } /// Adds a new entry at the back of the node. - pub fn push_entry(&mut self, (key, encoded_value): Entry) { + pub fn push_entry(&mut self, (key, value): Entry) { self.keys_and_encoded_values - .push((key, Value::by_value(encoded_value))); + .push((LazyKey::by_value(key), LazyValue::by_value(value))); } /// Removes an entry from the back of the node. @@ -282,12 +353,15 @@ impl Node { return None; } - let (key, last_value) = self + let (key, value) = self .keys_and_encoded_values .pop() .expect("node must not be empty"); - Some((key, self.extract_value(last_value, memory))) + Some(( + self.extract_key(key, memory), + self.extract_value(value, memory), + )) } /// Merges the entries and children of the `source` node into self, along with the median entry. @@ -307,19 +381,32 @@ impl Node { median: Entry, allocator: &mut Allocator, ) { - // Load all the values from the source node first, as they will be moved out. 
+ // let self_keys: Vec<_> = self + // .keys_and_encoded_values + // .iter() + // .map(|(k, _)| format!("{k:?}")) + // .collect(); + + // let source_keys: Vec<_> = source + // .keys_and_encoded_values + // .iter() + // .map(|(k, _)| format!("{k:?}\n")) + // .collect(); + //println!("ABC merge\n self : {self_keys:?}\n source : {source_keys:?} \n"); + + // Load all the entries from the source node first, as they will be moved out. for i in 0..source.entries_len() { - source.value(i, allocator.memory()); + let _e = source.entry(i, allocator.memory()); } - if source.key(0) > self.key(0) { + if source.key(0, allocator.memory()) > self.key(0, allocator.memory()) { // The source node has keys that are greater than self. // Append the source node into self. - Self::append(self, &mut source, median); + Self::append(self, &mut source, median, allocator.memory()); } else { // self has keys that are greater than the source node. // Append self into the source node (which more efficient). - Self::append(&mut source, self, median); + Self::append(&mut source, self, median, allocator.memory()); // Move the entries and children into self. self.keys_and_encoded_values = core::mem::take(&mut source.keys_and_encoded_values); @@ -340,14 +427,14 @@ impl Node { /// /// POSTCONDITION: /// * `b` is empty. - fn append(a: &mut Node, b: &mut Node, median: Entry) { + fn append(a: &mut Node, b: &mut Node, median: Entry, memory: &M) { // Assert preconditions. let a_len = a.entries_len(); assert_eq!(a.node_type(), b.node_type()); assert!(b.entries_len() > 0); assert!(a_len > 0); - assert!(a.key(a_len - 1) < &median.0); - assert!(&median.0 < b.key(0)); + assert!(a.key(a_len - 1, memory) < &median.0); + assert!(&median.0 < b.key(0, memory)); a.push_entry(median); @@ -364,18 +451,15 @@ impl Node { #[cfg(test)] pub fn entries(&self, memory: &M) -> Vec> { - self.keys_and_encoded_values - .iter() - .enumerate() - .map(|(idx, (key, _))| (key.clone(), self.value(idx, memory).to_vec())) + (0..self.keys_and_encoded_values.len()) + .map(|i| (self.key(i, memory).clone(), self.value(i, memory).to_vec())) .collect() } #[cfg(test)] - pub fn keys(&self) -> Vec { - self.keys_and_encoded_values - .iter() - .map(|(key, _)| key.clone()) + pub fn keys(&self, memory: &M) -> Vec { + (0..self.keys_and_encoded_values.len()) + .map(|i| self.key(i, memory).clone()) .collect() } @@ -395,9 +479,26 @@ impl Node { /// of the matching key. If the value is not found then `Result::Err` is /// returned, containing the index where a matching key could be inserted /// while maintaining sorted order. - pub fn search(&self, key: &K) -> Result { - self.keys_and_encoded_values - .binary_search_by_key(&key, |entry| &entry.0) + pub fn search(&self, key: &K, memory: &M) -> Result { + // let self_keys: Vec<_> = self + // .keys_and_encoded_values + // .iter() + // .map(|(k, _)| format!("{k:?}")) + // .collect(); + // println!("\nABC search BEFORE: {self_keys:?}"); + + let result = self + .keys_and_encoded_values + .binary_search_by_key(&key, |entry| self.get_key(entry, memory)); + + // let self_keys: Vec<_> = self + // .keys_and_encoded_values + // .iter() + // .map(|(k, _)| format!("{k:?}")) + // .collect(); + //println!("ABC search AFTER : {self_keys:?}"); + + result } /// Returns the maximum size a node can be if it has bounded keys and values. @@ -422,9 +523,9 @@ impl Node { pub fn split(&mut self, sibling: &mut Node, memory: &M) -> Entry { debug_assert!(self.is_full()); - // Load the values that will be moved out of the node and into the new sibling. 
+ // Load the entries that will be moved out of the node and into the new sibling. for idx in B..self.entries_len() { - self.value(idx, memory); + let _e = self.entry(idx, memory); } // Move the entries and children above the median into the new sibling. @@ -463,53 +564,134 @@ impl NodeHeader { } } -/// The value in a K/V pair. +struct LazyKey(LazyObject); + +impl LazyKey { + /// Create a lazy key with a memory offset. + #[inline] + pub fn by_ref(offset: Bytes) -> Self { + Self(LazyObject::by_ref(offset)) + } + + /// Create a lazy key from a key. + #[inline] + pub fn by_value(key: K) -> Self { + Self(LazyObject::by_value(key)) + } + + /// Returns a reference to the key, loading it if necessary. + #[inline] + pub fn get_or_load(&self, load: impl FnOnce(Bytes) -> K) -> &K { + self.0.get_or_load(load) + } + + /// Consumes self and returns the key, loading it if necessary. + #[inline] + pub fn take_or_load(self, load: impl FnOnce(Bytes) -> K) -> K { + self.0.take_or_load(load) + } +} + +impl std::fmt::Debug for LazyKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self.0 { + LazyObject::ByVal(ref key) => { + let bytes = key.to_bytes_checked(); + f.debug_tuple("LazyKey") + .field(&format_args!("LazyKey::ByVal({:?})", bytes)) + .finish() + } + LazyObject::ByRef { offset, loaded } => match loaded.get() { + Some(key) => { + let bytes = key.to_bytes_checked(); + f.debug_tuple("LazyKey") + .field(&format_args!("LazyKey::ByRef(loaded:{:?})", bytes)) + .finish() + } + None => f + .debug_tuple("LazyKey") + .field(&format_args!("LazyKey::ByRef(offset: {:?})", offset)) + .finish(), + }, + } + } +} + +type Blob = Vec; + +#[derive(Debug)] +struct LazyValue(LazyObject); + +impl LazyValue { + /// Create a lazy value with a memory offset. + #[inline] + pub fn by_ref(offset: Bytes) -> Self { + Self(LazyObject::by_ref(offset)) + } + + /// Create a lazy value from a value. + #[inline] + pub fn by_value(value: Blob) -> Self { + Self(LazyObject::by_value(value)) + } + + /// Returns a reference to the key, loading it if necessary. + #[inline] + pub fn get_or_load(&self, load: impl FnOnce(Bytes) -> Blob) -> &Blob { + self.0.get_or_load(load) + } + + /// Consumes self and returns the key, loading it if necessary. + #[inline] + pub fn take_or_load(self, load: impl FnOnce(Bytes) -> Blob) -> Blob { + self.0.take_or_load(load) + } +} + +/// A lazy-loaded object. #[derive(Debug)] -enum Value { - /// The value's encoded bytes. - ByVal(Vec), +enum LazyObject { + /// Object stored by value. + ByVal(T), + /// Object stored by reference, loaded on demand. ByRef { - /// The value's offset in the node. + /// Memory offset of the object. offset: Bytes, - /// The lazily loaded encoded bytes. - loaded_value: OnceCell>, + /// Cache for the lazily loaded object. + loaded: OnceCell, }, } -impl Value { +impl LazyObject { + /// Create a lazy object with a memory offset. pub fn by_ref(offset: Bytes) -> Self { Self::ByRef { offset, - loaded_value: Default::default(), + loaded: Default::default(), } } - pub fn by_value(value: Vec) -> Self { + /// Create a lazy object from a value. + pub fn by_value(value: T) -> Self { Self::ByVal(value) } - /// Returns a reference to the value if the value has been loaded or runs the given function to - /// load the value. - pub fn get_or_load(&self, load: impl FnOnce(Bytes) -> Vec) -> &[u8] { + /// Get a reference to the object, loading it if necessary. 
+ pub fn get_or_load(&self, load: impl FnOnce(Bytes) -> T) -> &T { match self { - Value::ByVal(v) => &v[..], - Value::ByRef { - offset, - loaded_value: value, - } => value.get_or_init(|| load(*offset)), + LazyObject::ByVal(v) => v, + LazyObject::ByRef { offset, loaded } => loaded.get_or_init(|| load(*offset)), } } - /// Extracts the value while consuming self if the value has been loaded or runs the given - /// function to load the value. - pub fn take_or_load(self, load: impl FnOnce(Bytes) -> Vec) -> Vec { + /// Consume self and return the object, loading it if necessary. + pub fn take_or_load(self, load: impl FnOnce(Bytes) -> T) -> T { match self { - Value::ByVal(v) => v, - Value::ByRef { - offset, - loaded_value: value, - } => value.into_inner().unwrap_or_else(|| load(offset)), + LazyObject::ByVal(v) => v, + LazyObject::ByRef { offset, loaded } => { + loaded.into_inner().unwrap_or_else(|| load(offset)) + } } } } diff --git a/src/btreemap/node/v1.rs b/src/btreemap/node/v1.rs index 2dc02857..ddaf20aa 100644 --- a/src/btreemap/node/v1.rs +++ b/src/btreemap/node/v1.rs @@ -15,9 +15,13 @@ //! ---------------------------------------- //! # Entries (k) ↕ 2 bytes //! ---------------------------------------- <-- Entries (upto `CAPACITY` entries) -//! Key(0) +//! Key(0) size ↕ 4 bytes //! ---------------------------------------- -//! Value(0) +//! Key(0) ↕ `max_key_size` bytes +//! ---------------------------------------- +//! Value(0) size ↕ 4 bytes +//! ---------------------------------------- +//! Value(0) ↕ `max_value_size` bytes //! ---------------------------------------- //! Key(1) size ↕ 4 bytes //! ---------------------------------------- @@ -59,26 +63,37 @@ impl Node { max_value_size: u32, memory: &M, ) -> Self { - #[cfg(feature = "canbench")] - let _p = canbench::profile("node_load_v1"); - // Load the entries. let mut keys_encoded_values = Vec::with_capacity(header.num_entries as usize); let mut offset = NodeHeader::size(); - let mut buf = vec![]; + //const LOAD_SIZE_THRESHOLD: u32 = 8; + //let mut buf = vec![]; for _ in 0..header.num_entries { - // Read the key's size. - let key_size = read_u32(memory, address + offset); + let key_offset = offset; + //let key_size = read_u32(memory, address + offset); offset += U32_SIZE; - - // Read the key. - read_to_vec(memory, address + offset, &mut buf, key_size as usize); + // let key = if key_size <= LOAD_SIZE_THRESHOLD { + // read_to_vec(memory, address + offset, &mut buf, key_size as usize); + // LazyKey::by_value(K::from_bytes(Cow::Borrowed(&buf))) + // } else { + // LazyKey::by_ref(key_offset) + // }; + let key = LazyKey::by_ref(key_offset); offset += Bytes::from(max_key_size); - let key = K::from_bytes(Cow::Borrowed(&buf)); - // Values are loaded lazily. Store a reference and skip loading it. - keys_encoded_values.push((key, Value::by_ref(offset))); - offset += U32_SIZE + Bytes::from(max_value_size); + let value_offset = offset; + // let value_size = read_u32(memory, address + offset); + offset += U32_SIZE; + // let value = if value_size <= LOAD_SIZE_THRESHOLD { + // read_to_vec(memory, address + offset, &mut buf, value_size as usize); + // LazyValue::by_value(buf.to_vec()) + // } else { + // LazyValue::by_ref(value_offset) + // }; + let value = LazyValue::by_ref(value_offset); + offset += Bytes::from(max_value_size); + + keys_encoded_values.push((key, value)); } // Load children if this is an internal node. 
@@ -113,9 +128,6 @@ impl<K: Storable + Ord + Clone> Node<K> {
     }
 
     pub(super) fn save_v1<M: Memory>(&self, memory: &M) {
-        #[cfg(feature = "canbench")]
-        let _p = canbench::profile("node_save_v1");
-
         match self.node_type {
             NodeType::Leaf => {
                 assert!(self.children.is_empty());
@@ -129,10 +141,17 @@
         assert!(!self.keys_and_encoded_values.is_empty() || !self.children.is_empty());
 
         // Assert entries are sorted in strictly increasing order.
+
+        // TODO: remove debug code.
+        // let entries: Vec<_> = (0..self.entries_len())
+        //     .map(|i| (i, self.key(i, memory).to_bytes(), self.value(i, memory)))
+        //     .collect();
+        //println!("ABC: entries: {:?}", entries);
+
         assert!(self
             .keys_and_encoded_values
             .windows(2)
-            .all(|e| e[0].0 < e[1].0));
+            .all(|arr| self.get_key(&arr[0], memory) < self.get_key(&arr[1], memory)));
 
         let (max_key_size, max_value_size) = match self.version {
             Version::V1(DerivedPageSize {
@@ -156,16 +175,16 @@
 
         let mut offset = NodeHeader::size();
 
-        // Load all the values. This is necessary so that we don't overwrite referenced
-        // values when writing the entries to the node.
+        // Load all the entries. This is necessary so that we don't overwrite referenced
+        // entries when writing the entries to the node.
         for i in 0..self.keys_and_encoded_values.len() {
-            self.value(i, memory);
+            self.entry(i, memory);
         }
 
         // Write the entries.
-        for (idx, (key, _)) in self.keys_and_encoded_values.iter().enumerate() {
+        for i in 0..self.keys_and_encoded_values.len() {
             // Write the size of the key.
-            let key_bytes = key.to_bytes_checked();
+            let key_bytes = self.key(i, memory).to_bytes_checked();
             write_u32(memory, self.address + offset, key_bytes.len() as u32);
             offset += U32_SIZE;
 
@@ -174,7 +193,7 @@
             offset += Bytes::from(max_key_size);
 
             // Write the size of the value.
-            let value = self.value(idx, memory);
+            let value = self.value(i, memory);
             write_u32(memory, self.address + offset, value.len() as u32);
             offset += U32_SIZE;
 
diff --git a/src/btreemap/node/v2.rs b/src/btreemap/node/v2.rs
index f559d777..c6a0555b 100644
--- a/src/btreemap/node/v2.rs
+++ b/src/btreemap/node/v2.rs
@@ -111,9 +111,6 @@ impl<K: Storable + Ord + Clone> Node<K> {
         header: NodeHeader,
         memory: &M,
     ) -> Self {
-        #[cfg(feature = "canbench")]
-        let _p = canbench::profile("node_load_v2");
-
         // Load the node, including any overflows, into a buffer.
         let overflows = read_overflows(address, memory);
 
@@ -151,32 +148,49 @@ impl<K: Storable + Ord + Clone> Node<K> {
         // Load the keys.
         let mut keys_encoded_values = Vec::with_capacity(num_entries);
-        let mut buf = vec![];
+        //const LOAD_SIZE_THRESHOLD: u32 = 8;
+        //let mut buf = vec![];
         for _ in 0..num_entries {
-            // Load the key's size.
+            let key_offset = Bytes::from(offset.get());
+
+            // Advance offset by the key_size type size if applicable.
             let key_size = if K::BOUND.is_fixed_size() {
                 // Key is fixed in size. The size of the key is always its max size.
                 K::BOUND.max_size()
             } else {
                 // Key is not fixed in size. Read the size from memory.
-                let value = read_u32(&reader, offset);
+                let key_size = read_u32(&reader, offset);
+                //println!("ABC read key_size: {:?}", key_size);
                 offset += U32_SIZE;
-                value
+                key_size
             };
-
-            // Load the key.
-            read_to_vec(&reader, offset, &mut buf, key_size as usize);
-            let key = K::from_bytes(Cow::Borrowed(&buf));
+            // let key = if key_size <= LOAD_SIZE_THRESHOLD {
+            //     read_to_vec(&reader, offset, &mut buf, key_size as usize);
+            //     LazyKey::by_value(K::from_bytes(Cow::Borrowed(&buf)))
+            // } else {
+            //     LazyKey::by_ref(key_offset)
+            // };
+            let key = LazyKey::by_ref(key_offset);
+
+            // Advance offset by the size of the key.
             offset += Bytes::from(key_size);
-            keys_encoded_values.push((key, Value::by_ref(Bytes::from(0usize))));
+
+            keys_encoded_values.push((key, LazyValue::by_ref(Bytes::from(0usize))));
         }
 
         // Load the values
         for (_key, value) in keys_encoded_values.iter_mut() {
-            // Load the values lazily.
-            *value = Value::by_ref(Bytes::from(offset.get()));
-            let value_size = read_u32(&reader, offset) as usize;
-            offset += U32_SIZE + Bytes::from(value_size as u64);
+            let value_offset = Bytes::from(offset.get());
+            let value_size = read_u32(&reader, offset);
+            offset += U32_SIZE;
+            // *value = if value_size <= LOAD_SIZE_THRESHOLD {
+            //     read_to_vec(&reader, offset, &mut buf, value_size as usize);
+            //     LazyValue::by_value(buf.to_vec())
+            // } else {
+            //     LazyValue::by_ref(value_offset)
+            // };
+            *value = LazyValue::by_ref(value_offset);
+            offset += Bytes::from(value_size as u64);
         }
 
         Self {
@@ -191,17 +205,14 @@ impl<K: Storable + Ord + Clone> Node<K> {
 
     // Saves the node to memory.
     pub(super) fn save_v2<M: Memory>(&mut self, allocator: &mut Allocator<M>) {
-        #[cfg(feature = "canbench")]
-        let _p = canbench::profile("node_save_v2");
-
         let page_size = self.version.page_size().get();
         assert!(page_size >= MINIMUM_PAGE_SIZE);
 
-        // Load all the values. This is necessary so that we don't overwrite referenced
-        // values when writing the entries to the node.
-        for i in 0..self.keys_and_encoded_values.len() {
-            self.value(i, allocator.memory());
-        }
+        // Load all the entries. This is necessary so that we don't overwrite referenced
+        // entries when writing the entries to the node.
+        let entries: Vec<_> = (0..self.keys_and_encoded_values.len())
+            .map(|i| self.entry(i, allocator.memory()))
+            .collect();
 
         // Initialize a NodeWriter. The NodeWriter takes care of allocating/deallocating
         // overflow pages as needed.
@@ -239,9 +250,8 @@
         }
 
         // Write the keys.
-        for (key, _) in self.keys_and_encoded_values.iter() {
+        for (key, _) in &entries {
             let key_bytes = key.to_bytes_checked();
-
             // Write the size of the key if it isn't fixed in size.
             if !K::BOUND.is_fixed_size() {
                 writer.write_u32(offset, key_bytes.len() as u32);
@@ -254,9 +264,8 @@
         }
 
         // Write the values.
-        for idx in 0..self.entries_len() {
+        for (_, value) in &entries {
             // Write the size of the value.
-            let value = self.value(idx, writer.memory());
             writer.write_u32(offset, value.len() as u32);
             offset += U32_SIZE;
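
Both save paths materialize every lazily referenced entry before the node's bytes are rewritten, because a by-reference entry points into the very region that is about to be overwritten. A small sketch of why that ordering matters, using a plain byte buffer in place of stable memory (all names here are illustrative, not from the crate):

    use std::cell::OnceCell;

    fn main() {
        // The "node" bytes, with a value stored at offsets 0..9.
        let mut node_bytes: Vec<u8> = b"old-value".to_vec();

        // A lazily referenced value: only the offset is known up front.
        let loaded: OnceCell<Vec<u8>> = OnceCell::new();
        let offset = 0usize;

        // Correct order: load (and cache) the referenced bytes first...
        let value = loaded
            .get_or_init(|| node_bytes[offset..offset + 9].to_vec())
            .clone();

        // ...then it is safe to rewrite the region the reference pointed into.
        node_bytes[..9].copy_from_slice(b"new-value");

        // The cached copy still holds the original contents; loading only after
        // the rewrite would have returned the new bytes instead.
        assert_eq!(value.as_slice(), &b"old-value"[..]);
    }
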