Update binggan requirement from 0.12.0 to 0.14.0 (#2530)
* Update binggan requirement from 0.12.0 to 0.14.0

---
updated-dependencies:
- dependency-name: binggan
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <[email protected]>

* fix build

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pascal Seitz <[email protected]>
dependabot[bot] and PSeitz authored Oct 24, 2024
1 parent f9ac055 commit c66af2c
Showing 9 changed files with 6 additions and 23 deletions.
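Besides the version bumps, the "fix build" part of this commit adapts the benches to binggan 0.14's API: registered closures no longer end with a trailing `None` / `Some(())` return value, and the `BenchRunner` in `bench_merge.rs` now needs a mutable binding. A minimal before/after sketch of a bench registration, using only calls visible in this diff (the exact 0.14 signatures are inferred from these changes, not from binggan's documentation):

use binggan::{black_box, BenchRunner};

fn main() {
    // `mut` mirrors the change in columnar/benches/bench_merge.rs; whether it is
    // strictly required for `bench_function` alone is an assumption.
    let mut runner = BenchRunner::new();

    let v = [10u32, 14u32, 21u32];
    runner.bench_function("bench_tinyarr_sum", move |_| {
        black_box(v.iter().cloned().sum::<u32>());
        // binggan 0.12 expected the closure to return an Option, so these benches
        // ended with a trailing `None`; under 0.14 that line is dropped and the
        // closure simply returns `()`.
    });
}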
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -72,7 +72,7 @@ fnv = "1.0.7"
winapi = "0.3.9"

[dev-dependencies]
binggan = "0.12.0"
binggan = "0.14.0"
rand = "0.8.5"
maplit = "1.0.2"
matches = "0.1.9"
1 change: 0 additions & 1 deletion benches/agg_bench.rs
@@ -20,7 +20,6 @@ macro_rules! register {
($runner:expr, $func:ident) => {
$runner.register(stringify!($func), move |index| {
$func(index);
-None
})
};
}
2 changes: 1 addition & 1 deletion columnar/Cargo.toml
@@ -23,7 +23,7 @@ downcast-rs = "1.2.0"
proptest = "1"
more-asserts = "0.3.1"
rand = "0.8"
binggan = "0.12.0"
binggan = "0.14.0"

[[bench]]
name = "bench_merge"
2 changes: 0 additions & 2 deletions columnar/benches/bench_access.rs
@@ -42,7 +42,6 @@ fn bench_group(mut runner: InputGroup<Column>) {
}
}
black_box(sum);
-None
});
runner.register("access_first_vals", |column| {
let mut sum = 0;
@@ -63,7 +62,6 @@ fn bench_group(mut runner: InputGroup<Column>) {
}

black_box(sum);
-None
});
runner.run();
}
4 changes: 2 additions & 2 deletions columnar/benches/bench_merge.rs
@@ -1,6 +1,6 @@
pub mod common;

-use binggan::{black_box, BenchRunner};
+use binggan::BenchRunner;
use common::{generate_columnar_with_name, Card};
use tantivy_columnar::*;

@@ -29,7 +29,7 @@ fn main() {
add_combo(Card::Multi, Card::Dense);
add_combo(Card::Multi, Card::Sparse);

-let runner: BenchRunner = BenchRunner::new();
+let mut runner: BenchRunner = BenchRunner::new();
let mut group = runner.new_group();
for (input_name, columnar_readers) in inputs.iter() {
group.register_with_input(
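The `mut` added here suggests that creating or running a group borrows the runner mutably in 0.14; the registration pattern itself (name, borrowed input, closure with no return value) matches the stacker benches further down. A small sketch of that pattern with a hypothetical input, again inferred from this diff rather than from binggan's documentation:

use binggan::{black_box, BenchRunner};

fn main() {
    // Hypothetical stand-in for the columnar readers benched in bench_merge.rs.
    let numbers: Vec<u32> = (0..1_000).collect();

    let mut runner: BenchRunner = BenchRunner::new();
    let mut group = runner.new_group();
    group.register_with_input("sum_numbers", &numbers, move |data| {
        // As in the rest of this commit, no trailing `Some(())` is returned.
        black_box(data.iter().copied().sum::<u32>());
    });
    group.run();
}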
2 changes: 1 addition & 1 deletion common/Cargo.toml
@@ -19,7 +19,7 @@ time = { version = "0.3.10", features = ["serde-well-known"] }
serde = { version = "1.0.136", features = ["derive"] }

[dev-dependencies]
binggan = "0.12.0"
binggan = "0.14.0"
proptest = "1.0.0"
rand = "0.8.4"

6 changes: 0 additions & 6 deletions common/benches/bench.rs
@@ -15,7 +15,6 @@ fn bench_vint() {
out += u64::from(buf[0]);
}
black_box(out);
-None
});

let vals: Vec<u32> = (0..20_000).choose_multiple(&mut thread_rng(), 100_000);
@@ -27,7 +26,6 @@ fn bench_vint() {
out += u64::from(buf[0]);
}
black_box(out);
-None
});
}

@@ -43,24 +41,20 @@ fn bench_bitset() {
tinyset.pop_lowest();
tinyset.pop_lowest();
black_box(tinyset);
-None
});

let tiny_set = TinySet::empty().insert(10u32).insert(14u32).insert(21u32);
runner.bench_function("bench_tinyset_sum", move |_| {
assert_eq!(black_box(tiny_set).into_iter().sum::<u32>(), 45u32);
-None
});

let v = [10u32, 14u32, 21u32];
runner.bench_function("bench_tinyarr_sum", move |_| {
black_box(v.iter().cloned().sum::<u32>());
-None
});

runner.bench_function("bench_bitset_initialize", move |_| {
black_box(BitSet::with_max_value(1_000_000));
-None
});
}

2 changes: 1 addition & 1 deletion stacker/Cargo.toml
@@ -28,7 +28,7 @@ rand = "0.8.5"
zipf = "7.0.0"
rustc-hash = "1.1.0"
proptest = "1.2.0"
-binggan = { version = "0.12.0" }
+binggan = { version = "0.14.0" }

[features]
compare_hash_only = ["ahash"] # Compare hash only, not the key in the Hashmap
8 changes: 0 additions & 8 deletions stacker/benches/bench.rs
@@ -35,30 +35,26 @@ fn bench_vint() {
group.set_input_size(input_bytes);
group.register_with_input("hashmap", &alice_terms_as_bytes, move |data| {
black_box(create_hash_map(data.iter()));
-Some(())
});
group.register_with_input(
"hasmap with postings",
&alice_terms_as_bytes_with_docid,
move |data| {
black_box(create_hash_map_with_expull(data.iter().cloned()));
-Some(())
},
);
group.register_with_input(
"fxhashmap ref postings",
&alice_terms_as_bytes,
move |data| {
black_box(create_fx_hash_ref_map_with_expull(data.iter().cloned()));
-Some(())
},
);
group.register_with_input(
"fxhasmap owned postings",
&alice_terms_as_bytes,
move |data| {
black_box(create_fx_hash_owned_map_with_expull(data.iter().cloned()));
-Some(())
},
);
group.run();
@@ -86,11 +82,9 @@ fn bench_vint() {
group.set_input_size(input_bytes);
group.register_with_input("only hashmap", &numbers, move |data| {
black_box(create_hash_map(data.iter()));
-Some(())
});
group.register_with_input("hasmap with postings", &numbers_with_doc, move |data| {
black_box(create_hash_map_with_expull(data.iter().cloned()));
-Some(())
});
group.run();
}
@@ -115,11 +109,9 @@ fn bench_vint() {
group.set_input_size(input_bytes);
group.register_with_input("hashmap", &numbers, move |data| {
black_box(create_hash_map(data.iter()));
-Some(())
});
group.register_with_input("hasmap with postings", &numbers_with_doc, move |data| {
black_box(create_hash_map_with_expull(data.iter().cloned()));
-Some(())
});
group.run();
}
