Commit
clippy
CrabNejonas committed Jan 18, 2024
1 parent 912bf04 commit bdc90af
Showing 16 changed files with 175 additions and 167 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -79,7 +79,7 @@ jobs:
with:
components: clippy
- uses: Swatinem/rust-cache@v2
- run: cargo clippy --workspace -- -Dclippy::all -Dclippy::pedantic
- run: cargo clippy --workspace -- -Dclippy::all

rustfmt:
name: Rustfmt
48 changes: 23 additions & 25 deletions src/automaton/levenshtein.rs
@@ -29,8 +29,7 @@ impl fmt::Display for LevenshteinError {
LevenshteinError::TooManyStates(size_limit) => write!(
f,
"Levenshtein automaton exceeds size limit of \
{} states",
size_limit
{size_limit} states"
),
}
}
@@ -61,27 +60,26 @@ impl std::error::Error for LevenshteinError {}
/// from `foo`.
///
/// ```rust
/// use fst::automaton::Levenshtein;
/// use fst::{IntoStreamer, Streamer, Set};
/// use fst_no_std::automaton::Levenshtein;
/// use fst_no_std::{IntoStreamer, Streamer, Set};
///
/// fn main() {
/// let keys = vec!["fa", "fo", "fob", "focus", "foo", "food", "foul"];
/// let set = Set::from_iter(keys).unwrap();
/// let keys = vec!["fa", "fo", "fob", "focus", "foo", "food", "foul"];
/// let set = Set::from_iter(keys).unwrap();
///
/// let lev = Levenshtein::new("foo", 1).unwrap();
/// let mut stream = set.search(&lev).into_stream();
/// let lev = Levenshtein::new("foo", 1).unwrap();
/// let mut stream = set.search(&lev).into_stream();
///
/// let mut keys = vec![];
/// while let Some(key) = stream.next() {
/// keys.push(key.to_vec());
/// }
/// assert_eq!(keys, vec![
/// "fo".as_bytes(), // 1 deletion
/// "fob".as_bytes(), // 1 substitution
/// "foo".as_bytes(), // 0 insertions/deletions/substitutions
/// "food".as_bytes(), // 1 insertion
/// ]);
/// let mut keys = vec![];
/// while let Some(key) = stream.next() {
/// keys.push(key.to_vec());
/// }
///
/// assert_eq!(keys, vec![
/// "fo".as_bytes(), // 1 deletion
/// "fob".as_bytes(), // 1 substitution
/// "foo".as_bytes(), // 0 insertions/deletions/substitutions
/// "food".as_bytes(), // 1 insertion
/// ]);
/// ```
///
/// This example only uses ASCII characters, but it will work equally well
@@ -182,17 +180,17 @@ impl DynamicLevenshtein {
}

fn is_match(&self, state: &[usize]) -> bool {
state.last().map(|&n| n <= self.dist).unwrap_or(false)
state.last().is_some_and(|&n| n <= self.dist)
}

fn can_match(&self, state: &[usize]) -> bool {
state.iter().min().map(|&n| n <= self.dist).unwrap_or(false)
state.iter().min().is_some_and(|&n| n <= self.dist)
}

fn accept(&self, state: &[usize], chr: Option<char>) -> Vec<usize> {
let mut next = vec![state[0] + 1];
for (i, c) in self.query.chars().enumerate() {
let cost = if Some(c) == chr { 0 } else { 1 };
let cost = usize::from(Some(c) != chr);
let v = cmp::min(
cmp::min(next[i] + 1, state[i + 1] + 1),
state[i] + cost,
@@ -245,7 +243,7 @@ impl fmt::Debug for State {
writeln!(f, " is_match: {:?}", self.is_match)?;
for i in 0..256 {
if let Some(si) = self.next[i] {
writeln!(f, " {:?}: {:?}", i, si)?;
writeln!(f, " {i:?}: {si:?}")?;
}
}
write!(f, "}}")
@@ -341,7 +339,7 @@ impl DfaBuilder {
// Some((si, false)) => si,
};
self.add_utf8_sequences(false, from_si, to_si, '\u{0}', '\u{10FFFF}');
return Some((to_si, mismatch_state));
Some((to_si, mismatch_state))
}

fn add_utf8_sequences(
@@ -375,7 +373,7 @@ impl DfaBuilder {
to: usize,
range: &Utf8Range,
) {
for b in range.start as usize..range.end as usize + 1 {
for b in (range.start as usize)..=(range.end as usize) {
if overwrite || self.dfa.states[from].next[b].is_none() {
self.dfa.states[from].next[b] = Some(to);
}
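
The `accept` hunk in this file (cost via `usize::from`, then a three-way minimum) is the standard Levenshtein dynamic-programming row update. Below is a self-contained sketch of what that transition computes; the names `next_row` and the `main` driver are illustrative only and are not part of the crate's API.

```rust
/// Advance one row of the Levenshtein table for `query` after reading `chr`.
/// `state[0]` counts input characters consumed; `state[i + 1]` is the edit
/// distance between the input so far and the first `i + 1` query characters.
fn next_row(query: &str, state: &[usize], chr: Option<char>) -> Vec<usize> {
    let mut next = vec![state[0] + 1];
    for (i, c) in query.chars().enumerate() {
        // Substitution is free when the input character matches the query.
        let cost = usize::from(Some(c) != chr);
        let v = (next[i] + 1)          // insertion
            .min(state[i + 1] + 1)     // deletion
            .min(state[i] + cost);     // match or substitution
        next.push(v);
    }
    next
}

fn main() {
    // Row for the empty input: distance i to the first i characters of "foo".
    let mut state: Vec<usize> = (0..="foo".chars().count()).collect();
    for chr in "fob".chars() {
        state = next_row("foo", &state, Some(chr));
    }
    // Mirrors `is_match`: "fob" is within distance 1 of "foo".
    assert!(state.last().is_some_and(|&d| d <= 1));
}
```

The crate's `DynamicLevenshtein` additionally bounds exploration with the size limit reported by `TooManyStates`, and the explored states are compiled into the byte-level transitions that `DfaBuilder::add_utf8_sequences` fills in above.
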
20 changes: 9 additions & 11 deletions src/automaton/mod.rs
@@ -173,6 +173,7 @@ pub struct Str<'a> {
impl<'a> Str<'a> {
/// Constructs automaton that matches an exact string.
#[inline]
#[must_use]
pub fn new(string: &'a str) -> Str<'a> {
Str { string: string.as_bytes() }
}
@@ -201,7 +202,7 @@ impl<'a> Automaton for Str<'a> {
// if we aren't already past the end...
if let Some(pos) = *pos {
// and there is still a matching byte at the current position...
if self.string.get(pos).cloned() == Some(byte) {
if self.string.get(pos).copied() == Some(byte) {
// then move forward
return Some(pos + 1);
}
@@ -246,6 +247,7 @@ impl<'a> Subsequence<'a> {
/// Constructs automaton that matches input containing the
/// specified subsequence.
#[inline]
#[must_use]
pub fn new(subsequence: &'a str) -> Subsequence<'a> {
Subsequence { subseq: subsequence.as_bytes() }
}
@@ -279,7 +281,7 @@ impl<'a> Automaton for Subsequence<'a> {
if state == self.subseq.len() {
return state;
}
state + (byte == self.subseq[state]) as usize
state + usize::from(byte == self.subseq[state])
}
}

@@ -294,25 +296,21 @@ impl Automaton for AlwaysMatch {
type State = ();

#[inline]
fn start(&self) -> () {
()
}
fn start(&self) {}
#[inline]
fn is_match(&self, _: &()) -> bool {
fn is_match(&self, (): &()) -> bool {
true
}
#[inline]
fn can_match(&self, _: &()) -> bool {
fn can_match(&self, (): &()) -> bool {
true
}
#[inline]
fn will_always_match(&self, _: &()) -> bool {
fn will_always_match(&self, (): &()) -> bool {
true
}
#[inline]
fn accept(&self, _: &(), _: u8) -> () {
()
}
fn accept(&self, (): &(), _: u8) {}
}

/// An automaton that matches a string that begins with something that the
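
For context on the `Str` and `Subsequence` automata touched above, here is a hedged usage sketch. It assumes this `fst_no_std` fork keeps upstream `fst`'s `Set`/`search` API and root re-exports (as the doc examples in this commit suggest), and that the features needed for `Set::from_iter` are enabled; `search_keys` is an illustrative helper, not part of the crate.

```rust
use fst_no_std::automaton::{Str, Subsequence};
use fst_no_std::{Automaton, IntoStreamer, Set, Streamer};

/// Collect every key of `set` accepted by the automaton `aut`, in key order.
fn search_keys<A: Automaton>(set: &Set<Vec<u8>>, aut: A) -> Vec<String> {
    let mut stream = set.search(aut).into_stream();
    let mut keys = vec![];
    while let Some(key) = stream.next() {
        keys.push(String::from_utf8(key.to_vec()).unwrap());
    }
    keys
}

fn main() {
    let set =
        Set::from_iter(vec!["fa", "fo", "fob", "focus", "foo", "food"]).unwrap();

    // `Str` matches exactly one key.
    assert_eq!(search_keys(&set, Str::new("foo")), vec!["foo"]);

    // `starts_with()` from the `Automaton` trait turns it into a prefix query.
    assert_eq!(
        search_keys(&set, Str::new("foo").starts_with()),
        vec!["foo", "food"]
    );

    // `Subsequence` matches keys containing "fs" as a not-necessarily-contiguous
    // subsequence, e.g. "focus".
    assert_eq!(search_keys(&set, Subsequence::new("fs")), vec!["focus"]);
}
```
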
24 changes: 12 additions & 12 deletions src/bytes.rs
@@ -29,7 +29,7 @@ pub fn write_u32_le(n: u32, slice: &mut [u8]) {
slice[3] = bytes[3];
}

/// Like write_u32_le, but to an io::Write implementation. If every byte could
/// Like `write_u32_le`, but to an `io::Write` implementation. If every byte could
/// not be written, then this returns an error.
#[inline]
#[cfg(feature = "std")]
@@ -56,7 +56,7 @@ pub fn write_u64_le(n: u64, slice: &mut [u8]) {
slice[7] = bytes[7];
}

/// Like write_u64_le, but to an io::Write implementation. If every byte could
/// Like `write_u64_le`, but to an `io::Write` implementation. If every byte could
/// not be written, then this returns an error.
#[inline]
#[cfg(feature = "std")]
@@ -66,20 +66,20 @@ pub fn io_write_u64_le<W: io::Write>(n: u64, mut wtr: W) -> io::Result<()> {
wtr.write_all(&buf)
}

/// pack_uint packs the given integer in the smallest number of bytes possible,
/// `pack_uint` packs the given integer in the smallest number of bytes possible,
/// and writes it to the given writer. The number of bytes written is returned
/// on success.
#[inline]
#[cfg(feature = "std")]
pub fn pack_uint<W: io::Write>(wtr: W, n: u64) -> io::Result<u8> {
let nbytes = pack_size(n);
pack_uint_in(wtr, n, nbytes).map(|_| nbytes)
pack_uint_in(wtr, n, nbytes).map(|()| nbytes)
}

/// pack_uint_in is like pack_uint, but always uses the number of bytes given
/// `pack_uint_in` is like `pack_uint`, but always uses the number of bytes given
/// to pack the number given.
///
/// `nbytes` must be >= pack_size(n) and <= 8, where `pack_size(n)` is the
/// `nbytes` must be >= `pack_size(n)` and <= 8, where `pack_size(n)` is the
/// smallest number of bytes that can store the integer given.
#[inline]
#[cfg(feature = "std")]
@@ -88,32 +88,32 @@ pub fn pack_uint_in<W: io::Write>(
mut n: u64,
nbytes: u8,
) -> io::Result<()> {
assert!(1 <= nbytes && nbytes <= 8);
assert!((1..=8).contains(&nbytes));
let mut buf = [0u8; 8];
for i in 0..nbytes {
buf[i as usize] = n as u8;
n = n >> 8;
n >>= 8;
}
wtr.write_all(&buf[..nbytes as usize])?;
Ok(())
}

/// unpack_uint is the dual of pack_uint. It unpacks the integer at the current
/// `unpack_uint` is the dual of `pack_uint`. It unpacks the integer at the current
/// position in `slice` after reading `nbytes` bytes.
///
/// `nbytes` must be >= 1 and <= 8.
#[inline]
pub fn unpack_uint(slice: &[u8], nbytes: u8) -> u64 {
assert!(1 <= nbytes && nbytes <= 8);
assert!((1..=8).contains(&nbytes));

let mut n = 0;
for (i, &b) in slice[..nbytes as usize].iter().enumerate() {
n = n | ((b as u64) << (8 * i));
n |= u64::from(b) << (8 * i);
}
n
}

/// pack_size returns the smallest number of bytes that can encode `n`.
/// `pack_size` returns the smallest number of bytes that can encode `n`.
#[inline]
#[cfg(feature = "std")]
pub fn pack_size(n: u64) -> u8 {
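
The `bytes.rs` functions above appear to be internal helpers, so they generally can't be called from outside the crate. The standalone sketch below mirrors what `pack_uint`/`unpack_uint` do (little-endian, fewest bytes possible); the `pack`/`unpack` names are illustrative, and the `leading_zeros` byte-count computation is only one way to obtain the value `pack_size` returns, whose body is cut off above.

```rust
/// Pack `n` little-endian into the fewest bytes that can hold it, appending
/// them to `buf`, and return the number of bytes used.
fn pack(mut n: u64, buf: &mut Vec<u8>) -> u8 {
    // Smallest byte count that can represent `n` (at least 1, even for 0).
    let nbytes = std::cmp::max(1, (8 - n.leading_zeros() / 8) as u8);
    for _ in 0..nbytes {
        buf.push(n as u8);
        n >>= 8;
    }
    nbytes
}

/// The dual of `pack`: rebuild the integer from `nbytes` little-endian bytes.
fn unpack(slice: &[u8], nbytes: u8) -> u64 {
    assert!((1..=8).contains(&nbytes));
    let mut n = 0;
    for (i, &b) in slice[..nbytes as usize].iter().enumerate() {
        n |= u64::from(b) << (8 * i);
    }
    n
}

fn main() {
    let mut buf = Vec::new();
    let nbytes = pack(0x0001_F331, &mut buf);
    assert_eq!(nbytes, 3);                   // 0x01F331 fits in three bytes
    assert_eq!(buf, vec![0x31, 0xF3, 0x01]); // little-endian byte order
    assert_eq!(unpack(&buf, nbytes), 0x0001_F331);
}
```
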
7 changes: 4 additions & 3 deletions src/lib.rs
@@ -52,8 +52,8 @@ This requires the `levenshtein` feature in this crate to be enabled. It is not
enabled by default.
```rust
use fst::{IntoStreamer, Streamer, Set};
use fst::automaton::Levenshtein;
use fst_no_std::{IntoStreamer, Streamer, Set};
use fst_no_std::automaton::Levenshtein;
# fn main() { example().unwrap(); }
fn example() -> Result<(), Box<dyn std::error::Error>> {
@@ -300,8 +300,9 @@ data structures found in the standard library, such as `BTreeSet` and
*/

#![cfg_attr(not(feature = "std"), no_std)]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), feature(error_in_core))]
#![deny(missing_docs)]
#![allow(clippy::should_implement_trait)]

#[cfg(feature = "alloc")]
extern crate alloc;
22 changes: 14 additions & 8 deletions src/map.rs
@@ -143,7 +143,7 @@ impl<D: AsRef<[u8]>> Map<D> {
/// assert_eq!(map.get("z"), None);
/// ```
pub fn get<K: AsRef<[u8]>>(&self, key: K) -> Option<u64> {
self.0.get(key).map(|output| output.value())
self.0.get(key).map(raw::Output::value)
}

/// Return a lexicographically ordered stream of all key-value pairs in
@@ -635,13 +635,13 @@ pub struct MapBuilder<W>(raw::Builder<W>);
impl MapBuilder<Vec<u8>> {
/// Create a builder that builds a map in memory.
#[inline]
pub fn memory() -> MapBuilder<Vec<u8>> {
#[must_use] pub fn memory() -> MapBuilder<Vec<u8>> {
MapBuilder(raw::Builder::memory())
}

/// Finishes the construction of the map and returns it.
#[inline]
pub fn into_map(self) -> Map<Vec<u8>> {
#[must_use] pub fn into_map(self) -> Map<Vec<u8>> {
Map(self.0.into_fst())
}
}
@@ -987,10 +987,16 @@ where
pub struct OpBuilder<'m>(raw::OpBuilder<'m>);

#[cfg(feature = "alloc")]
impl<'m> Default for OpBuilder<'m> {
fn default() -> Self {
Self::new()
}
}

impl<'m> OpBuilder<'m> {
/// Create a new set operation builder.
#[inline]
pub fn new() -> OpBuilder<'m> {
#[must_use] pub fn new() -> OpBuilder<'m> {
OpBuilder(raw::OpBuilder::new())
}

@@ -1063,7 +1069,7 @@ impl<'m> OpBuilder<'m> {
/// ]);
/// ```
#[inline]
pub fn union(self) -> Union<'m> {
#[must_use] pub fn union(self) -> Union<'m> {
Union(self.0.union())
}

@@ -1104,7 +1110,7 @@ impl<'m> OpBuilder<'m> {
/// ]);
/// ```
#[inline]
pub fn intersection(self) -> Intersection<'m> {
#[must_use] pub fn intersection(self) -> Intersection<'m> {
Intersection(self.0.intersection())
}

@@ -1149,7 +1155,7 @@ impl<'m> OpBuilder<'m> {
/// ]);
/// ```
#[inline]
pub fn difference(self) -> Difference<'m> {
#[must_use] pub fn difference(self) -> Difference<'m> {
Difference(self.0.difference())
}

@@ -1197,7 +1203,7 @@ impl<'m> OpBuilder<'m> {
/// ]);
/// ```
#[inline]
pub fn symmetric_difference(self) -> SymmetricDifference<'m> {
#[must_use] pub fn symmetric_difference(self) -> SymmetricDifference<'m> {
SymmetricDifference(self.0.symmetric_difference())
}
}
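
The `map.rs` changes above are attribute and call-site tweaks, but for orientation, here is a hedged sketch of the `MapBuilder`/`Map` API they decorate. It assumes the fork keeps upstream `fst`'s map API and root re-exports, and that the `std` feature is enabled so the crate's errors convert into `Box<dyn std::error::Error>`; the keys and values are arbitrary.

```rust
use fst_no_std::{Map, MapBuilder, Streamer};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Keys must be inserted in lexicographic order.
    let mut builder = MapBuilder::memory();
    builder.insert("bruce", 1972)?;
    builder.insert("clarence", 1942)?;
    builder.insert("stevie", 1975)?;
    let map: Map<Vec<u8>> = builder.into_map();

    // `get` returns the associated value, as in the hunk above.
    assert_eq!(map.get("clarence"), Some(1942));
    assert_eq!(map.get("max"), None);

    // Streams yield key/value pairs in key order.
    let mut stream = map.stream();
    let mut pairs = vec![];
    while let Some((key, value)) = stream.next() {
        pairs.push((key.to_vec(), value));
    }
    assert_eq!(pairs[0], (b"bruce".to_vec(), 1972));
    Ok(())
}
```

The `#[must_use]` attributes added in this file make it a compiler warning to call constructors like `MapBuilder::memory` or combinators like `union` and then discard the result.
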