From 2b77b2cf2be57669f1277f2cc21a206945a4d2fd Mon Sep 17 00:00:00 2001
From: Anatoly Ikorsky
Date: Wed, 11 Oct 2023 12:59:04 +0300
Subject: [PATCH 1/7] clippy

---
 derive/src/from_row/structs/mod.rs            | 18 ++++----
 .../src/from_value/enums/attrs/container.rs   | 28 ++++++------
 derive/src/from_value/enums/mod.rs            | 43 ++++++++-----------
 derive/src/from_value/structs/mod.rs          |  9 ++--
 derive/src/warn.rs                            |  2 +-
 src/binlog/consts.rs                          |  2 +-
 src/binlog/decimal/mod.rs                     |  4 +-
 src/binlog/events/anonymous_gtid_event.rs     |  2 +-
 src/binlog/events/gtid_event.rs               |  4 +-
 src/binlog/events/mod.rs                      | 32 +++++++-------
 src/binlog/events/table_map_event.rs          |  6 +--
 .../events/transaction_payload_event.rs       | 10 ++---
 src/binlog/mod.rs                             |  4 +-
 src/binlog/row.rs                             |  5 ++-
 src/crypto/rsa.rs                             | 22 ++++------
 src/misc/raw/bytes.rs                         |  5 +--
 src/misc/raw/int.rs                           |  4 +-
 src/misc/raw/mod.rs                           |  2 +-
 src/misc/raw/seq.rs                           |  7 ++-
 src/named_params.rs                           | 27 ++++++------
 src/packets/binlog_request.rs                 |  4 +-
 src/packets/mod.rs                            | 24 +++++++----
 src/proto/codec/mod.rs                        |  9 +---
 src/proto/sync_framed.rs                      |  2 +-
 src/row/mod.rs                                | 10 ++---
 src/scramble.rs                               |  5 +--
 src/value/convert/mod.rs                      |  4 +-
 src/value/convert/time.rs                     | 19 +++-----
 src/value/mod.rs                              | 10 ++---
 29 files changed, 155 insertions(+), 168 deletions(-)

diff --git a/derive/src/from_row/structs/mod.rs b/derive/src/from_row/structs/mod.rs
index 3fd3971..91047e9 100644
--- a/derive/src/from_row/structs/mod.rs
+++ b/derive/src/from_row/structs/mod.rs
@@ -63,12 +63,12 @@ impl ToTokens for GenericStruct<'_> {
             Crate::Found(ref name) => syn::Ident::new(name, Span::call_site()),
         };

-        let impl_generics = (generics.params.len() > 0).then(|| {
+        let impl_generics = (!generics.params.is_empty()).then(|| {
             let generics = self.generics.params.iter();
             quote::quote!(< #(#generics,)* >)
         });

-        let ident_generics = (generics.params.len() > 0).then(|| {
+        let ident_generics = (!generics.params.is_empty()).then(|| {
             let generics = self.generics.params.iter().map(|g| match g {
                 syn::GenericParam::Type(x) => {
                     let ident = &x.ident;
@@ -92,14 +92,14 @@ impl ToTokens for GenericStruct<'_> {
         });

         let table_name_constant = item_attrs.table_name.as_ref().map(|name| {
-            let lit = syn::LitStr::new(&*name, name.span());
+            let lit = syn::LitStr::new(name, name.span());
             quote::quote!(const TABLE_NAME: &'static str = #lit;)
         });

         let fields_attrs = fields
             .named
             .iter()
-            .map(|f| ::from_attributes(&*f.attrs))
+            .map(|f| ::from_attributes(&f.attrs))
             .collect::, _>>()
             .map_err(|e: darling::Error| abort!(crate::Error::from(e)))
             .unwrap();
@@ -131,7 +131,7 @@ impl ToTokens for GenericStruct<'_> {
         let filed_name_constant = fields.named.iter().zip(&fields_names).map(|(f, name)| {
             let ident = f.ident.as_ref().unwrap().unraw();
-            let lit = syn::LitStr::new(&*name, f.span());
+            let lit = syn::LitStr::new(name, f.span());
             let const_name = syn::Ident::new(
                 &format!("{}_FIELD", heck::AsShoutySnakeCase(ident.to_string())),
                 f.span(),
@@ -148,8 +148,8 @@ impl ToTokens for GenericStruct<'_> {
             .enumerate()
             .map(|(i, ((f, attrs), name))| {
                 let ident = f.ident.as_ref().unwrap();
-                let ref ty = f.ty;
-                let lit = syn::LitStr::new(&name, ident.span());
+                let ty = &f.ty;
+                let lit = syn::LitStr::new(name, ident.span());

                 let place = field_ident
                     .iter()
                     .zip(&fields_attrs)
                     .zip(&fields_names)
                     .take(i)
                     .map(|((f, attrs), name)| {
-                        let lit = syn::LitStr::new(&name, f.span());
+                        let lit = syn::LitStr::new(name, f.span());
                         if attrs.json {
                             quote::quote!(
                                 row.place(*indexes.get(#lit).unwrap(), #f.rollback())
                             )
                         } else {
@@ -206,7 +206,7 @@ impl ToTokens for GenericStruct<'_> {
             .zip(&fields_attrs)
             .map(|(f, attrs)| {
                 let ident = f.ident.as_ref().unwrap();
-                let ref ty = f.ty;
+                let ty = &f.ty;
                 if attrs.json {
                     quote::quote!(#ident: #ident.commit().0)
                 } else {
diff --git a/derive/src/from_value/enums/attrs/container.rs b/derive/src/from_value/enums/attrs/container.rs
index 7637250..c930c39 100644
--- a/derive/src/from_value/enums/attrs/container.rs
+++ b/derive/src/from_value/enums/attrs/container.rs
@@ -116,18 +116,18 @@ pub enum EnumRepr {
 }

 impl EnumRepr {
-    const I8_IDENT: &str = "i8";
-    const U8_IDENT: &str = "u8";
-    const I16_IDENT: &str = "i16";
-    const U16_IDENT: &str = "u16";
-    const I32_IDENT: &str = "i32";
-    const U32_IDENT: &str = "u32";
-    const I64_IDENT: &str = "i64";
-    const U64_IDENT: &str = "u64";
-    const I128_IDENT: &str = "i128";
-    const U128_IDENT: &str = "u128";
-    const ISIZE_IDENT: &str = "isize";
-    const USIZE_IDENT: &str = "usize";
+    const I8_IDENT: &'static str = "i8";
+    const U8_IDENT: &'static str = "u8";
+    const I16_IDENT: &'static str = "i16";
+    const U16_IDENT: &'static str = "u16";
+    const I32_IDENT: &'static str = "i32";
+    const U32_IDENT: &'static str = "u32";
+    const I64_IDENT: &'static str = "i64";
+    const U64_IDENT: &'static str = "u64";
+    const I128_IDENT: &'static str = "i128";
+    const U128_IDENT: &'static str = "u128";
+    const ISIZE_IDENT: &'static str = "isize";
+    const USIZE_IDENT: &'static str = "usize";

     pub fn span(&self) -> Span {
         match self {
@@ -197,13 +197,13 @@ impl Default for EnumRepr {
 impl FromMeta for EnumRepr {
     fn from_list(items: &[darling::ast::NestedMeta]) -> darling::Result {
         Ok(items
-            .into_iter()
+            .iter()
             .filter_map(|x| match x {
                 darling::ast::NestedMeta::Meta(syn::Meta::Path(path)) => Some(path),
                 _ => None,
             })
             .filter_map(|x| x.get_ident())
-            .find_map(|x| Self::from_ident(x))
+            .find_map(Self::from_ident)
             .unwrap_or_default())
     }
 }
diff --git a/derive/src/from_value/enums/mod.rs b/derive/src/from_value/enums/mod.rs
index 8b96e20..9b297ff 100644
--- a/derive/src/from_value/enums/mod.rs
+++ b/derive/src/from_value/enums/mod.rs
@@ -21,14 +21,11 @@ pub fn impl_from_value_for_enum(
     data_enum: &syn::DataEnum,
 ) -> crate::Result {
     let item_attrs = ::from_attributes(attrs).expect("foo");
-    let meta = attrs.into_iter().map(|attr| &attr.meta).collect::>();
+    let meta = attrs.iter().map(|attr| &attr.meta).collect::>();

     let repr = meta
         .iter()
-        .find_map(|x| match x {
-            syn::Meta::List(y) if y.path.is_ident("repr") => Some(x),
-            _ => None,
-        })
+        .find(|x| matches!(x, syn::Meta::List(y) if y.path.is_ident("repr")))
         .map(|x| ::from_meta(x))
         .transpose()?
         .unwrap_or_default();
@@ -233,26 +230,24 @@ impl ToTokens for Enum {
                         Ok(#ir_name(#parsed_name::Parsed(#container_name::#ident, v)))
                     }
                 )
+            } else if discriminant < &BigInt::default() {
+                quote::quote!(
+                    #crat::Value::Bytes(ref x) if x == #s => {
+                        Ok(#ir_name(#parsed_name::Parsed(#container_name::#ident, v)))
+                    }
+                    #crat::Value::Int(#n) => {
+                        Ok(#ir_name(#parsed_name::Ready(#container_name::#ident)))
+                    }
+                )
             } else {
-                if discriminant < &BigInt::default() {
-                    quote::quote!(
-                        #crat::Value::Bytes(ref x) if x == #s => {
-                            Ok(#ir_name(#parsed_name::Parsed(#container_name::#ident, v)))
-                        }
-                        #crat::Value::Int(#n) => {
-                            Ok(#ir_name(#parsed_name::Ready(#container_name::#ident)))
-                        }
-                    )
-                } else {
-                    quote::quote!(
-                        #crat::Value::Bytes(ref x) if x == #s => {
-                            Ok(#ir_name(#parsed_name::Parsed(#container_name::#ident, v)))
-                        }
-                        #crat::Value::Int(#n) | #crat::Value::UInt(#n) => {
-                            Ok(#ir_name(#parsed_name::Ready(#container_name::#ident)))
-                        }
-                    )
-                }
+                quote::quote!(
+                    #crat::Value::Bytes(ref x) if x == #s => {
+                        Ok(#ir_name(#parsed_name::Parsed(#container_name::#ident, v)))
+                    }
+                    #crat::Value::Int(#n) | #crat::Value::UInt(#n) => {
+                        Ok(#ir_name(#parsed_name::Ready(#container_name::#ident)))
+                    }
+                )
             }
         },
     );
diff --git a/derive/src/from_value/structs/mod.rs b/derive/src/from_value/structs/mod.rs
index f176d5c..06b333d 100644
--- a/derive/src/from_value/structs/mod.rs
+++ b/derive/src/from_value/structs/mod.rs
@@ -36,14 +36,11 @@ pub fn impl_from_value_for_struct(
         }
     };

-    let meta = attrs.into_iter().map(|attr| &attr.meta).collect::>();
+    let meta = attrs.iter().map(|attr| &attr.meta).collect::>();

     let item_attrs = meta
         .iter()
-        .find_map(|x| match x {
-            syn::Meta::List(y) if y.path.is_ident("mysql") => Some(x),
-            _ => None,
-        })
+        .find(|x| matches!(x, syn::Meta::List(y) if y.path.is_ident("mysql")))
         .map(|x| ::from_meta(x))
         .transpose()?
         .unwrap_or_default();
@@ -240,7 +237,7 @@ impl ToTokens for NewType<'_> {
         });

         let additional_bounds = {
-            let additional_bounds = self.item_attrs.bound.iter().map(|x| x.0.iter()).flatten();
+            let additional_bounds = self.item_attrs.bound.iter().flat_map(|x| x.0.iter());
             quote::quote!(#(#additional_bounds,)*)
         };
diff --git a/derive/src/warn.rs b/derive/src/warn.rs
index 8456d64..6f9552e 100644
--- a/derive/src/warn.rs
+++ b/derive/src/warn.rs
@@ -35,7 +35,7 @@ pub fn print_warning(
     writeln!(&mut buffer, " | ")?;

     let content = format!("{content}");
-    for line in content.split("\n") {
+    for line in content.split('\n') {
         write!(&mut buffer, " | ")?;
         buffer.set_color(&white)?;
         writeln!(&mut buffer, "{}", line)?;
diff --git a/src/binlog/consts.rs b/src/binlog/consts.rs
index c2a952b..2447a21 100644
--- a/src/binlog/consts.rs
+++ b/src/binlog/consts.rs
@@ -743,7 +743,7 @@ impl TryFrom for Gno {
     type Error = InvalidGno;

     fn try_from(value: u64) -> Result {
-        if value == 0 || (Self::MIN_GNO <= value && value <= Self::MAX_GNO) {
+        if value == 0 || (Self::MIN_GNO..Self::MAX_GNO).contains(&value) {
             Ok(Self(value))
         } else {
             Err(InvalidGno(value))
diff --git a/src/binlog/decimal/mod.rs b/src/binlog/decimal/mod.rs
index 7f5afd6..ee65af1 100644
--- a/src/binlog/decimal/mod.rs
+++ b/src/binlog/decimal/mod.rs
@@ -157,7 +157,7 @@ impl Decimal {
         }

         // is it negative or not
-        let mask = if buffer.get(0).copied().unwrap_or(0) & 0x80 == 0 {
+        let mask = if buffer.first().copied().unwrap_or(0) & 0x80 == 0 {
             // positive, so mask should do noghing
             0
         } else {
@@ -324,7 +324,7 @@ impl FromStr for Decimal {
             from = &from[1..];
         }

-        let point_idx = from.find('.').unwrap_or_else(|| from.len());
+        let point_idx = from.find('.').unwrap_or(from.len());
         let (mut integral, mut fractional) = from.split_at(point_idx);

         fractional = fractional.get(1..).unwrap_or(fractional);
diff --git a/src/binlog/events/anonymous_gtid_event.rs b/src/binlog/events/anonymous_gtid_event.rs
index b59d6c2..d937b92 100644
--- a/src/binlog/events/anonymous_gtid_event.rs
+++ b/src/binlog/events/anonymous_gtid_event.rs
@@ -33,7 +33,7 @@ impl<'de> MyDeserialize<'de> for AnonymousGtidEvent {
     }
 }

-impl<'de> MySerialize for AnonymousGtidEvent {
+impl MySerialize for AnonymousGtidEvent {
     fn serialize(&self, buf: &mut Vec) {
         self.0.serialize(buf)
     }
diff --git a/src/binlog/events/gtid_event.rs b/src/binlog/events/gtid_event.rs
index 3f024db..ef103a4 100644
--- a/src/binlog/events/gtid_event.rs
+++ b/src/binlog/events/gtid_event.rs
@@ -316,7 +316,7 @@ impl<'de> MyDeserialize<'de> for GtidEvent {
     }
 }

-impl<'de> MySerialize for GtidEvent {
+impl MySerialize for GtidEvent {
     fn serialize(&self, buf: &mut Vec) {
         self.flags.serialize(&mut *buf);
         self.sid.serialize(&mut *buf);
@@ -373,7 +373,7 @@ impl<'a> BinlogStruct<'a> for GtidEvent {
             len += S(7); // original_commit_timestamp
         }

-        len += S(crate::misc::lenenc_int_len(*self.tx_length as u64) as usize); // tx_length
+        len += S(crate::misc::lenenc_int_len(*self.tx_length) as usize); // tx_length
         len += S(4); // immediate_server_version
         if self.immediate_server_version != self.original_server_version {
             len += S(4); // original_server_version
diff --git a/src/binlog/events/mod.rs b/src/binlog/events/mod.rs
index 03b7d94..8dbee40 100644
--- a/src/binlog/events/mod.rs
+++ b/src/binlog/events/mod.rs
@@ -231,7 +231,7 @@ impl Event {
         let contains_checksum = self.footer.checksum_alg.is_some()
             && (self.header.event_type.0 == (EventType::FORMAT_DESCRIPTION_EVENT as u8)
                 || self.footer.checksum_alg != Some(RawConst::new(0)));
-        contains_checksum.then(|| self.checksum)
+        contains_checksum.then_some(self.checksum)
     }

     /// Read event-type specific data as a binlog struct.
@@ -674,18 +674,18 @@ impl MySerialize for EventData<'_> {
     fn serialize(&self, buf: &mut Vec) {
         match self {
             EventData::UnknownEvent => (),
-            EventData::StartEventV3(ev) => buf.put_slice(&*ev),
+            EventData::StartEventV3(ev) => buf.put_slice(ev),
             EventData::QueryEvent(ev) => ev.serialize(buf),
             EventData::StopEvent => (),
             EventData::RotateEvent(ev) => ev.serialize(buf),
             EventData::IntvarEvent(ev) => ev.serialize(buf),
-            EventData::LoadEvent(ev) => buf.put_slice(&*ev),
+            EventData::LoadEvent(ev) => buf.put_slice(ev),
             EventData::SlaveEvent => (),
-            EventData::CreateFileEvent(ev) => buf.put_slice(&*ev),
-            EventData::AppendBlockEvent(ev) => buf.put_slice(&*ev),
-            EventData::ExecLoadEvent(ev) => buf.put_slice(&*ev),
-            EventData::DeleteFileEvent(ev) => buf.put_slice(&*ev),
-            EventData::NewLoadEvent(ev) => buf.put_slice(&*ev),
+            EventData::CreateFileEvent(ev) => buf.put_slice(ev),
+            EventData::AppendBlockEvent(ev) => buf.put_slice(ev),
+            EventData::ExecLoadEvent(ev) => buf.put_slice(ev),
+            EventData::DeleteFileEvent(ev) => buf.put_slice(ev),
+            EventData::NewLoadEvent(ev) => buf.put_slice(ev),
             EventData::RandEvent(ev) => ev.serialize(buf),
             EventData::UserVarEvent(ev) => ev.serialize(buf),
             EventData::FormatDescriptionEvent(ev) => ev.serialize(buf),
@@ -693,19 +693,19 @@ impl MySerialize for EventData<'_> {
             EventData::BeginLoadQueryEvent(ev) => ev.serialize(buf),
             EventData::ExecuteLoadQueryEvent(ev) => ev.serialize(buf),
             EventData::TableMapEvent(ev) => ev.serialize(buf),
-            EventData::PreGaWriteRowsEvent(ev) => buf.put_slice(&*ev),
-            EventData::PreGaUpdateRowsEvent(ev) => buf.put_slice(&*ev),
-            EventData::PreGaDeleteRowsEvent(ev) => buf.put_slice(&*ev),
+            EventData::PreGaWriteRowsEvent(ev) => buf.put_slice(ev),
+            EventData::PreGaUpdateRowsEvent(ev) => buf.put_slice(ev),
+            EventData::PreGaDeleteRowsEvent(ev) => buf.put_slice(ev),
             EventData::IncidentEvent(ev) => ev.serialize(buf),
             EventData::HeartbeatEvent => (),
-            EventData::IgnorableEvent(ev) => buf.put_slice(&*ev),
+            EventData::IgnorableEvent(ev) => buf.put_slice(ev),
             EventData::RowsQueryEvent(ev) => ev.serialize(buf),
             EventData::GtidEvent(ev) => ev.serialize(buf),
             EventData::AnonymousGtidEvent(ev) => ev.serialize(buf),
-            EventData::PreviousGtidsEvent(ev) => buf.put_slice(&*ev),
-            EventData::TransactionContextEvent(ev) => buf.put_slice(&*ev),
-            EventData::ViewChangeEvent(ev) => buf.put_slice(&*ev),
-            EventData::XaPrepareLogEvent(ev) => buf.put_slice(&*ev),
+            EventData::PreviousGtidsEvent(ev) => buf.put_slice(ev),
+            EventData::TransactionContextEvent(ev) => buf.put_slice(ev),
+            EventData::ViewChangeEvent(ev) => buf.put_slice(ev),
+            EventData::XaPrepareLogEvent(ev) => buf.put_slice(ev),
             EventData::RowsEvent(ev) => ev.serialize(buf),
             EventData::TransactionPayloadEvent(ev) => ev.serialize(buf),
         }
diff --git a/src/binlog/events/table_map_event.rs b/src/binlog/events/table_map_event.rs
index dddc4d5..720f739 100644
--- a/src/binlog/events/table_map_event.rs
+++ b/src/binlog/events/table_map_event.rs
@@ -238,7 +238,7 @@ impl<'a> TableMapEvent<'a> {
             _ => (),
         }

-        return Ok(column_type);
+        Ok(column_type)
     }
 }
@@ -316,7 +316,7 @@ impl<'a> BinlogStruct<'a> for TableMapEvent<'a> {
         len += S(1);
         len += S(min(self.table_name.0.len(), u8::MAX as usize));
         len += S(1);
-        len += S(crate::misc::lenenc_int_len(self.columns_count() as u64) as usize);
+        len += S(crate::misc::lenenc_int_len(self.columns_count()) as usize);
         len += S(self.columns_count() as usize);
         len += S(crate::misc::lenenc_str_len(self.columns_metadata.as_bytes()) as usize);
         len += S((self.columns_count() as usize + 8) / 7);
@@ -1194,7 +1194,7 @@ impl<'a> OptionalMetadataIter<'a> {
             .0
             .iter()
             .filter_map(|val| ColumnType::try_from(*val).ok())
-            .filter(|ty| f(ty))
+            .filter(f)
             .count()
     }
 }
diff --git a/src/binlog/events/transaction_payload_event.rs b/src/binlog/events/transaction_payload_event.rs
index 0b41115..94c6de3 100644
--- a/src/binlog/events/transaction_payload_event.rs
+++ b/src/binlog/events/transaction_payload_event.rs
@@ -53,7 +53,7 @@ impl<'a> TransactionPayloadEvent<'a> {
     ) -> Self {
         Self {
             payload_size: RawInt::new(payload_size),
-            algorithm: algorithm,
+            algorithm,
             uncompressed_size: RawInt::new(uncompressed_size),
             payload: RawBytes::new(payload),
             header_size: 0,
@@ -151,7 +151,7 @@ impl<'de> MyDeserialize<'de> for TransactionPayloadEvent<'de> {
                     if !have_payload_size || !have_compression_type {
                         Err(io::Error::new(
                             io::ErrorKind::InvalidData,
-                            format!("Missing field in payload header"),
+                            "Missing field in payload header",
                         ))?;
                     }
                     if ob.payload_size.0 as usize > buf.len() {
@@ -205,19 +205,19 @@ impl MySerialize for TransactionPayloadEvent<'_> {
     fn serialize(&self, buf: &mut Vec) {
         buf.put_lenenc_int(TransactionPayloadFields::OTW_PAYLOAD_COMPRESSION_TYPE_FIELD as u64);
-        buf.put_lenenc_int(crate::misc::lenenc_int_len(self.algorithm as u64) as u64);
+        buf.put_lenenc_int(crate::misc::lenenc_int_len(self.algorithm as u64));
         buf.put_lenenc_int(self.algorithm as u64);

         if self.algorithm != TransactionPayloadCompressionType::NONE {
             buf.put_lenenc_int(
                 TransactionPayloadFields::OTW_PAYLOAD_UNCOMPRESSED_SIZE_FIELD as u64,
             );
-            buf.put_lenenc_int(crate::misc::lenenc_int_len(self.uncompressed_size.0) as u64);
+            buf.put_lenenc_int(crate::misc::lenenc_int_len(self.uncompressed_size.0));
             buf.put_lenenc_int(self.uncompressed_size.0);
         }

         buf.put_lenenc_int(TransactionPayloadFields::OTW_PAYLOAD_SIZE_FIELD as u64);
-        buf.put_lenenc_int(crate::misc::lenenc_int_len(self.payload_size.0) as u64);
+        buf.put_lenenc_int(crate::misc::lenenc_int_len(self.payload_size.0));
         buf.put_lenenc_int(self.payload_size.0);

         buf.put_lenenc_int(TransactionPayloadFields::OTW_PAYLOAD_HEADER_END_MARK as u64);
diff --git a/src/binlog/mod.rs b/src/binlog/mod.rs
index b92ca62..073a269 100644
--- a/src/binlog/mod.rs
+++ b/src/binlog/mod.rs
@@ -250,9 +250,9 @@ impl ColumnType {
             | Self::MYSQL_TYPE_ENUM
             | Self::MYSQL_TYPE_STRING
             | Self::MYSQL_TYPE_BIT => ptr.get(..2).map(|x| (x, 2)),
-            Self::MYSQL_TYPE_TYPED_ARRAY => Self::try_from(*ptr.get(0)?)
+            Self::MYSQL_TYPE_TYPED_ARRAY => Self::try_from(*ptr.first()?)
                 .ok()?
-                .get_metadata(&ptr.get(1..)?, true)
+                .get_metadata(ptr.get(1..)?, true)
                 .map(|(x, n)| (x, n + 1)),
             _ => Some((&[], 0)),
         }
diff --git a/src/binlog/row.rs b/src/binlog/row.rs
index 210dea2..5f824f8 100644
--- a/src/binlog/row.rs
+++ b/src/binlog/row.rs
@@ -85,7 +85,7 @@ impl BinlogRow {
     /// Returns columns of this row.
     pub fn columns_ref(&self) -> &[Column] {
-        &*self.columns
+        &self.columns
     }

     /// Returns columns of this row.
@@ -229,9 +229,10 @@ impl<'de> MyDeserialize<'de> for BinlogRow {
                     column_flags |= ColumnFlags::UNSIGNED_FLAG;
                 }

-                if let Some(_) = primary_key_iter
+                if primary_key_iter
                     .next_if(|next| next.is_err() || next.as_ref().ok() == Some(&(i as u64)))
                     .transpose()?
+                    .is_some()
                 {
                     column_flags |= ColumnFlags::PRI_KEY_FLAG;
                 }
diff --git a/src/crypto/rsa.rs b/src/crypto/rsa.rs
index 0ce1091..5c06724 100644
--- a/src/crypto/rsa.rs
+++ b/src/crypto/rsa.rs
@@ -56,7 +56,7 @@ impl Padding for Pkcs1Padding {
         }

         output[2 + ps_len] = 0x00;
-        (&mut output[2 + ps_len + 1..]).copy_from_slice(input);
+        output[2 + ps_len + 1..].copy_from_slice(input);
         output
     }
 }
@@ -95,7 +95,7 @@ impl Pkcs1OaepPadding {
             .map(|c| {
                 let cs = &mut [0u8; 4];
                 BigEndian::write_u32(cs, c as u32);
-                Sha1::digest(&[seed, cs].concat()).to_vec()
+                Sha1::digest([seed, cs].concat()).to_vec()
             })
             .collect::>>()
             .concat();
@@ -118,26 +118,22 @@ impl Padding for Pkcs1OaepPadding {
         let mut ps = vec![0; k - input.len() - 2 * Self::HASH_LEN - 2];
         ps.push(0x01);
         // 4. Let pHash = Hash(P), an octet string of length hLen.
-        let p_hash = Sha1::digest(&[]).to_vec();
+        let p_hash = Sha1::digest([]).to_vec();
         // 5. Concatenate pHash, PS, the message M, and other padding to form a
         //    data block DB as: DB = pHash || PS || 01 || M
         let db = [&*p_hash, &*ps, input].concat();
         // 6. Generate a random octet string seed of length hLen.
         let seed: Vec<_> = (0..Self::HASH_LEN).map(|_| self.rng.gen()).collect();
         // 7. Let dbMask = MGF(seed, emLen-hLen).
-        let db_mask = Self::mgf1(&*seed, k - Self::HASH_LEN);
+        let db_mask = Self::mgf1(&seed, k - Self::HASH_LEN);
         // 8. Let maskedDB = DB \xor dbMask.
-        let masked_db: Vec<_> = db
-            .into_iter()
-            .zip(db_mask.into_iter())
-            .map(|(a, b)| a ^ b)
-            .collect();
+        let masked_db: Vec<_> = db.into_iter().zip(db_mask).map(|(a, b)| a ^ b).collect();
         // 9. Let seedMask = MGF(maskedDB, hLen).
-        let seed_mask = Self::mgf1(&*masked_db, Self::HASH_LEN);
+        let seed_mask = Self::mgf1(&masked_db, Self::HASH_LEN);
         // 10. Let maskedSeed = seed \xor seedMask.
         let masked_seed: Vec<_> = seed
             .into_iter()
-            .zip(seed_mask.into_iter())
+            .zip(seed_mask)
             .map(|(a, b)| a ^ b)
             .collect();
         // 11. Let EM = maskedSeed || maskedDB.
@@ -164,7 +160,7 @@ impl PublicKey {
     /// Will panic in case of bad pem data.
     pub fn from_pem(pem_data: impl AsRef<[u8]>) -> PublicKey {
         let (der, file_type) = der::pem_to_der(pem_data);
-        let (modulus, exponent) = der::parse_pub_key(&*der, file_type);
+        let (modulus, exponent) = der::parse_pub_key(&der, file_type);
         PublicKey::new(modulus, exponent)
     }

@@ -190,7 +186,7 @@ impl PublicKey {
     /// Will panic if block is too long for key or padding.
     pub fn encrypt_block(&self, block: impl AsRef<[u8]>, mut pad: impl Padding) -> Vec {
         let enc_block = pad.pub_pad(block, self.num_octets());
-        let enc_int = BigUint::from_bytes_be(&*enc_block);
+        let enc_int = BigUint::from_bytes_be(&enc_block);
         let rsa = enc_int.modpow(self.exponent(), self.modulus());
         let mut rsa_bytes = rsa.to_bytes_be();
         // is this needed?
diff --git a/src/misc/raw/bytes.rs b/src/misc/raw/bytes.rs
index 9b62fa0..7b1f2df 100644
--- a/src/misc/raw/bytes.rs
+++ b/src/misc/raw/bytes.rs
@@ -191,10 +191,7 @@ impl BytesRepr for NullBytes {
     type Ctx = ();

     fn serialize(text: &[u8], buf: &mut Vec) {
-        let last = text
-            .iter()
-            .position(|x| *x == 0)
-            .unwrap_or_else(|| text.len());
+        let last = text.iter().position(|x| *x == 0).unwrap_or(text.len());
         buf.put_slice(&text[..last]);
         buf.put_u8(0);
     }
diff --git a/src/misc/raw/int.rs b/src/misc/raw/int.rs
index 6b1df91..b032670 100644
--- a/src/misc/raw/int.rs
+++ b/src/misc/raw/int.rs
@@ -124,7 +124,7 @@ macro_rules! def_end_repr {
                 buf.$ser(val)
             }

-            fn deserialize<'de>(buf: &mut ParseBuf<'de>) -> io::Result {
+            fn deserialize(buf: &mut ParseBuf<'_>) -> io::Result {
                 Ok(buf.$de())
             }
         }
@@ -144,7 +144,7 @@ macro_rules! def_end_repr {
                 buf.$ser(val)
             }

-            fn deserialize<'de>(buf: &mut ParseBuf<'de>) -> io::Result {
+            fn deserialize(buf: &mut ParseBuf<'_>) -> io::Result {
                 buf.$de().ok_or_else(crate::misc::unexpected_buf_eof)
             }
         }
diff --git a/src/misc/raw/mod.rs b/src/misc/raw/mod.rs
index 1a44789..8859c72 100644
--- a/src/misc/raw/mod.rs
+++ b/src/misc/raw/mod.rs
@@ -117,7 +117,7 @@ where
     [u8; LEN]: Array,
 {
     fn serialize(&self, buf: &mut Vec) {
-        buf.put_slice(&*self)
+        buf.put_slice(self)
     }
 }
diff --git a/src/misc/raw/seq.rs b/src/misc/raw/seq.rs
index 161ec56..078752e 100644
--- a/src/misc/raw/seq.rs
+++ b/src/misc/raw/seq.rs
@@ -80,7 +80,7 @@ where
     U: SeqRepr,
 {
     fn serialize(&self, buf: &mut Vec) {
-        U::serialize(&*self.0, buf);
+        U::serialize(&self.0, buf);
     }
 }
@@ -174,6 +174,11 @@ impl<'a, T: IntRepr, U> RawSeq<'a, T, U> {
         self.0.len()
     }

+    /// Returns `true` if the sequence has a length of 0.
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
     /// Returns a `'static` version of `self`.
     pub fn into_owned(self) -> RawSeq<'static, T, U> {
         RawSeq(Cow::Owned(self.0.into_owned()), self.1)
diff --git a/src/named_params.rs b/src/named_params.rs
index 5fee85a..74842ee 100644
--- a/src/named_params.rs
+++ b/src/named_params.rs
@@ -83,10 +83,11 @@ pub fn parse_named_params(
                     rematch = true;
                 }
             },
-            InSharpComment => match c {
-                b'\n' => state = TopLevel,
-                _ => (),
-            },
+            InSharpComment => {
+                if *c == b'\n' {
+                    state = TopLevel
+                }
+            }
             MaybeInDoubleDashComment1 => match c {
                 b'-' => state = MaybeInDoubleDashComment2,
                 _ => state = TopLevel,
@@ -98,10 +99,11 @@ pub fn parse_named_params(
                     state = TopLevel
                 }
             }
-            InDoubleDashComment => match c {
-                b'\n' => state = TopLevel,
-                _ => (),
-            },
+            InDoubleDashComment => {
+                if *c == b'\n' {
+                    state = TopLevel
+                }
+            }
             MaybeInCComment1 => match c {
                 b'*' => state = MaybeInCComment2,
                 _ => state = TopLevel,
@@ -110,10 +112,11 @@ pub fn parse_named_params(
                 b'!' | b'+' => state = TopLevel, // extensions and optimizer hints
                 _ => state = InCComment,
             },
-            InCComment => match c {
-                b'*' => state = MaybeExitCComment,
-                _ => (),
-            },
+            InCComment => {
+                if *c == b'*' {
+                    state = MaybeExitCComment
+                }
+            }
             MaybeExitCComment => match c {
                 b'/' => state = TopLevel,
                 _ => state = InCComment,
diff --git a/src/packets/binlog_request.rs b/src/packets/binlog_request.rs
index 488108e..00e5f89 100644
--- a/src/packets/binlog_request.rs
+++ b/src/packets/binlog_request.rs
@@ -64,13 +64,13 @@ impl<'a> BinlogRequest<'a> {
     /// Filename of the binlog on the master (defaults to an empty string).
     pub fn filename_raw(&'a self) -> &'a [u8] {
-        &self.filename.as_ref()
+        self.filename.as_ref()
     }

     /// Filename of the binlog on the master as a UTF-8 string (lossy converted)
     /// (defaults to an empty string).
     pub fn filename(&'a self) -> &'a [u8] {
-        &self.filename.as_ref()
+        self.filename.as_ref()
     }

     /// Position in the binlog-file to start the stream with (defaults to `4`).
diff --git a/src/packets/mod.rs b/src/packets/mod.rs
index 08bf941..dbf0b95 100644
--- a/src/packets/mod.rs
+++ b/src/packets/mod.rs
@@ -276,7 +276,7 @@ impl Column {
     /// Returns value of the schema field of a column packet as a byte slice.
     pub fn schema_ref(&self) -> &[u8] {
-        &*self.schema
+        &self.schema
     }

     /// Returns value of the schema field of a column packet as a string (lossy converted).
@@ -286,7 +286,7 @@ impl Column {
     /// Returns value of the table field of a column packet as a byte slice.
     pub fn table_ref(&self) -> &[u8] {
-        &*self.table
+        &self.table
     }

     /// Returns value of the table field of a column packet as a string (lossy converted).
@@ -298,7 +298,7 @@ impl Column {
     ///
     /// "org_table" is for original table name.
     pub fn org_table_ref(&self) -> &[u8] {
-        &*self.org_table
+        &self.org_table
     }

     /// Returns value of the org_table field of a column packet as a string (lossy converted).
@@ -308,7 +308,7 @@ impl Column {
     /// Returns value of the name field of a column packet as a byte slice.
     pub fn name_ref(&self) -> &[u8] {
-        &*self.name
+        &self.name
     }

     /// Returns value of the name field of a column packet as a string (lossy converted).
@@ -320,7 +320,7 @@ impl Column {
     ///
     /// "org_name" is for original column name.
     pub fn org_name_ref(&self) -> &[u8] {
-        &*self.org_name
+        &self.org_name
     }

     /// Returns value of the org_name field of a column packet as a string (lossy converted).
@@ -726,7 +726,7 @@ impl<'a> ProgressReport<'a> {
     /// Status or state name as a byte slice.
     pub fn stage_info_ref(&self) -> &[u8] {
-        &self.stage_info.as_bytes()
+        self.stage_info.as_bytes()
     }

     /// Status or state name as a string (lossy converted).
@@ -1136,7 +1136,7 @@ impl<'a> AuthPlugin<'a> {
             AuthPlugin::MysqlNativePassword => MYSQL_NATIVE_PASSWORD_PLUGIN_NAME,
             AuthPlugin::MysqlOldPassword => MYSQL_OLD_PASSWORD_PLUGIN_NAME,
             AuthPlugin::MysqlClearPassword => MYSQL_CLEAR_PASSWORD_PLUGIN_NAME,
-            AuthPlugin::Other(name) => &*name,
+            AuthPlugin::Other(name) => name,
         }
     }

@@ -1150,7 +1150,7 @@ impl<'a> AuthPlugin<'a> {
         }
     }

-    pub fn borrow<'b>(&'b self) -> AuthPlugin<'b> {
+    pub fn borrow(&self) -> AuthPlugin<'_> {
         match self {
             AuthPlugin::CachingSha2Password => AuthPlugin::CachingSha2Password,
             AuthPlugin::MysqlNativePassword => AuthPlugin::MysqlNativePassword,
@@ -1269,6 +1269,12 @@ impl OldAuthSwitchRequest {
     }
 }

+impl Default for OldAuthSwitchRequest {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl<'de> MyDeserialize<'de> for OldAuthSwitchRequest {
     const SIZE: Option = Some(1);
     type Ctx = ();
@@ -3116,7 +3122,7 @@ impl<'a> ComBinlogDumpGtid<'a> {
     /// Returns the sequence of sids in this packet.
     pub fn sids(&self) -> &[Sid<'a>] {
-        &*self.sid_block
+        &self.sid_block
     }

     /// Defines filename for this instance.
diff --git a/src/proto/codec/mod.rs b/src/proto/codec/mod.rs
index 6f83044..d024d81 100644
--- a/src/proto/codec/mod.rs
+++ b/src/proto/codec/mod.rs
@@ -132,11 +132,12 @@ impl ChunkInfo {
 }

 /// Decoder for MySql protocol chunk.
-#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
 pub enum ChunkDecoder {
     /// Decoder is waiting for the first or subsequent packet chunk.
     ///
     /// It'll need at least 4 bytes to start decoding a chunk.
+    #[default]
     Idle,
     /// Chunk is being decoded.
     Chunk {
@@ -220,12 +221,6 @@ impl ChunkDecoder {
     }
 }

-impl Default for ChunkDecoder {
-    fn default() -> Self {
-        ChunkDecoder::Idle
-    }
-}
-
 /// Stores information about compressed packet being decoded.
 #[derive(Debug, Clone, Copy, Eq, PartialEq)]
 pub enum CompData {
diff --git a/src/proto/sync_framed.rs b/src/proto/sync_framed.rs
index da15400..a6e9f5f 100644
--- a/src/proto/sync_framed.rs
+++ b/src/proto/sync_framed.rs
@@ -109,7 +109,7 @@ where
     /// Will write packets into the stream. Stream may not be flushed.
     pub fn write(&mut self, item: &mut U) -> Result<(), PacketCodecError> {
         self.codec.encode(item, &mut self.out_buf)?;
-        with_interrupt!(self.stream.write_all(&*self.out_buf))?;
+        with_interrupt!(self.stream.write_all(&self.out_buf))?;
         self.out_buf.clear();
         Ok(())
     }
diff --git a/src/row/mod.rs b/src/row/mod.rs
index 2f4a0b4..3288ffc 100644
--- a/src/row/mod.rs
+++ b/src/row/mod.rs
@@ -77,7 +77,7 @@ impl Row {
     /// Returns columns of this row.
     pub fn columns_ref(&self) -> &[Column] {
-        &*self.columns
+        &self.columns
     }

     /// Returns columns of this row.
@@ -100,7 +100,7 @@ impl Row {
         T: FromValue,
         I: ColumnIndex,
     {
-        index.idx(&*self.columns).and_then(|idx| {
+        index.idx(&self.columns).and_then(|idx| {
             self.values
                 .get(idx)
                 .and_then(|x| x.as_ref())
@@ -117,7 +117,7 @@ impl Row {
         I: ColumnIndex,
     {
         index
-            .idx(&*self.columns)
+            .idx(&self.columns)
             .and_then(|idx| self.values.get(idx))
             .and_then(|x| x.as_ref())
             .map(|x| from_value_opt::(x.clone()))
@@ -130,7 +130,7 @@ impl Row {
         T: FromValue,
         I: ColumnIndex,
     {
-        index.idx(&*self.columns).and_then(|idx| {
+        index.idx(&self.columns).and_then(|idx| {
             self.values
                 .get_mut(idx)
                 .and_then(|x| x.take())
@@ -147,7 +147,7 @@ impl Row {
         I: ColumnIndex,
     {
         index
-            .idx(&*self.columns)
+            .idx(&self.columns)
             .and_then(|idx| self.values.get_mut(idx))
             .and_then(|x| x.take())
             .map(from_value_opt::)
diff --git a/src/scramble.rs b/src/scramble.rs
index 8f1c5ed..35b46dc 100644
--- a/src/scramble.rs
+++ b/src/scramble.rs
@@ -24,7 +24,7 @@ where
 fn to_u8_32(bytes: impl AsRef<[u8]>) -> [u8; 32] {
     let mut out = [0; 32];
-    (&mut out[..]).copy_from_slice(bytes.as_ref());
+    out[..].copy_from_slice(bytes.as_ref());
     out
 }
@@ -87,7 +87,6 @@ pub fn scramble_323(nonce: &[u8], password: &[u8]) -> Option<[u8; 8]> {
     }

     let mut output = [0_u8; 8];
-    let extra: u8;

     hash_password(&mut hash_pass, password);
     hash_password(&mut hash_message, nonce);
@@ -101,7 +100,7 @@ pub fn scramble_323(nonce: &[u8], password: &[u8]) -> Option<[u8; 8]> {
         *x = ((rand_st.my_rnd() * 31_f64).floor() + 64_f64) as u8;
     }

-    extra = (rand_st.my_rnd() * 31_f64).floor() as u8;
+    let extra = (rand_st.my_rnd() * 31_f64).floor() as u8;

     for x in output.iter_mut() {
         *x ^= extra;
diff --git a/src/value/convert/mod.rs b/src/value/convert/mod.rs
index 56b103d..d3c8e27 100644
--- a/src/value/convert/mod.rs
+++ b/src/value/convert/mod.rs
@@ -335,7 +335,7 @@ fn mysql_time_to_duration(
     seconds: u8,
     microseconds: u32,
 ) -> Duration {
-    let nanos = (microseconds as u32) * 1000;
+    let nanos = (microseconds) * 1000;
     let secs = u64::from(seconds)
         + u64::from(minutes) * 60
         + u64::from(hours) * 60 * 60
@@ -353,7 +353,7 @@ impl TryFrom for ParseIrOpt {
             Ok(ParseIrOpt::Parsed(duration, v))
         }
         Value::Bytes(ref val_bytes) => {
-            let duration = match parse_mysql_time_string(&*val_bytes) {
+            let duration = match parse_mysql_time_string(val_bytes) {
                 Some((false, hours, minutes, seconds, microseconds)) => {
                     let days = hours / 24;
                     let hours = (hours % 24) as u8;
diff --git a/src/value/convert/time.rs b/src/value/convert/time.rs
index 7c695ef..a314587 100644
--- a/src/value/convert/time.rs
+++ b/src/value/convert/time.rs
@@ -109,14 +109,14 @@ lazy_static::lazy_static! {
     static ref DATE_TIME_FORMAT: Vec> = {
         let mut format = DATE_FORMAT.clone();
         format.push(FormatItem::Literal(b" "));
-        format.extend_from_slice(&*TIME_FORMAT);
+        format.extend_from_slice(&TIME_FORMAT);
         format
     };

     static ref DATE_TIME_FORMAT_MICRO: Vec> = {
         let mut format = DATE_FORMAT.clone();
         format.push(FormatItem::Literal(b" "));
-        format.extend_from_slice(&*TIME_FORMAT_MICRO);
+        format.extend_from_slice(&TIME_FORMAT_MICRO);
         format
     };
 }
@@ -178,7 +178,7 @@ impl TryFrom for ParseIr {
             }
         }
         Value::Bytes(ref bytes) => {
-            match from_utf8(&*bytes)
+            match from_utf8(bytes)
                 .ok()
                 .and_then(|s| Date::parse(s, &*DATE_FORMAT).ok())
             {
@@ -274,7 +274,7 @@ fn create_primitive_date_time(
 pub(crate) fn parse_mysql_datetime_string_with_time(
     bytes: &[u8],
 ) -> Result {
-    from_utf8(&*bytes)
+    from_utf8(bytes)
         .map_err(|_| Parse::TryFromParsed(TryFromParsed::InsufficientInformation))
         .and_then(|s| {
             if s.len() > 19 {
@@ -290,7 +290,7 @@ pub(crate) fn parse_mysql_datetime_string_with_time(
 }

 fn parse_mysql_time_string_with_time(bytes: &[u8]) -> Result {
-    from_utf8(&*bytes)
+    from_utf8(bytes)
         .map_err(|_| Parse::TryFromParsed(TryFromParsed::InsufficientInformation))
         .and_then(|s| match s.len().cmp(&8) {
             Ordering::Less => Err(Parse::TryFromParsed(TryFromParsed::InsufficientInformation)),
@@ -382,14 +382,7 @@ impl From for Value {
 #[cfg_attr(docsrs, doc(cfg(feature = "time")))]
 impl From