
Commit

review feedback
samuelorji committed Oct 13, 2023
1 parent a23e6eb commit b35346b
Showing 5 changed files with 8 additions and 19 deletions.
2 changes: 1 addition & 1 deletion scylla-cql/src/frame/request/batch.rs
@@ -190,7 +190,7 @@ impl<'b> DeserializableRequest for Batch<'b, BatchStatement<'b>, Vec<SerializedV
     fn deserialize(buf: &mut &[u8]) -> Result<Self, ParseError> {
         let batch_type = buf.get_u8().try_into()?;

-        let statements_count: usize = types::read_short(buf)?.try_into()?;
+        let statements_count: usize = types::read_short(buf)?.into();
         let statements_with_values = (0..statements_count)
             .map(|_| {
                 let batch_statement = BatchStatement::deserialize(buf)?;
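
Note (illustration, not part of the commit): replacing try_into()? with into() is sound here because read_short now returns a u16, and converting a u16 to a usize can never fail: usize is guaranteed to be at least 16 bits wide, so the standard library implements From<u16> for usize. A minimal Rust sketch of the conversion, with illustrative variable names:

// Illustrative sketch, not part of the commit: u16 -> usize conversion is
// infallible because usize is at least 16 bits wide on every supported target.
fn main() {
    let statements_count_raw: u16 = u16::MAX;                  // what read_short would return
    let statements_count: usize = statements_count_raw.into(); // cannot fail
    assert_eq!(statements_count, 65_535);
}
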
4 changes: 2 additions & 2 deletions scylla-cql/src/frame/response/result.rs
@@ -437,7 +437,7 @@ fn deser_type(buf: &mut &[u8]) -> StdResult<ColumnType, ParseError> {
         0x0030 => {
             let keyspace_name: String = types::read_string(buf)?.to_string();
             let type_name: String = types::read_string(buf)?.to_string();
-            let fields_size: usize = types::read_short(buf)?.try_into()?;
+            let fields_size: usize = types::read_short(buf)?.into();

             let mut field_types: Vec<(String, ColumnType)> = Vec::with_capacity(fields_size);

@@ -455,7 +455,7 @@ fn deser_type(buf: &mut &[u8]) -> StdResult<ColumnType, ParseError> {
             }
         }
         0x0031 => {
-            let len: usize = types::read_short(buf)?.try_into()?;
+            let len: usize = types::read_short(buf)?.into();
             let mut types = Vec::with_capacity(len);
             for _ in 0..len {
                 types.push(deser_type(buf)?);
15 changes: 2 additions & 13 deletions scylla-cql/src/frame/types.rs
@@ -5,8 +5,8 @@ use byteorder::{BigEndian, ReadBytesExt};
 use bytes::{Buf, BufMut};
 use num_enum::TryFromPrimitive;
 use std::collections::HashMap;
+use std::convert::TryFrom;
 use std::convert::TryInto;
-use std::convert::{Infallible, TryFrom};
 use std::net::IpAddr;
 use std::net::SocketAddr;
 use std::str;
@@ -98,12 +98,6 @@ impl From<std::str::Utf8Error> for ParseError {
     }
 }

-impl From<Infallible> for ParseError {
-    fn from(_: Infallible) -> Self {
-        ParseError::BadIncomingData("Unexpected Infallible Error".to_string())
-    }
-}
-
 impl From<std::array::TryFromSliceError> for ParseError {
     fn from(_err: std::array::TryFromSliceError) -> Self {
         ParseError::BadIncomingData("array try from slice failed".to_string())
@@ -180,18 +174,13 @@ pub fn read_short(buf: &mut &[u8]) -> Result<u16, ParseError> {
     Ok(v)
 }

-pub fn read_u16(buf: &mut &[u8]) -> Result<u16, ParseError> {
-    let v = buf.read_u16::<BigEndian>()?;
-    Ok(v)
-}
-
 pub fn write_short(v: u16, buf: &mut impl BufMut) {
     buf.put_u16(v);
 }

 pub(crate) fn read_short_length(buf: &mut &[u8]) -> Result<usize, ParseError> {
     let v = read_short(buf)?;
-    let v: usize = v.try_into()?;
+    let v: usize = v.into();
     Ok(v)
 }

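
Note (illustration, not part of the commit): the From<Infallible> for ParseError impl existed only so the ? operator could convert the error of a try_into() call whose error type is Infallible. Once the call sites use into(), no Infallible error is ever produced, so the impl, the duplicate read_u16 helper, and the Infallible import can all go. A small self-contained sketch of why that error type is Infallible in the first place:

// Illustrative sketch, not part of the commit: TryFrom<u16> for usize has
// Error = Infallible, which is the only reason the parent commit needed
// impl From<Infallible> for ParseError. With .into() that conversion is
// expressed as infallible directly.
use std::convert::{Infallible, TryFrom};

fn length_via_try(raw: u16) -> usize {
    let res: Result<usize, Infallible> = usize::try_from(raw);
    // The Err arm is uninhabited, so it can be matched away.
    res.unwrap_or_else(|never| match never {})
}

fn main() {
    assert_eq!(length_via_try(u16::MAX), 65_535);
}
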
2 changes: 1 addition & 1 deletion scylla-cql/src/frame/value.rs
@@ -77,7 +77,7 @@ pub struct CqlDuration {

 #[derive(Debug, Error, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 pub enum SerializeValuesError {
-    #[error("Too many values to add, max 32 767 values can be sent in a request")]
+    #[error("Too many values to add, max 65,535 values can be sent in a request")]
     TooManyValues,
     #[error("Mixing named and not named values is not allowed")]
     MixingNamedAndNotNamedValues,
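
Note (illustration, not part of the commit): the corrected limit in the message tracks the width of the value count, which this change treats as an unsigned 16-bit short rather than a signed one. A trivial check of the two figures quoted in the old and new messages:

// Illustrative check, not part of the commit: the old and new limits are
// simply the signed versus unsigned 16-bit maxima.
fn main() {
    assert_eq!(i16::MAX, 32_767); // old limit quoted in the message
    assert_eq!(u16::MAX, 65_535); // new limit, matching the u16 value count
}
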
4 changes: 2 additions & 2 deletions scylla/src/transport/large_batch_statements_test.rs
@@ -20,7 +20,7 @@ async fn test_large_batch_statements() {
     let max_queries = u16::MAX as usize;
     let batch_insert_result = write_batch(&session, max_queries, &ks).await;

-    assert!(batch_insert_result.is_ok());
+    batch_insert_result.unwrap();

     let too_many_queries = u16::MAX as usize + 1;
     let batch_insert_result = write_batch(&session, too_many_queries, &ks).await;
@@ -51,7 +51,7 @@ async fn create_test_session(session: Session, ks: &String) -> Session {
 }

 async fn write_batch(session: &Session, n: usize, ks: &String) -> Result<QueryResult, QueryError> {
-    let mut batch_query = Batch::new(BatchType::Logged);
+    let mut batch_query = Batch::new(BatchType::Unlogged);
     let mut batch_values = Vec::new();
     for i in 0..n {
         let mut key = vec![0];
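
Note (illustration, not part of the commit): calling unwrap() on the Result makes a failing test panic with the actual QueryError in the message, whereas assert!(batch_insert_result.is_ok()) only reports that the assertion was false. The switch from a Logged to an Unlogged batch presumably keeps this 65,535-statement test cheap, since unlogged batches skip the batch log. A minimal sketch of the unwrap-versus-assert point, using a String error for illustration:

// Illustrative sketch, not part of the commit: unwrap() includes the Debug
// form of the error in the panic message; assert!(res.is_ok()) would not.
fn check(res: Result<(), String>) {
    res.unwrap(); // on Err, the panic message carries the error value
}

fn main() {
    check(Ok(()));
}
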
