
fix rust lints
iskakaushik committed Oct 5, 2023
1 parent 3ec3b32 commit 0951fbd
Showing 11 changed files with 86 additions and 86 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/rust-lint.yml
@@ -26,5 +26,5 @@ jobs:
           components: clippy
 
       - name: clippy
-        run: cargo clippy --all-targets --all-features -- -D warnings
+        run: cargo clippy -- -D warnings
         working-directory: ./nexus
7 changes: 3 additions & 4 deletions nexus/parser/src/lib.rs
@@ -23,7 +23,7 @@ pub struct NexusQueryParser {
 pub enum NexusStatement {
     PeerDDL {
         stmt: Statement,
-        ddl: PeerDDL,
+        ddl: Box<PeerDDL>,
     },
     PeerQuery {
         stmt: Statement,
@@ -55,7 +55,7 @@ impl NexusStatement {
         if let Some(ddl) = ddl {
             return Ok(NexusStatement::PeerDDL {
                 stmt: stmt.clone(),
-                ddl,
+                ddl: Box::new(ddl),
             });
         }
 
@@ -100,8 +100,7 @@ impl NexusQueryParser {
         let peers = tokio::task::block_in_place(move || {
            tokio::runtime::Handle::current().block_on(async move {
                let catalog = self.catalog.lock().await;
-               let peers = catalog.get_peers().await;
-               peers
+               catalog.get_peers().await
            })
        });
 
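Note: for readers without the clippy docs at hand, the Box<PeerDDL> change matches what clippy::large_enum_variant suggests (box the oversized variant payload so the whole enum stays small), and the get_peers change is clippy::let_and_return. A minimal standalone sketch, using made-up types rather than PeerDB's:

    // Illustrative only -- Message and count_peers are stand-ins, not PeerDB types.

    // large_enum_variant: every variant of an enum is as big as the largest one,
    // so boxing the bulky payload keeps the enum itself cheap to move around.
    enum Message {
        Ping,
        Bulk { payload: Box<[u8; 4096]> },
    }

    // let_and_return: binding a value only to return it on the next line is
    // redundant; the parser change returns catalog.get_peers().await directly.
    fn count_peers(names: &[&str]) -> usize {
        names.len() // previously the shape was: `let n = names.len(); n`
    }

    fn main() {
        let _bulk = Message::Bulk { payload: Box::new([0u8; 4096]) };
        let _ping = Message::Ping;
        println!("{}", count_peers(&["bigquery", "snowflake"]));
    }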
24 changes: 13 additions & 11 deletions nexus/peer-bigquery/src/ast.rs
@@ -78,17 +78,19 @@ impl BigqueryAst {
 
         visit_function_arg_mut(query, |node| {
             if let FunctionArgExpr::Expr(arg_expr) = node {
-                if let Expr::Cast { expr: _, data_type } = arg_expr {
-                    if let DataType::Array(_) = data_type {
-                        let list = self
-                            .flatten_expr_to_in_list(&arg_expr)
-                            .expect("failed to flatten in function");
-                        let rewritten_array = Array {
-                            elem: list,
-                            named: true,
-                        };
-                        *node = FunctionArgExpr::Expr(Expr::Array(rewritten_array));
-                    }
+                if let Expr::Cast {
+                    expr: _,
+                    data_type: DataType::Array(_),
+                } = arg_expr
+                {
+                    let list = self
+                        .flatten_expr_to_in_list(arg_expr)
+                        .expect("failed to flatten in function");
+                    let rewritten_array = Array {
+                        elem: list,
+                        named: true,
+                    };
+                    *node = FunctionArgExpr::Expr(Expr::Array(rewritten_array));
                 }
             }
 
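Note: the rewrite above folds two stacked if-lets into one nested pattern (clippy's collapsible-if/match family) and drops the needless & on arg_expr (clippy::needless_borrow). A rough sketch of both ideas, with a stand-in type instead of the sqlparser AST:

    // Illustrative only -- Shape is a stand-in, not a sqlparser AST node.
    enum Shape {
        Circle { radius: Option<f64> },
        Other,
    }

    fn describe(shape: &Shape) -> String {
        // One nested pattern instead of two stacked if-lets, mirroring how the
        // Cast/Array check above became a single `if let`.
        if let Shape::Circle { radius: Some(r) } = shape {
            // `r` is already a reference here; writing `&r` would be the same
            // needless borrow that flatten_expr_to_in_list(&arg_expr) had.
            return format!("circle of radius {r}");
        }
        "something else".to_string()
    }

    fn main() {
        println!("{}", describe(&Shape::Circle { radius: Some(2.0) }));
        println!("{}", describe(&Shape::Other));
    }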
2 changes: 0 additions & 2 deletions nexus/peer-bigquery/src/cursor.rs
@@ -9,7 +9,6 @@ use sqlparser::ast::Statement;
 use crate::BigQueryQueryExecutor;
 
 pub struct BigQueryCursor {
-    stmt: Statement,
     position: usize,
     stream: Mutex<SendableStream>,
     schema: SchemaRef,
@@ -42,7 +41,6 @@ impl BigQueryCursorManager {
 
         // Create a new cursor
         let cursor = BigQueryCursor {
-            stmt: stmt.clone(),
             position: 0,
             stream: Mutex::new(stream),
             schema,
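Note: the only change here is removing a field that was written but never read, which the dead_code lint flags. A tiny hypothetical before-shape, not the real cursor types:

    // Illustrative only -- Sql stands in for sqlparser's Statement.
    struct Sql(String);

    struct Cursor {
        position: usize,
        sql: Sql, // written at construction, never read -> dead_code warning
    }

    fn main() {
        let cursor = Cursor {
            position: 0,
            sql: Sql("SELECT 1".to_string()),
        };
        // Only `position` is ever used, so the commit simply drops the
        // equivalent `stmt` field from both cursor structs.
        println!("{}", cursor.position);
    }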
22 changes: 10 additions & 12 deletions nexus/peer-snowflake/src/auth.rs
@@ -47,9 +47,8 @@ impl SnowflakeAuth {
         expiry_threshold: u64,
     ) -> anyhow::Result<Self> {
         let pkey = match password {
-            Some(pw) => {
-                DecodePrivateKey::from_pkcs8_encrypted_pem(&private_key, pw).context("Invalid private key or decryption failed")?
-            },
+            Some(pw) => DecodePrivateKey::from_pkcs8_encrypted_pem(&private_key, pw)
+                .context("Invalid private key or decryption failed")?,
             None => {
                 DecodePrivateKey::from_pkcs8_pem(&private_key).context("Invalid private key")?
             }
@@ -77,16 +76,15 @@ impl SnowflakeAuth {
     // Normalize the account identifer to a form that is embedded into the JWT.
     // Logic adapted from Snowflake's example Python code for key-pair authentication "sql-api-generate-jwt.py".
     fn normalize_account_identifier(raw_account: &str) -> String {
-        let split_index: usize;
-        if !raw_account.contains(".global") {
-            split_index = *raw_account
-                .find(".")
-                .get_or_insert(raw_account.chars().count());
+        let split_index = if !raw_account.contains(".global") {
+            *raw_account
+                .find('.')
+                .get_or_insert(raw_account.chars().count())
         } else {
-            split_index = *raw_account
-                .find("-")
-                .get_or_insert(raw_account.chars().count());
-        }
+            *raw_account
+                .find('-')
+                .get_or_insert(raw_account.chars().count())
+        };
         raw_account
             .to_uppercase()
             .chars()
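Note: two idioms appear in normalize_account_identifier: binding split_index through an if/else expression instead of declare-then-assign, and passing a char to find (clippy::single_char_pattern). A simplified sketch that uses unwrap_or in place of the get_or_insert trick and is not the real Snowflake logic:

    // Illustrative sketch of the patterns, not the real account normalization.
    fn account_prefix(account: &str) -> &str {
        // Binding through an if/else expression replaces the
        // `let split_index; if .. { split_index = .. } else { split_index = .. }` shape,
        // and find('.') takes a char rather than "." (single_char_pattern).
        let split_index = if !account.contains(".global") {
            account.find('.').unwrap_or(account.len())
        } else {
            account.find('-').unwrap_or(account.len())
        };
        &account[..split_index]
    }

    fn main() {
        assert_eq!(account_prefix("myorg.snowflakecomputing.com"), "myorg");
        assert_eq!(account_prefix("acct-1.global"), "acct");
    }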
2 changes: 0 additions & 2 deletions nexus/peer-snowflake/src/cursor.rs
@@ -7,7 +7,6 @@ use sqlparser::ast::Statement;
 use tokio::sync::Mutex;
 
 pub struct SnowflakeCursor {
-    stmt: Statement,
     position: usize,
     stream: Mutex<SendableStream>,
     schema: SchemaRef,
@@ -39,7 +38,6 @@ impl SnowflakeCursorManager {
 
         // Create a new cursor
         let cursor = SnowflakeCursor {
-            stmt: stmt.clone(),
             position: 0,
             stream: Mutex::new(stream),
             schema,
16 changes: 8 additions & 8 deletions nexus/peer-snowflake/src/lib.rs
@@ -28,13 +28,13 @@ mod stream;
 
 const DEFAULT_REFRESH_THRESHOLD: u64 = 3000;
 const DEFAULT_EXPIRY_THRESHOLD: u64 = 3600;
-const SNOWFLAKE_URL_PREFIX: &'static str = "https://";
-const SNOWFLAKE_URL_SUFFIX: &'static str = ".snowflakecomputing.com/api/v2/statements";
+const SNOWFLAKE_URL_PREFIX: &str = "https://";
+const SNOWFLAKE_URL_SUFFIX: &str = ".snowflakecomputing.com/api/v2/statements";
 
-const DATE_OUTPUT_FORMAT: &'static str = "YYYY/MM/DD";
-const TIME_OUTPUT_FORMAT: &'static str = "HH:MI:SS.FF";
-const TIMESTAMP_OUTPUT_FORMAT: &'static str = "YYYY-MM-DDTHH24:MI:SS.FF";
-const TIMESTAMP_TZ_OUTPUT_FORMAT: &'static str = "YYYY-MM-DDTHH24:MI:SS.FFTZHTZM";
+const DATE_OUTPUT_FORMAT: &str = "YYYY/MM/DD";
+const TIME_OUTPUT_FORMAT: &str = "HH:MI:SS.FF";
+const TIMESTAMP_OUTPUT_FORMAT: &str = "YYYY-MM-DDTHH24:MI:SS.FF";
+const TIMESTAMP_TZ_OUTPUT_FORMAT: &str = "YYYY-MM-DDTHH24:MI:SS.FFTZHTZM";
 
 #[derive(Debug, Serialize)]
 struct SQLStatement<'a> {
@@ -59,7 +59,7 @@ pub(crate) struct ResultSetRowType {
     r#type: SnowflakeDataType,
 }
 
-#[allow(non_snake_case)]
+#[allow(non_snake_case, dead_code)]
 #[derive(Deserialize, Debug)]
 struct ResultSetPartitionInfo {
     rowCount: u64,
@@ -207,7 +207,7 @@ impl SnowflakeQueryExecutor {
         })
     }
 
-    pub async fn query(&self, query: &Box<Query>) -> PgWireResult<ResultSet> {
+    pub async fn query(&self, query: &Query) -> PgWireResult<ResultSet> {
         let mut query = query.clone();
 
         let ast = ast::SnowflakeAst::default();
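Note: the const and signature changes line up with clippy::redundant_static_lifetimes (consts are already implicitly 'static) and clippy::borrowed_box (&Query is more general than &Box<Query>). A small sketch with a stand-in type:

    // Illustrative only; Report is a stand-in type, not a PeerDB one.

    // redundant_static_lifetimes: consts are implicitly 'static, so the
    // annotation on &'static str adds nothing.
    const GREETING: &str = "hello";

    struct Report(String);

    // borrowed_box: taking &Report instead of &Box<Report> accepts both boxed
    // and unboxed values; a &Box<Report> auto-derefs to &Report at the call site.
    fn summarize(report: &Report) -> usize {
        report.0.len()
    }

    fn main() {
        let boxed: Box<Report> = Box::new(Report(GREETING.to_string()));
        // Deref coercion turns &Box<Report> into &Report, so callers don't change.
        println!("{}", summarize(&boxed));
    }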
55 changes: 31 additions & 24 deletions nexus/peer-snowflake/src/stream.rs
@@ -13,7 +13,6 @@ use pgwire::{
 };
 use secrecy::ExposeSecret;
 use serde::Deserialize;
-use serde_json;
 use std::{
     pin::Pin,
     task::{Context, Poll},
@@ -146,39 +145,47 @@ impl SnowflakeRecordStream {
                 // really hacky workaround for parsing the UTC timezone specifically.
                 SnowflakeDataType::TimestampLtz => {
                     match DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT) {
-                        Ok(_) => TimestampWithTimeZone(Utc.from_utc_datetime(
-                            &DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)?
-                                .naive_utc(),
-                        )),
-                        Err(_) => TimestampWithTimeZone(Utc.from_utc_datetime(
-                            &DateTime::parse_from_str(
-                                &elem.replace("Z", "+0000"),
-                                TIMESTAMP_TZ_PARSE_FORMAT,
-                            )?
-                            .naive_utc(),
-                        )),
+                        Ok(_) => TimestampWithTimeZone(
+                            Utc.from_utc_datetime(
+                                &DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)?
+                                    .naive_utc(),
+                            ),
+                        ),
+                        Err(_) => TimestampWithTimeZone(
+                            Utc.from_utc_datetime(
+                                &DateTime::parse_from_str(
+                                    &elem.replace('Z', "+0000"),
+                                    TIMESTAMP_TZ_PARSE_FORMAT,
+                                )?
+                                .naive_utc(),
+                            ),
+                        ),
                     }
                 }
                 SnowflakeDataType::TimestampNtz => PostgresTimestamp(
                     NaiveDateTime::parse_from_str(elem, TIMESTAMP_PARSE_FORMAT)?,
                 ),
                 SnowflakeDataType::TimestampTz => {
                     match DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT) {
-                        Ok(_) => TimestampWithTimeZone(Utc.from_utc_datetime(
-                            &DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)?
-                                .naive_utc(),
-                        )),
-                        Err(_) => TimestampWithTimeZone(Utc.from_utc_datetime(
-                            &DateTime::parse_from_str(
-                                &elem.replace("Z", "+0000"),
-                                TIMESTAMP_TZ_PARSE_FORMAT,
-                            )?
-                            .naive_utc(),
-                        )),
+                        Ok(_) => TimestampWithTimeZone(
+                            Utc.from_utc_datetime(
+                                &DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)?
+                                    .naive_utc(),
+                            ),
+                        ),
+                        Err(_) => TimestampWithTimeZone(
+                            Utc.from_utc_datetime(
+                                &DateTime::parse_from_str(
+                                    &elem.replace('Z', "+0000"),
+                                    TIMESTAMP_TZ_PARSE_FORMAT,
+                                )?
+                                .naive_utc(),
+                            ),
+                        ),
                     }
                 }
                 SnowflakeDataType::Variant => {
-                    let jsonb: serde_json::Value = serde_json::from_str(&elem)?;
+                    let jsonb: serde_json::Value = serde_json::from_str(elem)?;
                     Value::JsonB(jsonb)
                 }
             },
@@ -188,7 +195,7 @@ impl SnowflakeRecordStream {
             row_values.push(row_value.unwrap_or(Value::Null));
         }
 
-        self.partition_index = self.partition_index + 1;
+        self.partition_index += 1;
 
         Ok(Record {
             values: row_values,
@@ -200,7 +207,7 @@
         if (self.partition_number + 1) == self.result_set.resultSetMetaData.partitionInfo.len() {
             return Ok(false);
         }
-        self.partition_number = self.partition_number + 1;
+        self.partition_number += 1;
         self.partition_index = 0;
         let partition_number = self.partition_number;
         let secret = self.auth.get_jwt()?.expose_secret().clone();
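Note: beyond the formatting reflow of the timestamp arms, the substantive fixes here are clippy::single_char_pattern (replace('Z', ..)), clippy::needless_borrow (serde_json::from_str(elem)), dropping the redundant use serde_json; import (clippy::single_component_path_imports), and clippy::assign_op_pattern (+=). A compact sketch with stand-in names:

    // Illustrative sketch of the smaller lints fixed in stream.rs; names are stand-ins.
    fn digits(s: &str) -> usize {
        s.chars().filter(|c| c.is_ascii_digit()).count()
    }

    fn main() {
        let elem: &str = "2023-10-05T00:00:00Z";

        // single_char_pattern: replace('Z', ..) takes a char, not "Z".
        let normalized = elem.replace('Z', "+0000");

        // needless_borrow: `elem` is already a &str, so writing digits(&elem)
        // would add a pointless extra borrow -- the same reason
        // serde_json::from_str(&elem) became from_str(elem) above.
        let n = digits(elem);

        // assign_op_pattern: i += 1 replaces i = i + 1.
        let mut partition_index = 0;
        partition_index += 1;

        println!("{normalized} {n} {partition_index}");
    }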
4 changes: 2 additions & 2 deletions nexus/server/src/cursor.rs
@@ -24,7 +24,7 @@ impl PeerCursors {
         self.cursors.remove(&name);
     }
 
-    pub fn get_peer(&self, name: &str) -> Option<&Box<Peer>> {
-        self.cursors.get(name)
+    pub fn get_peer(&self, name: &str) -> Option<&Peer> {
+        self.cursors.get(name).map(|peer| peer.as_ref())
     }
 }
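Note: same clippy::borrowed_box idea as in the Snowflake executor, applied to a return type: expose Option<&Peer> and peel the Box with as_ref, so callers never see the allocation detail. A self-contained sketch with a local Peer stand-in rather than the catalog type:

    // Illustrative only; Peer here is a local stand-in struct.
    use std::collections::HashMap;

    struct Peer {
        name: String,
    }

    struct PeerCursors {
        cursors: HashMap<String, Box<Peer>>,
    }

    impl PeerCursors {
        // Returning Option<&Peer> instead of Option<&Box<Peer>> (borrowed_box):
        // .map(|peer| peer.as_ref()) peels the Box exactly as in the change above.
        fn get_peer(&self, name: &str) -> Option<&Peer> {
            self.cursors.get(name).map(|peer| peer.as_ref())
        }
    }

    fn main() {
        let mut cursors = HashMap::new();
        cursors.insert("bq".to_string(), Box::new(Peer { name: "bq".to_string() }));
        let pc = PeerCursors { cursors };
        if let Some(peer) = pc.get_peer("bq") {
            println!("{}", peer.name);
        }
    }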