refactor: use std::slice::from_ref to remove some clones (#13518)
jonahgao authored Nov 22, 2024
1 parent 58761ac commit a53b974
Showing 10 changed files with 18 additions and 16 deletions.
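
For context, `std::slice::from_ref(&x)` borrows a single value as a one-element slice `&[T]`, so call sites that previously built a temporary array with `&[x.clone()]` no longer need the clone. A minimal sketch of the pattern (not from the commit; `total_len` is a made-up stand-in for any API that takes `&[T]`):

```rust
// Hypothetical helper standing in for an API that expects a slice.
fn total_len(items: &[String]) -> usize {
    items.iter().map(|s| s.len()).sum()
}

fn main() {
    let s = String::from("datafusion");

    // Before: a temporary one-element array, which requires cloning `s`.
    let a = total_len(&[s.clone()]);

    // After: borrow `s` as a one-element slice; no clone, same result.
    let b = total_len(std::slice::from_ref(&s));

    assert_eq!(a, b);
}
```

Both calls hand the callee a `&[String]` of length 1; the second simply reuses the existing value instead of copying it.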
2 changes: 1 addition & 1 deletion datafusion/core/src/dataframe/mod.rs
@@ -1671,7 +1671,7 @@ impl DataFrame {
/// # }
/// ```
pub fn with_column(self, name: &str, expr: Expr) -> Result<DataFrame> {
- let window_func_exprs = find_window_exprs(&[expr.clone()]);
+ let window_func_exprs = find_window_exprs(std::slice::from_ref(&expr));

let (window_fn_str, plan) = if window_func_exprs.is_empty() {
(None, self.plan)
4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/file_format/arrow.rs
@@ -476,7 +476,7 @@ mod tests {
.infer_schema(
&state,
&(store.clone() as Arc<dyn ObjectStore>),
- &[object_meta.clone()],
+ std::slice::from_ref(&object_meta),
)
.await?;
let actual_fields = inferred_schema
@@ -515,7 +515,7 @@ mod tests {
.infer_schema(
&state,
&(store.clone() as Arc<dyn ObjectStore>),
- &[object_meta.clone()],
+ std::slice::from_ref(&object_meta),
)
.await;

4 changes: 3 additions & 1 deletion datafusion/core/src/datasource/file_format/mod.rs
@@ -573,7 +573,9 @@ pub(crate) mod test_util {
let store = Arc::new(LocalFileSystem::new()) as _;
let meta = local_unpartitioned_file(format!("{store_root}/{file_name}"));

- let file_schema = format.infer_schema(state, &store, &[meta.clone()]).await?;
+ let file_schema = format
+     .infer_schema(state, &store, std::slice::from_ref(&meta))
+     .await?;

let statistics = format
.infer_stats(state, &store, file_schema.clone(), &meta)
6 changes: 3 additions & 3 deletions datafusion/core/src/datasource/physical_plan/avro.rs
@@ -284,7 +284,7 @@ mod tests {
let meta = local_unpartitioned_file(filename);

let file_schema = AvroFormat {}
- .infer_schema(&state, &store, &[meta.clone()])
+ .infer_schema(&state, &store, std::slice::from_ref(&meta))
.await?;

let avro_exec = AvroExec::new(
@@ -349,7 +349,7 @@ mod tests {
let object_store_url = ObjectStoreUrl::local_filesystem();
let meta = local_unpartitioned_file(filename);
let actual_schema = AvroFormat {}
- .infer_schema(&state, &object_store, &[meta.clone()])
+ .infer_schema(&state, &object_store, std::slice::from_ref(&meta))
.await?;

let mut builder = SchemaBuilder::from(actual_schema.fields());
@@ -422,7 +422,7 @@ mod tests {
let object_store_url = ObjectStoreUrl::local_filesystem();
let meta = local_unpartitioned_file(filename);
let file_schema = AvroFormat {}
- .infer_schema(&state, &object_store, &[meta.clone()])
+ .infer_schema(&state, &object_store, std::slice::from_ref(&meta))
.await?;

let mut partitioned_file = PartitionedFile::from(meta);
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/physical_plan/json.rs
@@ -430,7 +430,7 @@ mod tests {
.object_meta;
let schema = JsonFormat::default()
.with_file_compression_type(file_compression_type.to_owned())
- .infer_schema(state, &store, &[meta.clone()])
+ .infer_schema(state, &store, std::slice::from_ref(&meta))
.await
.unwrap();

4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/physical_plan/parquet/mod.rs
@@ -1702,7 +1702,7 @@ mod tests {

let store = Arc::new(LocalFileSystem::new()) as _;
let file_schema = ParquetFormat::default()
- .infer_schema(&state, &store, &[meta.clone()])
+ .infer_schema(&state, &store, std::slice::from_ref(&meta))
.await?;

let group_empty = vec![vec![file_range(&meta, 0, 2)]];
@@ -1734,7 +1734,7 @@ mod tests {
let meta = local_unpartitioned_file(filename);

let schema = ParquetFormat::default()
- .infer_schema(&state, &store, &[meta.clone()])
+ .infer_schema(&state, &store, std::slice::from_ref(&meta))
.await
.unwrap();

2 changes: 1 addition & 1 deletion datafusion/core/tests/parquet/page_pruning.rs
@@ -54,7 +54,7 @@ async fn get_parquet_exec(state: &SessionState, filter: Expr) -> ParquetExec {
};

let schema = ParquetFormat::default()
- .infer_schema(state, &store, &[meta.clone()])
+ .infer_schema(state, &store, std::slice::from_ref(&meta))
.await
.unwrap();

4 changes: 2 additions & 2 deletions datafusion/physical-plan/src/aggregates/topk_stream.rs
@@ -123,7 +123,7 @@ impl Stream for GroupedTopKAggregateStream {
batch.num_rows()
);
if log::log_enabled!(Level::Trace) && batch.num_rows() < 20 {
- print_batches(&[batch.clone()])?;
+ print_batches(std::slice::from_ref(&batch))?;
}
self.row_count += batch.num_rows();
let batches = &[batch];
@@ -165,7 +165,7 @@ impl Stream for GroupedTopKAggregateStream {
batch.num_rows()
);
if log::log_enabled!(Level::Trace) {
- print_batches(&[batch.clone()])?;
+ print_batches(std::slice::from_ref(&batch))?;
}
return Poll::Ready(Some(Ok(batch)));
}
2 changes: 1 addition & 1 deletion datafusion/sql/src/expr/mod.rs
@@ -140,7 +140,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
) -> Result<Expr> {
let mut expr = self.sql_expr_to_logical_expr(sql, schema, planner_context)?;
expr = self.rewrite_partial_qualifier(expr, schema);
- self.validate_schema_satisfies_exprs(schema, &[expr.clone()])?;
+ self.validate_schema_satisfies_exprs(schema, std::slice::from_ref(&expr))?;
let (expr, _) = expr.infer_placeholder_types(schema)?;
Ok(expr)
}
4 changes: 2 additions & 2 deletions datafusion/sql/src/select.rs
@@ -167,7 +167,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
let group_by_expr = normalize_col(group_by_expr, &projected_plan)?;
self.validate_schema_satisfies_exprs(
base_plan.schema(),
- &[group_by_expr.clone()],
+ std::slice::from_ref(&group_by_expr),
)?;
Ok(group_by_expr)
})
@@ -815,7 +815,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {

check_columns_satisfy_exprs(
&column_exprs_post_aggr,
- &[having_expr_post_aggr.clone()],
+ std::slice::from_ref(&having_expr_post_aggr),
"HAVING clause references non-aggregate values",
)?;

