Skip to content

Commit

Permalink
fix merge conflicts
Browse files — Browse the repository at this point in the history
  • Loading branch information
mertak-synnada committed Jan 15, 2025
1 parent 78dfce8 commit cb6a5ff
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 11 deletions.
4 changes: 3 additions & 1 deletion datafusion/core/src/datasource/file_format/json.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,9 @@ use crate::datasource::data_source::FileSourceConfig;
use crate::datasource::file_format::file_compression_type::FileCompressionType;
use crate::datasource::file_format::write::demux::DemuxedStreamReceiver;
use crate::datasource::file_format::write::BatchSerializer;
use crate::datasource::physical_plan::{FileGroupDisplay, FileSink, FileSinkConfig, JsonConfig};
use crate::datasource::physical_plan::{
FileGroupDisplay, FileSink, FileSinkConfig, JsonConfig,
};
use crate::error::Result;
use crate::execution::context::SessionState;
use crate::physical_plan::insert::{DataSink, DataSinkExec};
Expand Down
8 changes: 4 additions & 4 deletions datafusion/core/src/datasource/file_format/parquet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,13 @@ use super::{
};
use crate::arrow::array::RecordBatch;
use crate::arrow::datatypes::{Fields, Schema, SchemaRef};
use crate::datasource::data_source::FileSourceConfig;
use crate::datasource::file_format::file_compression_type::FileCompressionType;
use crate::datasource::file_format::write::get_writer_schema;
use crate::datasource::physical_plan::parquet::{
can_expr_be_pushed_down_with_schemas,
use crate::datasource::physical_plan::parquet::can_expr_be_pushed_down_with_schemas;
use crate::datasource::physical_plan::{
FileGroupDisplay, FileSink, FileSinkConfig, ParquetConfig,
};
use crate::datasource::physical_plan::{FileGroupDisplay, FileSink, FileSinkConfig};
use crate::datasource::statistics::{create_max_min_accs, get_col_stats};
use crate::error::Result;
use crate::execution::context::SessionState;
Expand All @@ -46,7 +47,6 @@ use crate::physical_plan::{
Accumulator, DisplayAs, DisplayFormatType, ExecutionPlan, SendableRecordBatchStream,
Statistics,
};
use crate::datasource::data_source::FileSourceConfig;

use arrow::compute::sum;
use datafusion_common::config::{ConfigField, ConfigFileType, TableParquetOptions};
Expand Down
5 changes: 2 additions & 3 deletions datafusion/core/src/physical_planner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -466,9 +466,8 @@ impl DefaultPhysicalPlanner {
.collect::<Result<Vec<Arc<dyn PhysicalExpr>>>>()
})
.collect::<Result<Vec<_>>>()?;
let value_exec =
MemoryExec::try_new_as_values(SchemaRef::new(exec_schema), exprs)?;
Arc::new(value_exec)
MemorySourceConfig::try_new_as_values(SchemaRef::new(exec_schema), exprs)?
as _
}
LogicalPlan::EmptyRelation(EmptyRelation {
produce_one_row: false,
Expand Down
10 changes: 7 additions & 3 deletions datafusion/physical-plan/src/memory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -816,7 +816,8 @@ mod tests {
Field::new("col0", DataType::UInt32, false),
Field::new("col1", DataType::Utf8, false),
]));
let _ = MemorySourceConfig::try_new_from_batches(invalid_schema, batches).unwrap_err();
let _ = MemorySourceConfig::try_new_from_batches(invalid_schema, batches)
.unwrap_err();
}

// Test issue: https://github.com/apache/datafusion/issues/8763
Expand All @@ -827,8 +828,11 @@ mod tests {
DataType::UInt32,
false,
)]));
let _ = MemorySourceConfig::try_new_as_values(Arc::clone(&schema), vec![vec![lit(1u32)]])
.unwrap();
let _ = MemorySourceConfig::try_new_as_values(
Arc::clone(&schema),
vec![vec![lit(1u32)]],
)
.unwrap();
// Test that a null value is rejected
let _ = MemorySourceConfig::try_new_as_values(
schema,
Expand Down

0 comments on commit cb6a5ff

Please sign in to comment.