Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Don't attempt to run 64-bit tests on 32-bit platforms #6678

Draft
wants to merge 10 commits into
base: main
Choose a base branch
from
Draft
9 changes: 9 additions & 0 deletions arrow-array/src/builder/generic_bytes_view_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -665,9 +665,18 @@ mod tests {
);

let err = v.try_append_view(0, u32::MAX, 1).unwrap_err();
#[cfg(target_pointer_width = "32")]
assert_eq!(err.to_string(), "Invalid argument error: Range 4294967295..4294967295 out of bounds for block of length 17");
#[cfg(target_pointer_width = "64")]
assert_eq!(err.to_string(), "Invalid argument error: Range 4294967295..4294967296 out of bounds for block of length 17");

let err = v.try_append_view(0, 1, u32::MAX).unwrap_err();
#[cfg(target_pointer_width = "32")]
assert_eq!(
err.to_string(),
"Invalid argument error: Range 1..4294967295 out of bounds for block of length 17"
);
#[cfg(target_pointer_width = "64")]
assert_eq!(
err.to_string(),
"Invalid argument error: Range 1..4294967296 out of bounds for block of length 17"
Expand Down
1 change: 1 addition & 0 deletions arrow-array/src/record_batch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -678,6 +678,7 @@ mod tests {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn byte_size_should_not_regress() {
let schema = Schema::new(vec![
Field::new("a", DataType::Int32, false),
Expand Down
6 changes: 6 additions & 0 deletions arrow-buffer/src/native.rs
Original file line number Diff line number Diff line change
Expand Up @@ -345,10 +345,16 @@ mod tests {
assert_eq!(IntervalDayTime::new(1, 0).as_usize(), 1);
assert_eq!(IntervalMonthDayNano::new(1, 0, 0).as_usize(), 1);

#[cfg(target_pointer_width = "32")]
let a = IntervalDayTime::new(23, 0);
#[cfg(target_pointer_width = "64")]
let a = IntervalDayTime::new(23, 53);
let b = IntervalDayTime::usize_as(a.as_usize());
assert_eq!(a, b);

#[cfg(target_pointer_width = "32")]
let a = IntervalMonthDayNano::new(23, 0, 0);
#[cfg(target_pointer_width = "64")]
let a = IntervalMonthDayNano::new(23, 53, 0);
let b = IntervalMonthDayNano::usize_as(a.as_usize());
assert_eq!(a, b);
Expand Down
16 changes: 12 additions & 4 deletions arrow-buffer/src/util/bit_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ mod tests {
}

#[test]
fn test_ceil() {
fn test_ceil_with_32_bits() {
assert_eq!(ceil(0, 1), 0);
assert_eq!(ceil(1, 1), 1);
assert_eq!(ceil(1, 2), 1);
Expand All @@ -266,8 +266,16 @@ mod tests {
assert_eq!(ceil(8, 8), 1);
assert_eq!(ceil(9, 8), 2);
assert_eq!(ceil(9, 9), 1);
assert_eq!(ceil(10000000000, 10), 1000000000);
assert_eq!(ceil(10, 10000000000), 1);
assert_eq!(ceil(10000000000, 1000000000), 10);
assert_eq!(ceil(1_000_000_000, 10), 100_000_000);
assert_eq!(ceil(10, 1_000_000_000), 1);
assert_eq!(ceil(1_000_000_000, 100_000_000), 10);
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_ceil_with_64_bits() {
    // These arguments exceed u32::MAX, so they only fit in `usize` on
    // 64-bit targets; the 32-bit cases live in `test_ceil_with_32_bits`.
    let cases = [
        (10_000_000_000_000, 10, 1_000_000_000_000),
        (10, 10_000_000_000_000, 1),
        (10_000_000_000_000, 1_000_000_000_000, 10),
    ];
    for (value, divisor, expected) in cases {
        assert_eq!(ceil(value, divisor), expected);
    }
}
}
3 changes: 2 additions & 1 deletion arrow-integration-testing/tests/ipc_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
// specific language governing permissions and limitations
// under the License.

use arrow::ipc;
use arrow::ipc::reader::{FileReader, StreamReader};
use arrow::ipc::writer::{FileWriter, IpcWriteOptions, StreamWriter};
use arrow::util::test_util::arrow_test_data;
Expand Down Expand Up @@ -91,7 +90,9 @@ fn write_1_0_0_littleendian() {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn write_2_0_0_compression() {
use arrow::ipc;
let testdata = arrow_test_data();
let version = "2.0.0-compression";
let paths = ["generated_lz4", "generated_zstd"];
Expand Down
1 change: 1 addition & 0 deletions arrow-ipc/src/writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2678,6 +2678,7 @@ mod tests {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_decimal128_alignment8_is_unaligned() {
const IPC_ALIGNMENT: usize = 8;

Expand Down
14 changes: 8 additions & 6 deletions arrow-json/src/reader/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1250,7 +1250,8 @@ mod tests {
test_timestamp::<TimestampNanosecondType>();
}

fn test_time<T: ArrowTemporalType>() {
#[cfg(target_pointer_width = "64")]
fn test_time_with_64_bits<T: ArrowTemporalType>() {
let buf = r#"
{"a": 1, "b": "09:26:56.123 AM", "c": 38.30}
{"a": 2, "b": "23:59:59", "c": 123.456}
Expand Down Expand Up @@ -1323,11 +1324,12 @@ mod tests {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_times_with_64_bits() {
    // Exercise the JSON time-parsing fixture for every supported time
    // type; each call is an independent monomorphization, so order is
    // immaterial. 32-bit variants first, then 64-bit variants.
    test_time_with_64_bits::<Time32SecondType>();
    test_time_with_64_bits::<Time32MillisecondType>();
    test_time_with_64_bits::<Time64MicrosecondType>();
    test_time_with_64_bits::<Time64NanosecondType>();
}

#[test]
Expand Down
1 change: 1 addition & 0 deletions arrow-schema/src/datatype.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1058,6 +1058,7 @@ mod tests {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn size_should_not_regress() {
    // Guard against `DataType` growing past 24 bytes on 64-bit targets;
    // the struct is embedded in every `Field`, so any growth multiplies
    // across schemas. The constant is pointer-width dependent, hence the
    // cfg gate above.
    let actual = std::mem::size_of::<DataType>();
    assert_eq!(actual, 24, "size of DataType changed");
}
Expand Down
1 change: 1 addition & 0 deletions arrow/tests/array_validation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ fn test_bad_number_of_buffers() {
#[should_panic(
expected = "Need at least 18446744073709551615 bytes in buffers[0] in array of type Int64, but got 8"
)]
#[cfg(target_pointer_width = "64")]
fn test_fixed_width_overflow() {
let buffer = Buffer::from_slice_ref([0i32, 2i32]);
ArrayData::try_new(DataType::Int64, usize::MAX, None, 0, vec![buffer], vec![]).unwrap();
Expand Down
14 changes: 11 additions & 3 deletions parquet/src/arrow/arrow_reader/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -925,18 +925,23 @@ mod tests {

use bytes::Bytes;
use half::f16;
#[cfg(target_pointer_width = "64")]
use num::PrimInt;
use rand::{thread_rng, Rng, RngCore};
use tempfile::tempfile;

use arrow_array::builder::*;
use arrow_array::cast::AsArray;
#[cfg(target_pointer_width = "64")]
use arrow_array::types::DecimalType;
use arrow_array::types::{
Decimal128Type, Decimal256Type, DecimalType, Float16Type, Float32Type, Float64Type,
Time32MillisecondType, Time64MicrosecondType,
Decimal128Type, Float16Type, Float32Type, Float64Type, Time32MillisecondType,
Time64MicrosecondType,
};
use arrow_array::*;
use arrow_buffer::{i256, ArrowNativeType, Buffer, IntervalDayTime};
#[cfg(target_pointer_width = "64")]
use arrow_buffer::ArrowNativeType;
use arrow_buffer::{i256, Buffer, IntervalDayTime};
use arrow_data::ArrayDataBuilder;
use arrow_schema::{
ArrowError, DataType as ArrowDataType, Field, Fields, Schema, SchemaRef, TimeUnit,
Expand Down Expand Up @@ -3848,6 +3853,7 @@ mod tests {
assert_eq!(out, batch.slice(2, 1));
}

#[cfg(target_pointer_width = "64")]
fn test_decimal_roundtrip<T: DecimalType>() {
// Precision <= 9 -> INT32
// Precision <= 18 -> INT64
Expand Down Expand Up @@ -3896,7 +3902,9 @@ mod tests {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_decimal() {
    // Decimal256Type is only needed inside this cfg-gated test, so it is
    // imported locally rather than at module scope.
    use arrow_array::types::Decimal256Type;

    // Round-trip both decimal widths through the parquet writer/reader;
    // each invocation is independent, so ordering does not matter.
    test_decimal_roundtrip::<Decimal256Type>();
    test_decimal_roundtrip::<Decimal128Type>();
}
Expand Down
1 change: 1 addition & 0 deletions parquet/src/bloom_filter/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -517,6 +517,7 @@ mod tests {
(0.1, 1000000, 5772541),
(0.01, 1000000, 9681526),
(0.001, 1000000, 14607697),
#[cfg(target_pointer_width = "64")]
(1e-50, 1_000_000_000_000, 14226231280773240832),
] {
assert_eq!(*num_bits, num_of_bits_from_ndv_fpp(*ndv, *fpp) as u64);
Expand Down
6 changes: 6 additions & 0 deletions parquet/src/file/metadata/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1861,6 +1861,9 @@ mod tests {
let parquet_meta = ParquetMetaDataBuilder::new(file_metadata.clone())
.set_row_groups(row_group_meta_with_stats)
.build();
#[cfg(target_pointer_width = "32")]
let base_expected_size = 1632;
#[cfg(target_pointer_width = "64")]
let base_expected_size = 2312;

assert_eq!(parquet_meta.memory_size(), base_expected_size);
Expand Down Expand Up @@ -1888,6 +1891,9 @@ mod tests {
]]))
.build();

#[cfg(target_pointer_width = "32")]
let bigger_expected_size = 1972;
#[cfg(target_pointer_width = "64")]
let bigger_expected_size = 2816;
// more set fields means more memory usage
assert!(bigger_expected_size > base_expected_size);
Expand Down
1 change: 1 addition & 0 deletions parquet/tests/arrow_reader/bad_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ fn test_arrow_gh_41317() {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_arrow_rs_gh_6229_dict_header() {
let err = read_file("ARROW-RS-GH-6229-DICTHEADER.parquet").unwrap_err();
assert_eq!(
Expand Down
2 changes: 2 additions & 0 deletions parquet_derive/src/parquet_field.rs
Original file line number Diff line number Diff line change
Expand Up @@ -841,6 +841,7 @@ mod test {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_generating_a_simple_writer_snippet() {
let snippet: proc_macro2::TokenStream = quote! {
struct ABoringStruct {
Expand Down Expand Up @@ -868,6 +869,7 @@ mod test {
}

#[test]
#[cfg(target_pointer_width = "64")]
fn test_generating_a_simple_reader_snippet() {
let snippet: proc_macro2::TokenStream = quote! {
struct ABoringStruct {
Expand Down
6 changes: 4 additions & 2 deletions parquet_derive_test/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,16 +106,18 @@ mod tests {
use super::*;

use chrono::SubsecRound;
use std::{env, fs, io::Write, sync::Arc};
use std::{env, fs, io::Write};

use parquet::{
file::writer::SerializedFileWriter,
record::{RecordReader, RecordWriter},
schema::parser::parse_message_type,
};

#[test]
#[cfg(target_pointer_width = "64")]
fn test_parquet_derive_hello() {
use parquet::schema::parser::parse_message_type;
use std::sync::Arc;
let file = get_temp_file("test_parquet_derive_hello", &[]);

// The schema is not required, but this tests that the generated
Expand Down