diff --git a/arrow-integration-testing/src/lib.rs b/arrow-integration-testing/src/lib.rs index e669690ef4f5..a0a399e62ed0 100644 --- a/arrow-integration-testing/src/lib.rs +++ b/arrow-integration-testing/src/lib.rs @@ -27,6 +27,7 @@ use arrow::datatypes::{DataType, Field, Fields, Schema}; use arrow::error::{ArrowError, Result}; use arrow::ffi::{from_ffi_and_data_type, FFI_ArrowArray, FFI_ArrowSchema}; use arrow::record_batch::RecordBatch; +#[allow(deprecated)] use arrow::util::test_util::arrow_test_data; use arrow_integration_test::*; use std::collections::HashMap; @@ -148,6 +149,7 @@ pub fn open_json_file(json_name: &str) -> Result { /// /// Returns the contents of /// `arrow-ipc-stream/integration/0.17.1/generated_union.json.gz` +#[allow(deprecated)] pub fn read_gzip_json(version: &str, path: &str) -> ArrowJson { use flate2::read::GzDecoder; use std::io::Read; diff --git a/arrow-integration-testing/tests/ipc_reader.rs b/arrow-integration-testing/tests/ipc_reader.rs index a683075990c7..60827c4114d5 100644 --- a/arrow-integration-testing/tests/ipc_reader.rs +++ b/arrow-integration-testing/tests/ipc_reader.rs @@ -20,6 +20,7 @@ use arrow::error::ArrowError; use arrow::ipc::reader::{FileReader, StreamDecoder, StreamReader}; +#[allow(deprecated)] use arrow::util::test_util::arrow_test_data; use arrow_buffer::Buffer; use arrow_integration_testing::read_gzip_json; @@ -27,6 +28,7 @@ use std::fs::File; use std::io::Read; #[test] +#[allow(deprecated)] fn read_0_1_4() { let testdata = arrow_test_data(); let version = "0.14.1"; @@ -48,6 +50,7 @@ fn read_0_1_4() { } #[test] +#[allow(deprecated)] fn read_0_1_7() { let testdata = arrow_test_data(); let version = "0.17.1"; @@ -59,6 +62,7 @@ fn read_0_1_7() { } #[test] +#[allow(deprecated)] fn read_1_0_0_bigendian() { let testdata = arrow_test_data(); let paths = [ @@ -90,6 +94,7 @@ fn read_1_0_0_bigendian() { } #[test] +#[allow(deprecated)] fn read_1_0_0_littleendian() { let testdata = arrow_test_data(); let version = "1.0.0-littleendian"; @@ -125,6 +130,7 @@ fn read_1_0_0_littleendian() { } #[test] +#[allow(deprecated)] fn read_2_0_0_compression() { let testdata = arrow_test_data(); let version = "2.0.0-compression"; diff --git a/arrow-integration-testing/tests/ipc_writer.rs b/arrow-integration-testing/tests/ipc_writer.rs index d780eb2ee0b5..60d21259cd1a 100644 --- a/arrow-integration-testing/tests/ipc_writer.rs +++ b/arrow-integration-testing/tests/ipc_writer.rs @@ -18,12 +18,14 @@ use arrow::ipc; use arrow::ipc::reader::{FileReader, StreamReader}; use arrow::ipc::writer::{FileWriter, IpcWriteOptions, StreamWriter}; +#[allow(deprecated)] use arrow::util::test_util::arrow_test_data; use arrow_integration_testing::read_gzip_json; use std::fs::File; use std::io::Seek; #[test] +#[allow(deprecated)] fn write_0_1_4() { let testdata = arrow_test_data(); let version = "0.14.1"; @@ -45,6 +47,7 @@ fn write_0_1_4() { } #[test] +#[allow(deprecated)] fn write_0_1_7() { let testdata = arrow_test_data(); let version = "0.17.1"; @@ -56,6 +59,7 @@ fn write_0_1_7() { } #[test] +#[allow(deprecated)] fn write_1_0_0_littleendian() { let testdata = arrow_test_data(); let version = "1.0.0-littleendian"; @@ -91,6 +95,7 @@ fn write_1_0_0_littleendian() { } #[test] +#[allow(deprecated)] fn write_2_0_0_compression() { let testdata = arrow_test_data(); let version = "2.0.0-compression"; diff --git a/arrow/benches/bitwise_kernel.rs b/arrow/benches/bitwise_kernel.rs index 8604ea97eb3c..b345f3bf10e6 100644 --- a/arrow/benches/bitwise_kernel.rs +++ b/arrow/benches/bitwise_kernel.rs 
@@ -29,6 +29,7 @@ use rand::RngCore; extern crate arrow; use arrow::util::bench_util::create_primitive_array; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; fn bitwise_array_benchmark(c: &mut Criterion) { @@ -75,6 +76,7 @@ fn bitwise_array_benchmark(c: &mut Criterion) { group.finish(); } +#[allow(deprecated)] fn bitwise_array_scalar_benchmark(c: &mut Criterion) { let size = 64 * 1024_usize; let array_without_null = create_primitive_array::(size, 0 as f32); diff --git a/arrow/benches/buffer_create.rs b/arrow/benches/buffer_create.rs index f6199ccab55c..d55d717aa593 100644 --- a/arrow/benches/buffer_create.rs +++ b/arrow/benches/buffer_create.rs @@ -17,6 +17,7 @@ #[macro_use] extern crate criterion; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use criterion::Criterion; use rand::distr::Uniform; @@ -108,6 +109,7 @@ fn from_slice(data: &[Vec], capacity: usize) -> Buffer { }) } +#[allow(deprecated)] fn create_data(size: usize) -> Vec> { let rng = &mut seedable_rng(); let range = Uniform::new(0, 33).unwrap(); @@ -123,6 +125,7 @@ fn create_data(size: usize) -> Vec> { .collect() } +#[allow(deprecated)] fn create_data_bool(size: usize) -> Vec> { let rng = &mut seedable_rng(); let range = Uniform::new(0, 33).unwrap(); diff --git a/arrow/benches/builder.rs b/arrow/benches/builder.rs index 4f5f38eadfcb..cabb6af85a40 100644 --- a/arrow/benches/builder.rs +++ b/arrow/benches/builder.rs @@ -25,6 +25,7 @@ use criterion::*; use rand::distr::StandardUniform; use arrow::array::*; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use arrow_buffer::i256; use rand::Rng; @@ -66,6 +67,7 @@ fn bench_primitive_nulls(c: &mut Criterion) { group.finish(); } +#[allow(deprecated)] fn bench_bool(c: &mut Criterion) { let data: Vec = seedable_rng() .sample_iter(&StandardUniform) diff --git a/arrow/benches/cast_kernels.rs b/arrow/benches/cast_kernels.rs index a68fc48d30d7..c0095b76fb49 100644 --- a/arrow/benches/cast_kernels.rs +++ b/arrow/benches/cast_kernels.rs @@ -30,6 +30,7 @@ use arrow::array::*; use arrow::compute::cast; use arrow::datatypes::*; use arrow::util::bench_util::*; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; fn build_array(size: usize) -> ArrayRef @@ -40,6 +41,7 @@ where Arc::new(array) } +#[allow(deprecated)] fn build_utf8_date_array(size: usize, with_nulls: bool) -> ArrayRef { use chrono::NaiveDate; @@ -62,6 +64,7 @@ fn build_utf8_date_array(size: usize, with_nulls: bool) -> ArrayRef { Arc::new(builder.finish()) } +#[allow(deprecated)] fn build_utf8_date_time_array(size: usize, with_nulls: bool) -> ArrayRef { // use random numbers to avoid spurious compiler optimizations wrt to branching let mut rng = seedable_rng(); @@ -82,6 +85,7 @@ fn build_utf8_date_time_array(size: usize, with_nulls: bool) -> ArrayRef { Arc::new(builder.finish()) } +#[allow(deprecated)] fn build_decimal128_array(size: usize, precision: u8, scale: i8) -> ArrayRef { let mut rng = seedable_rng(); let mut builder = Decimal128Builder::with_capacity(size); @@ -97,6 +101,7 @@ fn build_decimal128_array(size: usize, precision: u8, scale: i8) -> ArrayRef { ) } +#[allow(deprecated)] fn build_decimal256_array(size: usize, precision: u8, scale: i8) -> ArrayRef { let mut rng = seedable_rng(); let mut builder = Decimal256Builder::with_capacity(size); diff --git a/arrow/benches/comparison_kernels.rs b/arrow/benches/comparison_kernels.rs index c12fd2ad3548..f56a891424a7 100644 --- a/arrow/benches/comparison_kernels.rs +++ b/arrow/benches/comparison_kernels.rs @@ -21,6 +21,7 @@ 
extern crate criterion; use arrow::compute::kernels::cmp::*; use arrow::util::bench_util::*; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use arrow::{array::*, datatypes::Float32Type, datatypes::Int32Type}; use arrow_buffer::IntervalMonthDayNano; @@ -78,6 +79,7 @@ fn make_string_array(size: usize, rng: &mut StdRng) -> impl Iterator(SIZE, 0.0, 42); let arr_b = create_primitive_array_with_seed::(SIZE, 0.0, 43); diff --git a/arrow/benches/csv_reader.rs b/arrow/benches/csv_reader.rs index 331ff9edd5b9..6edd0042dad0 100644 --- a/arrow/benches/csv_reader.rs +++ b/arrow/benches/csv_reader.rs @@ -29,6 +29,7 @@ use arrow::array::*; use arrow::csv; use arrow::datatypes::*; use arrow::util::bench_util::{create_primitive_array, create_string_array_with_len}; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; fn do_bench(c: &mut Criterion, name: &str, cols: Vec) { @@ -57,6 +58,7 @@ fn do_bench(c: &mut Criterion, name: &str, cols: Vec) { } } +#[allow(deprecated)] fn criterion_benchmark(c: &mut Criterion) { let mut rng = seedable_rng(); diff --git a/arrow/benches/interleave_kernels.rs b/arrow/benches/interleave_kernels.rs index ed7ac12379d4..3a17cdaf05d2 100644 --- a/arrow/benches/interleave_kernels.rs +++ b/arrow/benches/interleave_kernels.rs @@ -26,6 +26,7 @@ use rand::Rng; extern crate arrow; use arrow::datatypes::*; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use arrow::{array::*, util::bench_util::*}; use arrow_select::interleave::interleave; @@ -50,6 +51,7 @@ fn do_bench( ); } +#[allow(deprecated)] fn bench_values(c: &mut Criterion, name: &str, len: usize, values: &[&dyn Array]) { let mut rng = seedable_rng(); let indices: Vec<_> = (0..len) diff --git a/arrow/benches/json_writer.rs b/arrow/benches/json_writer.rs index ff76ecdd6253..ca127dbf8f23 100644 --- a/arrow/benches/json_writer.rs +++ b/arrow/benches/json_writer.rs @@ -22,6 +22,7 @@ use arrow::util::bench_util::{ create_primitive_array, create_string_array, create_string_array_with_len, create_string_dict_array, }; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use arrow_array::{Array, ListArray, RecordBatch, StructArray}; use arrow_buffer::{BooleanBuffer, NullBuffer, OffsetBuffer}; @@ -59,11 +60,13 @@ fn create_mixed(len: usize) -> RecordBatch { .unwrap() } +#[allow(deprecated)] fn create_nulls(len: usize) -> NullBuffer { let mut rng = seedable_rng(); BooleanBuffer::from_iter((0..len).map(|_| rng.random_bool(0.2))).into() } +#[allow(deprecated)] fn create_offsets(len: usize) -> (usize, OffsetBuffer) { let mut rng = seedable_rng(); let mut last_offset = 0; diff --git a/arrow/benches/mutable_array.rs b/arrow/benches/mutable_array.rs index 67591194ae6d..fb647813f355 100644 --- a/arrow/benches/mutable_array.rs +++ b/arrow/benches/mutable_array.rs @@ -23,9 +23,11 @@ use rand::Rng; extern crate arrow; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use arrow::{array::*, util::bench_util::create_string_array}; +#[allow(deprecated)] fn create_slices(size: usize) -> Vec<(usize, usize)> { let rng = &mut seedable_rng(); diff --git a/arrow/benches/primitive_run_take.rs b/arrow/benches/primitive_run_take.rs index cabf9c118f97..bc85f4b20bed 100644 --- a/arrow/benches/primitive_run_take.rs +++ b/arrow/benches/primitive_run_take.rs @@ -19,11 +19,13 @@ use arrow::array::UInt32Builder; use arrow::compute::take; use arrow::datatypes::{Int32Type, Int64Type}; use arrow::util::bench_util::*; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use 
arrow_array::UInt32Array; use criterion::{criterion_group, criterion_main, Criterion}; use rand::Rng; +#[allow(deprecated)] fn create_random_index(size: usize, null_density: f32, max_value: usize) -> UInt32Array { let mut rng = seedable_rng(); let mut builder = UInt32Builder::with_capacity(size); diff --git a/arrow/benches/take_kernels.rs b/arrow/benches/take_kernels.rs index a09064839f8a..4f7eb158ff7f 100644 --- a/arrow/benches/take_kernels.rs +++ b/arrow/benches/take_kernels.rs @@ -25,9 +25,11 @@ extern crate arrow; use arrow::compute::{take, TakeOptions}; use arrow::datatypes::*; +#[allow(deprecated)] use arrow::util::test_util::seedable_rng; use arrow::{array::*, util::bench_util::*}; +#[allow(deprecated)] fn create_random_index(size: usize, null_density: f32) -> UInt32Array { let mut rng = seedable_rng(); let mut builder = UInt32Builder::with_capacity(size); diff --git a/arrow/src/util/bench_util.rs b/arrow/src/util/bench_util.rs index 2f0ccf2addd4..c5de96289ad8 100644 --- a/arrow/src/util/bench_util.rs +++ b/arrow/src/util/bench_util.rs @@ -19,6 +19,7 @@ use crate::array::*; use crate::datatypes::*; +#[allow(deprecated)] use crate::util::test_util::seedable_rng; use arrow_buffer::{Buffer, IntervalMonthDayNano}; use half::f16; @@ -33,6 +34,7 @@ use rand::{ use std::ops::Range; /// Creates an random (but fixed-seeded) array of a given size and null density +#[allow(deprecated)] pub fn create_primitive_array(size: usize, null_density: f32) -> PrimitiveArray where T: ArrowPrimitiveType, @@ -100,6 +102,7 @@ pub fn create_month_day_nano_array_with_seed( } /// Creates a random (but fixed-seeded) array of a given size and null density +#[allow(deprecated)] pub fn create_boolean_array(size: usize, null_density: f32, true_density: f32) -> BooleanArray where StandardUniform: Distribution, @@ -149,6 +152,7 @@ pub fn create_longer_string_view_array_with_same_prefix( create_string_view_array_with_len_range_and_prefix(size, null_density, 13, 100, "prefix_") } +#[allow(deprecated)] fn create_string_array_with_len_range_and_prefix( size: usize, null_density: f32, @@ -188,6 +192,7 @@ fn create_string_array_with_len_range_and_prefix( .collect() } +#[allow(deprecated)] fn create_string_view_array_with_len_range_and_prefix( size: usize, null_density: f32, @@ -228,6 +233,7 @@ fn create_string_view_array_with_len_range_and_prefix( } /// Creates a random (but fixed-seeded) array of rand size with a given max size, null density and length +#[allow(deprecated)] fn create_string_array_with_max_len( size: usize, null_density: f32, @@ -249,6 +255,7 @@ fn create_string_array_with_max_len( } /// Creates a random (but fixed-seeded) array of a given size, null density and length +#[allow(deprecated)] pub fn create_string_array_with_len( size: usize, null_density: f32, @@ -277,6 +284,7 @@ pub fn create_string_view_array(size: usize, null_density: f32) -> StringViewArr } /// Creates a random (but fixed-seeded) array of rand size with a given max size, null density and length +#[allow(deprecated)] fn create_string_view_array_with_max_len( size: usize, null_density: f32, @@ -298,6 +306,7 @@ fn create_string_view_array_with_max_len( } /// Creates a random (but fixed-seeded) array of a given size, null density and length +#[allow(deprecated)] pub fn create_string_view_array_with_len( size: usize, null_density: f32, @@ -335,6 +344,7 @@ pub fn create_string_view_array_with_len( /// Creates an random (but fixed-seeded) array of a given size and null density /// consisting of random 4 character alphanumeric strings 
+#[allow(deprecated)] pub fn create_string_dict_array( size: usize, null_density: f32, @@ -425,6 +435,7 @@ pub fn create_string_array_for_runs( } /// Creates an random (but fixed-seeded) binary array of a given size and null density +#[allow(deprecated)] pub fn create_binary_array( size: usize, null_density: f32, @@ -448,6 +459,7 @@ pub fn create_binary_array( } /// Creates an random (but fixed-seeded) array of a given size and null density +#[allow(deprecated)] pub fn create_fsb_array(size: usize, null_density: f32, value_len: usize) -> FixedSizeBinaryArray { let rng = &mut seedable_rng(); @@ -487,6 +499,7 @@ where /// Creates a random (but fixed-seeded) dictionary array of a given size and null density /// with the provided values array and key range +#[allow(deprecated)] pub fn create_sparse_dict_from_values( size: usize, null_density: f32, @@ -524,6 +537,7 @@ where } /// Creates a random (but fixed-seeded) f16 array of a given size and nan-value density +#[allow(deprecated)] pub fn create_f16_array(size: usize, nan_density: f32) -> Float16Array { let mut rng = seedable_rng(); @@ -539,6 +553,7 @@ pub fn create_f16_array(size: usize, nan_density: f32) -> Float16Array { } /// Creates a random (but fixed-seeded) f32 array of a given size and nan-value density +#[allow(deprecated)] pub fn create_f32_array(size: usize, nan_density: f32) -> Float32Array { let mut rng = seedable_rng(); @@ -554,6 +569,7 @@ pub fn create_f32_array(size: usize, nan_density: f32) -> Float32Array { } /// Creates a random (but fixed-seeded) f64 array of a given size and nan-value density +#[allow(deprecated)] pub fn create_f64_array(size: usize, nan_density: f32) -> Float64Array { let mut rng = seedable_rng(); diff --git a/arrow/src/util/data_gen.rs b/arrow/src/util/data_gen.rs index 42a0798f5540..5be2a10cb9a2 100644 --- a/arrow/src/util/data_gen.rs +++ b/arrow/src/util/data_gen.rs @@ -31,6 +31,7 @@ use crate::{ datatypes::*, }; +#[allow(deprecated)] use super::{bench_util::*, bit_util, test_util::seedable_rng}; /// Create a random [RecordBatch] from a schema @@ -369,6 +370,7 @@ fn create_random_map_array( } /// Generate random offsets for list arrays +#[allow(deprecated)] fn create_random_offsets( size: usize, min: T, @@ -389,6 +391,7 @@ fn create_random_offsets( (offsets, current_offset) } +#[allow(deprecated)] fn create_random_null_buffer(size: usize, null_density: f32) -> Buffer { let mut rng = seedable_rng(); let mut mut_buf = MutableBuffer::new_null(size); @@ -506,6 +509,7 @@ impl RandomTemporalValue for Time64NanosecondType { } } +#[allow(deprecated)] fn create_random_temporal_array(size: usize, null_density: f32) -> PrimitiveArray where T: RandomTemporalValue, diff --git a/arrow/src/util/mod.rs b/arrow/src/util/mod.rs index 2c131669b73e..f54b54edb1a6 100644 --- a/arrow/src/util/mod.rs +++ b/arrow/src/util/mod.rs @@ -29,6 +29,10 @@ pub mod data_gen; pub use arrow_cast::pretty; pub mod string_writer; #[cfg(any(test, feature = "test_utils"))] +#[deprecated( + since = "55.0.0", + note = "The `test_util` module is deprecated for public use and will be removed in a future release" +)] pub mod test_util; pub use arrow_cast::display; diff --git a/arrow/src/util/test_util.rs b/arrow/src/util/test_util.rs index 566ccc6ab536..222e9c5e7560 100644 --- a/arrow/src/util/test_util.rs +++ b/arrow/src/util/test_util.rs @@ -201,6 +201,7 @@ impl Iterator for BadIterator { } #[cfg(test)] +#[cfg(not(clippy))] mod tests { use super::*; diff --git a/parquet/examples/async_read_parquet.rs b/parquet/examples/async_read_parquet.rs 
index 0a2e9ba994dd..e50decefabe2 100644 --- a/parquet/examples/async_read_parquet.rs +++ b/parquet/examples/async_read_parquet.rs @@ -26,6 +26,7 @@ use std::time::SystemTime; use tokio::fs::File; #[tokio::main(flavor = "current_thread")] +#[allow(deprecated)] async fn main() -> Result<()> { // Create parquet file that will be read. let testdata = arrow::util::test_util::parquet_test_data(); diff --git a/parquet/examples/read_parquet.rs b/parquet/examples/read_parquet.rs index f374fcd2e1f7..2fd25d42c888 100644 --- a/parquet/examples/read_parquet.rs +++ b/parquet/examples/read_parquet.rs @@ -20,6 +20,7 @@ use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder; use parquet::errors::Result; use std::fs::File; +#[allow(deprecated)] fn main() -> Result<()> { // Create parquet file that will be read. let testdata = arrow::util::test_util::parquet_test_data(); diff --git a/parquet/examples/read_with_rowgroup.rs b/parquet/examples/read_with_rowgroup.rs index 52b3d112274d..a2fceff7a6d0 100644 --- a/parquet/examples/read_with_rowgroup.rs +++ b/parquet/examples/read_with_rowgroup.rs @@ -29,6 +29,7 @@ use std::sync::Arc; use tokio::fs::File; #[tokio::main(flavor = "current_thread")] +#[allow(deprecated)] async fn main() -> Result<()> { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_plain.parquet"); diff --git a/parquet/src/arrow/arrow_reader/mod.rs b/parquet/src/arrow/arrow_reader/mod.rs index 66780fcd6003..6cbb99d0f204 100644 --- a/parquet/src/arrow/arrow_reader/mod.rs +++ b/parquet/src/arrow/arrow_reader/mod.rs @@ -1793,6 +1793,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_decimal_file() { use arrow_array::Decimal128Array; let testdata = arrow::util::test_util::parquet_test_data(); @@ -1827,6 +1828,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_float16_nonzeros_file() { use arrow_array::Float16Array; let testdata = arrow::util::test_util::parquet_test_data(); @@ -1859,6 +1861,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_float16_zeros_file() { use arrow_array::Float16Array; let testdata = arrow::util::test_util::parquet_test_data(); @@ -1883,6 +1886,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_float32_float64_byte_stream_split() { let path = format!( "{}/byte_stream_split.zstd.parquet", @@ -1912,6 +1916,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_extended_byte_stream_split() { let path = format!( "{}/byte_stream_split_extended.gzip.parquet", @@ -1992,6 +1997,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_incorrect_map_schema_file() { let testdata = arrow::util::test_util::parquet_test_data(); // see https://github.com/apache/parquet-testing/pull/47 @@ -2614,6 +2620,7 @@ mod tests { writer.close() } + #[allow(deprecated)] fn get_test_file(file_name: &str) -> File { let mut path = PathBuf::new(); path.push(arrow::util::test_util::arrow_test_data()); @@ -2623,6 +2630,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_structs() { // This particular test file has columns of struct types where there is // a column that has the same name as one of the struct fields @@ -2676,6 +2684,7 @@ mod tests { } #[test] + #[allow(deprecated)] // same as test_read_structs but constructs projection mask via column names fn test_read_structs_by_name() { let testdata = arrow::util::test_util::parquet_test_data(); @@ -2729,6 +2738,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_maps() { let testdata = 
arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/nested_maps.snappy.parquet"); @@ -3118,6 +3128,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_null_list() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/null_list.parquet"); @@ -3142,6 +3153,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_null_schema_inference() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/null_list.parquet"); @@ -3591,6 +3603,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_empty_projection() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_plain.parquet"); @@ -3758,6 +3771,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_scan_row_with_selection() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet"); @@ -3812,6 +3826,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_batch_size_overallocate() { let testdata = arrow::util::test_util::parquet_test_data(); // `alltypes_plain.parquet` only have 8 rows @@ -3830,6 +3845,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_with_page_index_enabled() { let testdata = arrow::util::test_util::parquet_test_data(); @@ -3929,6 +3945,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_lz4_raw() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/lz4_raw_compressed.parquet"); @@ -3969,6 +3986,7 @@ mod tests { // // For more information, check: https://github.com/apache/arrow-rs/issues/2988 #[test] + #[allow(deprecated)] fn test_read_lz4_hadoop_fallback() { for file in [ "hadoop_lz4_compressed.parquet", @@ -4005,6 +4023,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_lz4_hadoop_large() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/hadoop_lz4_compressed_larger.parquet"); @@ -4031,6 +4050,7 @@ mod tests { #[test] #[cfg(feature = "snap")] + #[allow(deprecated)] fn test_read_nested_lists() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/nested_lists.snappy.parquet"); @@ -4344,6 +4364,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_read_old_nested_list() { use arrow::datatypes::DataType; use arrow::datatypes::ToByteSlice; @@ -4394,6 +4415,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_map_no_value() { // File schema: // message schema { diff --git a/parquet/src/arrow/async_reader/mod.rs b/parquet/src/arrow/async_reader/mod.rs index 5d5a7036eefb..449f4a54aff0 100644 --- a/parquet/src/arrow/async_reader/mod.rs +++ b/parquet/src/arrow/async_reader/mod.rs @@ -1139,6 +1139,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_async_reader() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_plain.parquet"); @@ -1196,6 +1197,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_async_reader_with_next_row_group() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_plain.parquet"); @@ -1261,6 +1263,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_async_reader_with_index() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet"); @@ -1329,6 +1332,7 @@ mod tests { } #[tokio::test] + 
#[allow(deprecated)] async fn test_async_reader_with_limit() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet"); @@ -1375,6 +1379,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_async_reader_skip_pages() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet"); @@ -1433,6 +1438,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_fuzz_async_reader_selection() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet"); @@ -1499,6 +1505,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_async_reader_zero_row_selector() { //See https://github.com/apache/arrow-rs/issues/2669 let testdata = arrow::util::test_util::parquet_test_data(); @@ -1747,6 +1754,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_row_filter_with_index() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet"); @@ -1799,6 +1807,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_in_memory_row_group_sparse() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/alltypes_tiny_pages.parquet"); @@ -1892,6 +1901,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_batch_size_overallocate() { let testdata = arrow::util::test_util::parquet_test_data(); // `alltypes_plain.parquet` only have 8 rows @@ -1924,6 +1934,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_get_row_group_column_bloom_filter_without_length() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/data_index_bloom_encoding_stats.parquet"); @@ -2040,6 +2051,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn test_get_row_group_column_bloom_filter_with_length() { // convert to new parquet file with bloom_filter_length let testdata = arrow::util::test_util::parquet_test_data(); @@ -2282,6 +2294,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn empty_offset_index_doesnt_panic_in_read_row_group() { use tokio::fs::File; let testdata = arrow::util::test_util::parquet_test_data(); @@ -2307,6 +2320,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn non_empty_offset_index_doesnt_panic_in_read_row_group() { use tokio::fs::File; let testdata = arrow::util::test_util::parquet_test_data(); @@ -2331,6 +2345,7 @@ mod tests { } #[tokio::test] + #[allow(deprecated)] async fn empty_offset_index_doesnt_panic_in_column_chunks() { use tempfile::TempDir; use tokio::fs::File; diff --git a/parquet/src/arrow/async_reader/store.rs b/parquet/src/arrow/async_reader/store.rs index d5595a83be6e..e3ced427238e 100644 --- a/parquet/src/arrow/async_reader/store.rs +++ b/parquet/src/arrow/async_reader/store.rs @@ -238,12 +238,14 @@ mod tests { use crate::arrow::async_reader::{AsyncFileReader, ParquetObjectReader}; use crate::arrow::ParquetRecordBatchStreamBuilder; use crate::errors::ParquetError; + #[allow(deprecated)] use arrow::util::test_util::parquet_test_data; use futures::FutureExt; use object_store::local::LocalFileSystem; use object_store::path::Path; use object_store::{ObjectMeta, ObjectStore}; + #[allow(deprecated)] async fn get_meta_store() -> (ObjectMeta, Arc) { let res = parquet_test_data(); let store = 
LocalFileSystem::new_with_prefix(res).unwrap(); diff --git a/parquet/src/arrow/async_writer/mod.rs b/parquet/src/arrow/async_writer/mod.rs index edd4b71ae254..341cfcadb153 100644 --- a/parquet/src/arrow/async_writer/mod.rs +++ b/parquet/src/arrow/async_writer/mod.rs @@ -292,6 +292,7 @@ mod tests { use super::*; + #[allow(deprecated)] fn get_test_reader() -> ParquetRecordBatchReader { let testdata = arrow::util::test_util::parquet_test_data(); // This test file is large enough to generate multiple row groups. diff --git a/parquet/src/file/serialized_reader.rs b/parquet/src/file/serialized_reader.rs index ec2cd38c1389..ab25f78fcc23 100644 --- a/parquet/src/file/serialized_reader.rs +++ b/parquet/src/file/serialized_reader.rs @@ -2115,6 +2115,7 @@ mod tests { } #[test] + #[allow(deprecated)] fn test_byte_stream_split_extended() { let path = format!( "{}/byte_stream_split_extended.gzip.parquet", diff --git a/parquet/src/file/writer.rs b/parquet/src/file/writer.rs index 18e357ebc2b9..314772965af0 100644 --- a/parquet/src/file/writer.rs +++ b/parquet/src/file/writer.rs @@ -2292,6 +2292,7 @@ mod tests { #[test] #[cfg(feature = "arrow")] + #[allow(deprecated)] fn test_byte_stream_split_extended_roundtrip() { let path = format!( "{}/byte_stream_split_extended.gzip.parquet", diff --git a/parquet/src/util/test_common/file_util.rs b/parquet/src/util/test_common/file_util.rs index 6c031358e795..0827bc55cb8f 100644 --- a/parquet/src/util/test_common/file_util.rs +++ b/parquet/src/util/test_common/file_util.rs @@ -18,6 +18,7 @@ use std::{fs, path::PathBuf, str::FromStr}; /// Returns path to the test parquet file in 'data' directory +#[allow(deprecated)] pub fn get_test_path(file_name: &str) -> PathBuf { let mut pathbuf = PathBuf::from_str(&arrow::util::test_util::parquet_test_data()).unwrap(); pathbuf.push(file_name); diff --git a/parquet/tests/arrow_reader/bad_data.rs b/parquet/tests/arrow_reader/bad_data.rs index 7de5d7e346d6..0eaf4ba4735d 100644 --- a/parquet/tests/arrow_reader/bad_data.rs +++ b/parquet/tests/arrow_reader/bad_data.rs @@ -17,6 +17,7 @@ //! Tests that reading invalid parquet files returns an error +#[allow(deprecated)] use arrow::util::test_util::parquet_test_data; use parquet::arrow::arrow_reader::ArrowReaderBuilder; use parquet::errors::ParquetError; @@ -35,6 +36,7 @@ static KNOWN_FILES: &[&str] = &[ ]; /// Returns the path to 'parquet-testing/bad_data' +#[allow(deprecated)] fn bad_data_dir() -> PathBuf { // points to parquet-testing/data let parquet_testing_data = parquet_test_data(); diff --git a/parquet/tests/arrow_reader/checksum.rs b/parquet/tests/arrow_reader/checksum.rs index b500b7cb1df8..03e19c50daf5 100644 --- a/parquet/tests/arrow_reader/checksum.rs +++ b/parquet/tests/arrow_reader/checksum.rs @@ -19,6 +19,7 @@ use std::path::PathBuf; +#[allow(deprecated)] use arrow::util::test_util::parquet_test_data; use parquet::arrow::arrow_reader::ArrowReaderBuilder; @@ -61,6 +62,7 @@ fn test_rle_dict_snappy_checksum() { /// Reads a file and returns a vector with one element per record batch. /// The record batch data is replaced with () and errors are stringified. 
+#[allow(deprecated)] fn read_file_batch_errors(name: &str) -> Vec> { let path = PathBuf::from(parquet_test_data()).join(name); println!("Reading file: {:?}", path); diff --git a/parquet/tests/arrow_reader/statistics.rs b/parquet/tests/arrow_reader/statistics.rs index 0eb0fc2b277f..bcff9b333ce7 100644 --- a/parquet/tests/arrow_reader/statistics.rs +++ b/parquet/tests/arrow_reader/statistics.rs @@ -2263,6 +2263,7 @@ async fn test_column_non_existent() { #[cfg(test)] mod test { use super::*; + #[allow(deprecated)] use arrow::util::test_util::parquet_test_data; use arrow_array::{ new_empty_array, ArrayRef, BooleanArray, Decimal128Array, Float32Array, Float64Array, @@ -2595,6 +2596,7 @@ mod test { /// Reads the specified parquet file and validates that the expected min/max /// values for the specified columns are as expected. + #[allow(deprecated)] fn run(self) { let path = PathBuf::from(parquet_test_data()).join(self.file_name); let file = File::open(path).unwrap(); diff --git a/parquet/tests/encryption/encryption.rs b/parquet/tests/encryption/encryption.rs index 86a148be2bd8..87f40dee0f5f 100644 --- a/parquet/tests/encryption/encryption.rs +++ b/parquet/tests/encryption/encryption.rs @@ -37,6 +37,7 @@ use std::fs::File; use std::sync::Arc; #[test] +#[allow(deprecated)] fn test_non_uniform_encryption_plaintext_footer() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_plaintext_footer.parquet.encrypted"); @@ -58,6 +59,7 @@ fn test_non_uniform_encryption_plaintext_footer() { } #[test] +#[allow(deprecated)] fn test_non_uniform_encryption_disabled_aad_storage() { let test_data = arrow::util::test_util::parquet_test_data(); let path = @@ -122,6 +124,7 @@ fn test_plaintext_footer_read_without_decryption() { } #[test] +#[allow(deprecated)] fn test_non_uniform_encryption() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_and_footer.parquet.encrypted"); @@ -141,6 +144,7 @@ fn test_non_uniform_encryption() { } #[test] +#[allow(deprecated)] fn test_uniform_encryption() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/uniform_encryption.parquet.encrypted"); @@ -153,6 +157,7 @@ fn test_uniform_encryption() { } #[test] +#[allow(deprecated)] fn test_decrypting_without_decryption_properties_fails() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/uniform_encryption.parquet.encrypted"); @@ -168,6 +173,7 @@ fn test_decrypting_without_decryption_properties_fails() { } #[test] +#[allow(deprecated)] fn test_aes_ctr_encryption() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_and_footer_ctr.parquet.encrypted"); @@ -198,6 +204,7 @@ fn test_aes_ctr_encryption() { } #[test] +#[allow(deprecated)] fn test_non_uniform_encryption_plaintext_footer_with_key_retriever() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_plaintext_footer.parquet.encrypted"); @@ -217,6 +224,7 @@ fn test_non_uniform_encryption_plaintext_footer_with_key_retriever() { } #[test] +#[allow(deprecated)] fn test_non_uniform_encryption_with_key_retriever() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_and_footer.parquet.encrypted"); @@ -236,6 +244,7 @@ fn test_non_uniform_encryption_with_key_retriever() { } #[test] +#[allow(deprecated)] fn 
test_uniform_encryption_with_key_retriever() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/uniform_encryption.parquet.encrypted"); @@ -370,6 +379,7 @@ fn test_uniform_encryption_roundtrip() { } #[test] +#[allow(deprecated)] fn test_write_non_uniform_encryption() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/encrypt_columns_and_footer.parquet.encrypted"); @@ -396,6 +406,7 @@ fn test_write_non_uniform_encryption() { // todo: currently we raise if writing with plaintext footer, but we should support it // for uniform and non-uniform encryption (see https://github.com/apache/arrow-rs/issues/7320) #[test] +#[allow(deprecated)] fn test_write_uniform_encryption_plaintext_footer() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/encrypt_columns_and_footer.parquet.encrypted"); @@ -438,6 +449,7 @@ fn test_write_uniform_encryption_plaintext_footer() { } #[test] +#[allow(deprecated)] fn test_write_uniform_encryption() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/uniform_encryption.parquet.encrypted"); @@ -456,6 +468,7 @@ fn test_write_uniform_encryption() { } #[test] +#[allow(deprecated)] fn test_write_non_uniform_encryption_column_missmatch() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/encrypt_columns_and_footer.parquet.encrypted"); diff --git a/parquet/tests/encryption/encryption_agnostic.rs b/parquet/tests/encryption/encryption_agnostic.rs index e071471712f4..283bf0930fe0 100644 --- a/parquet/tests/encryption/encryption_agnostic.rs +++ b/parquet/tests/encryption/encryption_agnostic.rs @@ -24,6 +24,7 @@ use parquet::arrow::arrow_reader::{ArrowReaderMetadata, ParquetRecordBatchReader use parquet::arrow::ProjectionMask; use std::fs::File; +#[allow(deprecated)] pub fn read_plaintext_footer_file_without_decryption_properties() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_plaintext_footer.parquet.encrypted"); @@ -81,6 +82,7 @@ pub fn read_plaintext_footer_file_without_decryption_properties() { } #[cfg(feature = "async")] +#[allow(deprecated)] pub async fn read_plaintext_footer_file_without_decryption_properties_async() { use futures::StreamExt; use futures::TryStreamExt; diff --git a/parquet/tests/encryption/encryption_async.rs b/parquet/tests/encryption/encryption_async.rs index 9deadece9544..d00b37cfc3d5 100644 --- a/parquet/tests/encryption/encryption_async.rs +++ b/parquet/tests/encryption/encryption_async.rs @@ -31,6 +31,7 @@ use std::sync::Arc; use tokio::fs::File; #[tokio::test] +#[allow(deprecated)] async fn test_non_uniform_encryption_plaintext_footer() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_plaintext_footer.parquet.encrypted"); @@ -54,6 +55,7 @@ async fn test_non_uniform_encryption_plaintext_footer() { } #[tokio::test] +#[allow(deprecated)] async fn test_misspecified_encryption_keys() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_and_footer.parquet.encrypted"); @@ -167,6 +169,7 @@ async fn test_plaintext_footer_read_without_decryption() { } #[tokio::test] +#[allow(deprecated)] async fn test_non_uniform_encryption() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_and_footer.parquet.encrypted"); @@ 
-188,6 +191,7 @@ async fn test_non_uniform_encryption() { } #[tokio::test] +#[allow(deprecated)] async fn test_uniform_encryption() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/uniform_encryption.parquet.encrypted"); @@ -204,6 +208,7 @@ async fn test_uniform_encryption() { } #[tokio::test] +#[allow(deprecated)] async fn test_aes_ctr_encryption() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/encrypt_columns_and_footer_ctr.parquet.encrypted"); @@ -233,6 +238,7 @@ async fn test_aes_ctr_encryption() { } #[tokio::test] +#[allow(deprecated)] async fn test_decrypting_without_decryption_properties_fails() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/uniform_encryption.parquet.encrypted"); @@ -248,6 +254,7 @@ async fn test_decrypting_without_decryption_properties_fails() { } #[tokio::test] +#[allow(deprecated)] async fn test_write_non_uniform_encryption() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/encrypt_columns_and_footer.parquet.encrypted"); @@ -278,6 +285,7 @@ async fn test_write_non_uniform_encryption() { } #[cfg(feature = "object_store")] +#[allow(deprecated)] async fn get_encrypted_meta_store() -> ( object_store::ObjectMeta, std::sync::Arc, @@ -323,6 +331,7 @@ async fn test_read_encrypted_file_from_object_store() { } #[tokio::test] +#[allow(deprecated)] async fn test_non_uniform_encryption_plaintext_footer_with_key_retriever() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/encrypt_columns_plaintext_footer.parquet.encrypted"); @@ -344,6 +353,7 @@ async fn test_non_uniform_encryption_plaintext_footer_with_key_retriever() { } #[tokio::test] +#[allow(deprecated)] async fn test_non_uniform_encryption_with_key_retriever() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/encrypt_columns_and_footer.parquet.encrypted"); @@ -365,6 +375,7 @@ async fn test_non_uniform_encryption_with_key_retriever() { } #[tokio::test] +#[allow(deprecated)] async fn test_uniform_encryption_with_key_retriever() { let testdata = arrow::util::test_util::parquet_test_data(); let path = format!("{testdata}/uniform_encryption.parquet.encrypted"); diff --git a/parquet/tests/encryption/encryption_disabled.rs b/parquet/tests/encryption/encryption_disabled.rs index 8b38fd5e4ea4..bf006b76dcd8 100644 --- a/parquet/tests/encryption/encryption_disabled.rs +++ b/parquet/tests/encryption/encryption_disabled.rs @@ -19,6 +19,7 @@ use parquet::arrow::arrow_reader::{ArrowReaderMetadata, ArrowReaderOptions}; use std::fs::File; #[test] +#[allow(deprecated)] fn test_read_without_encryption_enabled_fails() { let test_data = arrow::util::test_util::parquet_test_data(); let path = format!("{test_data}/uniform_encryption.parquet.encrypted");
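
The pattern this patch applies is uniform: the `arrow/src/util/mod.rs` hunk above marks `pub mod test_util` as `#[deprecated]` since 55.0.0, and every remaining internal caller (benches, examples, and tests) opts out of the resulting warning locally with `#[allow(deprecated)]`. A minimal caller-side sketch of that same pattern, assuming the `arrow` crate is built with its `test_utils` feature (as the `#[cfg(any(test, feature = "test_utils"))]` gate in the hunk requires); `testdata_path` is a hypothetical helper used only for illustration, not part of this patch:

// Hypothetical helper, shown only to illustrate the #[allow(deprecated)] pattern
// used throughout the hunks above; requires the `test_utils` feature of `arrow`.
#[allow(deprecated)] // silences the module-level #[deprecated] added in this patch
fn testdata_path(file: &str) -> String {
    // `parquet_test_data` is the existing helper referenced in the hunks above.
    let dir = arrow::util::test_util::parquet_test_data();
    format!("{dir}/{file}")
}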