diff --git a/Cargo.toml b/Cargo.toml
index db7b83a6f050..e71e96e8c039 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -81,7 +81,7 @@ include = [
     "LICENSE.txt",
     "NOTICE.txt",
 ]
-edition = "2021"
+edition = "2024"
 rust-version = "1.85"
 
 [workspace.dependencies]
diff --git a/arrow-arith/Cargo.toml b/arrow-arith/Cargo.toml
index 6816eab8dffa..f2a4604c116e 100644
--- a/arrow-arith/Cargo.toml
+++ b/arrow-arith/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-array/Cargo.toml b/arrow-array/Cargo.toml
index e2cfd14e098b..94c595f07980 100644
--- a/arrow-array/Cargo.toml
+++ b/arrow-array/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-avro/Cargo.toml b/arrow-avro/Cargo.toml
index 975bcbe5e8ff..374cc896d53c 100644
--- a/arrow-avro/Cargo.toml
+++ b/arrow-avro/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-buffer/Cargo.toml b/arrow-buffer/Cargo.toml
index 8a8a1d8269c4..d1651abb795b 100644
--- a/arrow-buffer/Cargo.toml
+++ b/arrow-buffer/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-cast/Cargo.toml b/arrow-cast/Cargo.toml
index 67b96fa684ae..12da1af79fe0 100644
--- a/arrow-cast/Cargo.toml
+++ b/arrow-cast/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-csv/Cargo.toml b/arrow-csv/Cargo.toml
index f823226c2106..c44ec01ce357 100644
--- a/arrow-csv/Cargo.toml
+++ b/arrow-csv/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-data/Cargo.toml b/arrow-data/Cargo.toml
index fb6c04abafc7..9c7a5206b2f4 100644
--- a/arrow-data/Cargo.toml
+++ b/arrow-data/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-flight/Cargo.toml b/arrow-flight/Cargo.toml
index 048847be7763..8f95e1995a67 100644
--- a/arrow-flight/Cargo.toml
+++ b/arrow-flight/Cargo.toml
@@ -19,7 +19,7 @@ name = "arrow-flight"
 description = "Apache Arrow Flight"
 version = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 authors = { workspace = true }
 homepage = { workspace = true }
diff --git a/arrow-integration-test/src/field.rs b/arrow-integration-test/src/field.rs
index 4b896ed391be..8b0ca264e02e 100644
--- a/arrow-integration-test/src/field.rs
+++ b/arrow-integration-test/src/field.rs
@@ -142,7 +142,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result {
         Some(_) => {
             return Err(ArrowError::ParseError(
                 "Field 'children' must be an array".to_string(),
-            ))
+            ));
         }
         None => {
             return Err(ArrowError::ParseError(
@@ -158,7 +158,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result {
         Some(_) => {
             return Err(ArrowError::ParseError(
                 "Field 'children' must be an array".to_string(),
-            ))
+            ));
         }
         None => {
             return Err(ArrowError::ParseError(
@@ -177,15 +177,15 @@ pub fn field_from_json(json: &serde_json::Value) -> Result {
                 }
                 t => {
                     return Err(ArrowError::ParseError(format!(
-                        "Map children should be a struct with 2 fields, found {t:?}"
-                    )))
+                        "Map children should be a struct with 2 fields, found {t:?}"
+                    )));
                 }
             }
         }
         Some(_) => {
             return Err(ArrowError::ParseError(
                 "Field 'children' must be an array with 1 element".to_string(),
-            ))
+            ));
         }
         None => {
             return Err(ArrowError::ParseError(
@@ -207,7 +207,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result {
         Some(_) => {
             return Err(ArrowError::ParseError(
                 "Field 'children' must be an array".to_string(),
-            ))
+            ));
         }
         None => {
             return Err(ArrowError::ParseError(
@@ -275,7 +275,7 @@ pub fn field_to_json(field: &Field) -> serde_json::Value {
     };
 
     match field.data_type() {
-        DataType::Dictionary(ref index_type, ref value_type) => {
+        DataType::Dictionary(index_type, value_type) => {
             #[allow(deprecated)]
             let dict_id = field.dict_id().unwrap();
             serde_json::json!({
diff --git a/arrow-integration-test/src/schema.rs b/arrow-integration-test/src/schema.rs
index 512f0aed8e54..7777c48c1f4b 100644
--- a/arrow-integration-test/src/schema.rs
+++ b/arrow-integration-test/src/schema.rs
@@ -40,7 +40,7 @@ pub fn schema_from_json(json: &serde_json::Value) -> Result {
         _ => {
             return Err(ArrowError::ParseError(
                 "Schema fields should be an array".to_string(),
-            ))
+            ));
         }
     };
 
diff --git a/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs b/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs
index 0296fbb7df2c..4c12be6d6c42 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs
@@ -19,10 +19,10 @@
 
 use crate::{AUTH_PASSWORD, AUTH_USERNAME};
-use arrow_flight::{flight_service_client::FlightServiceClient, BasicAuth, HandshakeRequest};
-use futures::{stream, StreamExt};
+use arrow_flight::{BasicAuth, HandshakeRequest, flight_service_client::FlightServiceClient};
+use futures::{StreamExt, stream};
 use prost::Message;
-use tonic::{metadata::MetadataValue, transport::Endpoint, Request, Status};
+use tonic::{Request, Status, metadata::MetadataValue, transport::Endpoint};
 
 type Error = Box;
 type Result = std::result::Result;
diff --git a/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs b/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
index aa3e6952841e..05ca5627ecd8 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
@@ -31,11 +31,11 @@ use arrow::{
     record_batch::RecordBatch,
 };
 use arrow_flight::{
-    flight_descriptor::DescriptorType, flight_service_client::FlightServiceClient,
-    utils::flight_data_to_arrow_batch, FlightData, FlightDescriptor, IpcMessage, Location, Ticket,
+    FlightData, FlightDescriptor, IpcMessage, Location, Ticket, flight_descriptor::DescriptorType,
+    flight_service_client::FlightServiceClient, utils::flight_data_to_arrow_batch,
 };
-use futures::{channel::mpsc, sink::SinkExt, stream, StreamExt};
-use tonic::{transport::Endpoint, Request, Streaming};
+use futures::{StreamExt, channel::mpsc, sink::SinkExt, stream};
+use tonic::{Request, Streaming, transport::Endpoint};
 
 use arrow::datatypes::Schema;
 use std::sync::Arc;
diff --git a/arrow-integration-testing/src/flight_client_scenarios/middleware.rs b/arrow-integration-testing/src/flight_client_scenarios/middleware.rs
index 495825738aec..e8836c34c47d 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/middleware.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/middleware.rs
@@ -18,7 +18,7 @@
 //! Scenario for testing middleware.
 
 use arrow_flight::{
-    flight_descriptor::DescriptorType, flight_service_client::FlightServiceClient, FlightDescriptor,
+    FlightDescriptor, flight_descriptor::DescriptorType, flight_service_client::FlightServiceClient,
 };
 use prost::bytes::Bytes;
 use tonic::{Request, Status};
diff --git a/arrow-integration-testing/src/flight_server_scenarios/auth_basic_proto.rs b/arrow-integration-testing/src/flight_server_scenarios/auth_basic_proto.rs
index 5462e5bd674b..38582e6fef68 100644
--- a/arrow-integration-testing/src/flight_server_scenarios/auth_basic_proto.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios/auth_basic_proto.rs
@@ -21,13 +21,13 @@ use std::pin::Pin;
 use std::sync::Arc;
 
 use arrow_flight::{
-    flight_service_server::FlightService, flight_service_server::FlightServiceServer, Action,
-    ActionType, BasicAuth, Criteria, Empty, FlightData, FlightDescriptor, FlightInfo,
+    Action, ActionType, BasicAuth, Criteria, Empty, FlightData, FlightDescriptor, FlightInfo,
     HandshakeRequest, HandshakeResponse, PollInfo, PutResult, SchemaResult, Ticket,
+    flight_service_server::FlightService, flight_service_server::FlightServiceServer,
 };
-use futures::{channel::mpsc, sink::SinkExt, Stream, StreamExt};
+use futures::{Stream, StreamExt, channel::mpsc, sink::SinkExt};
 use tokio::sync::Mutex;
-use tonic::{metadata::MetadataMap, transport::Server, Request, Response, Status, Streaming};
+use tonic::{Request, Response, Status, Streaming, metadata::MetadataMap, transport::Server};
 
 type TonicStream = Pin + Send + Sync + 'static>>;
 type Error = Box;
diff --git a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
index 9faced000366..ae316886381a 100644
--- a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
@@ -31,14 +31,14 @@ use arrow::{
     record_batch::RecordBatch,
 };
 use arrow_flight::{
-    flight_descriptor::DescriptorType, flight_service_server::FlightService,
-    flight_service_server::FlightServiceServer, Action, ActionType, Criteria, Empty, FlightData,
-    FlightDescriptor, FlightEndpoint, FlightInfo, HandshakeRequest, HandshakeResponse, IpcMessage,
-    PollInfo, PutResult, SchemaAsIpc, SchemaResult, Ticket,
+    Action, ActionType, Criteria, Empty, FlightData, FlightDescriptor, FlightEndpoint, FlightInfo,
+    HandshakeRequest, HandshakeResponse, IpcMessage, PollInfo, PutResult, SchemaAsIpc,
+    SchemaResult, Ticket, flight_descriptor::DescriptorType, flight_service_server::FlightService,
+    flight_service_server::FlightServiceServer,
 };
-use futures::{channel::mpsc, sink::SinkExt, Stream, StreamExt};
+use futures::{Stream, StreamExt, channel::mpsc, sink::SinkExt};
 use tokio::sync::Mutex;
-use tonic::{transport::Server, Request, Response, Status, Streaming};
+use tonic::{Request, Response, Status, Streaming, transport::Server};
 
 type TonicStream = Pin + Send + Sync + 'static>>;
 
@@ -383,7 +383,7 @@ async fn save_uploaded_chunks(
             ipc::MessageHeader::Schema => {
                 return Err(Status::internal(
                     "Not expecting a schema when messages are read",
-                ))
+                ));
             }
             ipc::MessageHeader::RecordBatch => {
                 send_app_metadata(&mut response_tx, &data.app_metadata).await?;
diff --git a/arrow-integration-testing/src/flight_server_scenarios/middleware.rs b/arrow-integration-testing/src/flight_server_scenarios/middleware.rs
index 6685d45dffac..6bafb4843316 100644
--- a/arrow-integration-testing/src/flight_server_scenarios/middleware.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios/middleware.rs
@@ -20,13 +20,13 @@
 use std::pin::Pin;
 
 use arrow_flight::{
+    Action, ActionType, Criteria, Empty, FlightData, FlightDescriptor, FlightInfo,
+    HandshakeRequest, HandshakeResponse, PollInfo, PutResult, SchemaResult, Ticket,
     flight_descriptor::DescriptorType, flight_service_server::FlightService,
-    flight_service_server::FlightServiceServer, Action, ActionType, Criteria, Empty, FlightData,
-    FlightDescriptor, FlightInfo, HandshakeRequest, HandshakeResponse, PollInfo, PutResult,
-    SchemaResult, Ticket,
+    flight_service_server::FlightServiceServer,
 };
 use futures::Stream;
-use tonic::{transport::Server, Request, Response, Status, Streaming};
+use tonic::{Request, Response, Status, Streaming, transport::Server};
 
 type TonicStream = Pin + Send + Sync + 'static>>;
diff --git a/arrow-integration-testing/src/lib.rs b/arrow-integration-testing/src/lib.rs
index 10512a00eb9d..cf572d769df5 100644
--- a/arrow-integration-testing/src/lib.rs
+++ b/arrow-integration-testing/src/lib.rs
@@ -25,12 +25,12 @@ use serde_json::Value;
 use arrow::array::{Array, StructArray};
 use arrow::datatypes::{DataType, Field, Fields, Schema};
 use arrow::error::{ArrowError, Result};
-use arrow::ffi::{from_ffi_and_data_type, FFI_ArrowArray, FFI_ArrowSchema};
+use arrow::ffi::{FFI_ArrowArray, FFI_ArrowSchema, from_ffi_and_data_type};
 use arrow::record_batch::RecordBatch;
 use arrow::util::test_util::arrow_test_data;
 use arrow_integration_test::*;
 use std::collections::HashMap;
-use std::ffi::{c_char, c_int, CStr, CString};
+use std::ffi::{CStr, CString, c_char, c_int};
 use std::fs::File;
 use std::io::BufReader;
 use std::iter::zip;
@@ -261,7 +261,7 @@ fn result_to_c_error(result: &std::result::Result
 /// # Safety
 ///
 /// The pointer is assumed to have been obtained using CString::into_raw.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn arrow_rs_free_error(c_error: *mut c_char) {
     if !c_error.is_null() {
         drop(unsafe { CString::from_raw(c_error) });
@@ -269,7 +269,7 @@ pub unsafe extern "C" fn arrow_rs_free_error(c_error: *mut c_char) {
 }
 
 /// A C-ABI for exporting an Arrow schema from a JSON file
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub extern "C" fn arrow_rs_cdata_integration_export_schema_from_json(
     c_json_name: *const c_char,
     out: *mut FFI_ArrowSchema,
@@ -279,7 +279,7 @@ pub extern "C" fn arrow_rs_cdata_integration_export_schema_from_json(
 }
 
 /// A C-ABI to compare an Arrow schema against a JSON file
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub extern "C" fn arrow_rs_cdata_integration_import_schema_and_compare_to_json(
     c_json_name: *const c_char,
     c_schema: *mut FFI_ArrowSchema,
@@ -289,7 +289,7 @@ pub extern "C" fn arrow_rs_cdata_integration_import_schema_and_compare_to_json(
 }
 
 /// A C-ABI for exporting a RecordBatch from a JSON file
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub extern "C" fn arrow_rs_cdata_integration_export_batch_from_json(
     c_json_name: *const c_char,
     batch_num: c_int,
@@ -300,7 +300,7 @@ pub extern "C" fn arrow_rs_cdata_integration_export_batch_from_json(
 }
 
 /// A C-ABI to compare a RecordBatch against a JSON file
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub extern "C" fn arrow_rs_cdata_integration_import_batch_and_compare_to_json(
     c_json_name: *const c_char,
     batch_num: c_int,
diff --git a/arrow-ipc/Cargo.toml b/arrow-ipc/Cargo.toml
index 1a58be10b6ef..eb42a1ea9589 100644
--- a/arrow-ipc/Cargo.toml
+++ b/arrow-ipc/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-json/Cargo.toml b/arrow-json/Cargo.toml
index 291bfb1906c9..b7134b170f8f 100644
--- a/arrow-json/Cargo.toml
+++ b/arrow-json/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-ord/Cargo.toml b/arrow-ord/Cargo.toml
index e01405170ae6..ae76841bda39 100644
--- a/arrow-ord/Cargo.toml
+++ b/arrow-ord/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-pyarrow/Cargo.toml b/arrow-pyarrow/Cargo.toml
index 6af2636a2495..9cfa235324f1 100644
--- a/arrow-pyarrow/Cargo.toml
+++ b/arrow-pyarrow/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-row/Cargo.toml b/arrow-row/Cargo.toml
index 9248448325db..cd854aa3d48f 100644
--- a/arrow-row/Cargo.toml
+++ b/arrow-row/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-schema/Cargo.toml b/arrow-schema/Cargo.toml
index d71d55496b1b..e8ca520c3c66 100644
--- a/arrow-schema/Cargo.toml
+++ b/arrow-schema/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-select/Cargo.toml b/arrow-select/Cargo.toml
index 0cc9054f98d2..443094e6c986 100644
--- a/arrow-select/Cargo.toml
+++ b/arrow-select/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow-string/Cargo.toml b/arrow-string/Cargo.toml
index 41c1e2ad9dac..3045c355e48a 100644
--- a/arrow-string/Cargo.toml
+++ b/arrow-string/Cargo.toml
@@ -25,7 +25,7 @@ authors = { workspace = true }
 license = { workspace = true }
 keywords = { workspace = true }
 include = { workspace = true }
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/arrow/benches/aggregate_kernels.rs b/arrow/benches/aggregate_kernels.rs
index 25dbe3548496..baf90e22962d 100644
--- a/arrow/benches/aggregate_kernels.rs
+++ b/arrow/benches/aggregate_kernels.rs
@@ -25,7 +25,7 @@ extern crate arrow;
 
 use arrow::compute::kernels::aggregate::*;
 use arrow::util::bench_util::*;
 use arrow::{array::*, datatypes::Float32Type};
-use arrow_array::types::{Float64Type, Int16Type, Int32Type, Int64Type, Int8Type};
+use arrow_array::types::{Float64Type, Int8Type, Int16Type, Int32Type, Int64Type};
 
 const BATCH_SIZE: usize = 64 * 1024;
 
diff --git a/arrow/benches/boolean_append_packed.rs b/arrow/benches/boolean_append_packed.rs
index 508720eb346f..5bf98741bc83 100644
--- a/arrow/benches/boolean_append_packed.rs
+++ b/arrow/benches/boolean_append_packed.rs
@@ -16,8 +16,8 @@
 // under the License.
 
 use arrow::array::BooleanBufferBuilder;
-use criterion::{criterion_group, criterion_main, Criterion};
-use rand::{rng, Rng};
+use criterion::{Criterion, criterion_group, criterion_main};
+use rand::{Rng, rng};
 
 fn rand_bytes(len: usize) -> Vec {
     let mut rng = rng();
diff --git a/arrow/benches/buffer_bit_ops.rs b/arrow/benches/buffer_bit_ops.rs
index b46544b1f8e7..c569224b0f9b 100644
--- a/arrow/benches/buffer_bit_ops.rs
+++ b/arrow/benches/buffer_bit_ops.rs
@@ -22,7 +22,7 @@ use criterion::{Criterion, Throughput};
 
 extern crate arrow;
 
-use arrow::buffer::{buffer_bin_and, buffer_bin_or, buffer_unary_not, Buffer, MutableBuffer};
+use arrow::buffer::{Buffer, MutableBuffer, buffer_bin_and, buffer_bin_or, buffer_unary_not};
 use std::hint;
 
 /// Helper function to create arrays
diff --git a/arrow/benches/buffer_create.rs b/arrow/benches/buffer_create.rs
index 690327e86f68..be73b2ad218c 100644
--- a/arrow/benches/buffer_create.rs
+++ b/arrow/benches/buffer_create.rs
@@ -19,8 +19,8 @@ extern crate criterion;
 
 use arrow::util::test_util::seedable_rng;
 use criterion::Criterion;
-use rand::distr::Uniform;
 use rand::Rng;
+use rand::distr::Uniform;
 
 extern crate arrow;
diff --git a/arrow/benches/cast_kernels.rs b/arrow/benches/cast_kernels.rs
index 179fde0a70be..a54529c8d108 100644
--- a/arrow/benches/cast_kernels.rs
+++ b/arrow/benches/cast_kernels.rs
@@ -18,8 +18,8 @@
 #[macro_use]
 extern crate criterion;
 use criterion::Criterion;
-use rand::distr::{Distribution, StandardUniform, Uniform};
 use rand::Rng;
+use rand::distr::{Distribution, StandardUniform, Uniform};
 use std::hint;
 
 use chrono::DateTime;
diff --git a/arrow/benches/coalesce_kernels.rs b/arrow/benches/coalesce_kernels.rs
index 941882c70e8d..b85c5cc532db 100644
--- a/arrow/benches/coalesce_kernels.rs
+++ b/arrow/benches/coalesce_kernels.rs
@@ -24,7 +24,7 @@ use arrow::array::*;
 use arrow_array::types::{Float64Type, Int32Type, TimestampNanosecondType};
 use arrow_schema::{DataType, Field, Schema, SchemaRef, TimeUnit};
 use arrow_select::coalesce::BatchCoalescer;
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{Criterion, criterion_group, criterion_main};
 
 /// Benchmarks for generating evently sized output RecordBatches
 /// from a sequence of filtered source batches
diff --git a/arrow/benches/comparison_kernels.rs b/arrow/benches/comparison_kernels.rs
index 6a02deb41ad5..00c01374b625 100644
--- a/arrow/benches/comparison_kernels.rs
+++ b/arrow/benches/comparison_kernels.rs
@@ -27,8 +27,8 @@ use arrow_buffer::IntervalMonthDayNano;
 use arrow_string::like::*;
 use arrow_string::regexp::regexp_is_match_scalar;
 use criterion::Criterion;
-use rand::rngs::StdRng;
 use rand::Rng;
+use rand::rngs::StdRng;
 use std::hint;
 
 const SIZE: usize = 65536;
diff --git a/arrow/benches/decimal_validate.rs b/arrow/benches/decimal_validate.rs
index 7867b10ba222..474b93737005 100644
--- a/arrow/benches/decimal_validate.rs
+++ b/arrow/benches/decimal_validate.rs
@@ -19,8 +19,8 @@
 extern crate criterion;
 
 use arrow::array::{
-    Array, Decimal128Array, Decimal128Builder, Decimal256Array, Decimal256Builder, Decimal32Array,
-    Decimal32Builder, Decimal64Array, Decimal64Builder,
+    Array, Decimal32Array, Decimal32Builder, Decimal64Array, Decimal64Builder, Decimal128Array,
+    Decimal128Builder, Decimal256Array, Decimal256Builder,
 };
 use criterion::Criterion;
 use rand::Rng;
diff --git a/arrow/benches/filter_kernels.rs b/arrow/benches/filter_kernels.rs
index 354fe606dd76..ff117f9d63f5 100644
--- a/arrow/benches/filter_kernels.rs
+++ b/arrow/benches/filter_kernels.rs
@@ -18,7 +18,7 @@ extern crate arrow;
 
 use std::sync::Arc;
 
-use arrow::compute::{filter_record_batch, FilterBuilder, FilterPredicate};
+use arrow::compute::{FilterBuilder, FilterPredicate, filter_record_batch};
 use arrow::util::bench_util::*;
 
 use arrow::array::*;
@@ -26,7 +26,7 @@ use arrow::compute::filter;
 use arrow::datatypes::{Field, Float32Type, Int32Type, Int64Type, Schema, UInt8Type};
 use arrow_array::types::Decimal128Type;
 
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{Criterion, criterion_group, criterion_main};
 use std::hint;
 
 fn bench_filter(data_array: &dyn Array, filter_array: &BooleanArray) {
diff --git a/arrow/benches/lexsort.rs b/arrow/benches/lexsort.rs
index 6e6f607f7b3f..16a2606b919a 100644
--- a/arrow/benches/lexsort.rs
+++ b/arrow/benches/lexsort.rs
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.
 
-use arrow::compute::{lexsort_to_indices, SortColumn};
+use arrow::compute::{SortColumn, lexsort_to_indices};
 use arrow::row::{RowConverter, SortField};
 use arrow::util::bench_util::{
     create_dict_from_values, create_primitive_array, create_string_array_with_len,
@@ -24,7 +24,7 @@ use arrow::util::data_gen::create_random_array;
 use arrow_array::types::Int32Type;
 use arrow_array::{Array, ArrayRef, UInt32Array};
 use arrow_schema::{DataType, Field};
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{Criterion, criterion_group, criterion_main};
 use std::{hint, sync::Arc};
 
 #[derive(Copy, Clone)]
diff --git a/arrow/benches/partition_kernels.rs b/arrow/benches/partition_kernels.rs
index 8e3907d26143..f150d155c317 100644
--- a/arrow/benches/partition_kernels.rs
+++ b/arrow/benches/partition_kernels.rs
@@ -20,7 +20,7 @@ extern crate criterion;
 use criterion::Criterion;
 use std::sync::Arc;
 extern crate arrow;
-use arrow::compute::kernels::sort::{lexsort, SortColumn};
+use arrow::compute::kernels::sort::{SortColumn, lexsort};
 use arrow::util::bench_util::*;
 use arrow::{
     array::*,
diff --git a/arrow/benches/primitive_run_accessor.rs b/arrow/benches/primitive_run_accessor.rs
index 10c1e9ff39a9..a3cd20434060 100644
--- a/arrow/benches/primitive_run_accessor.rs
+++ b/arrow/benches/primitive_run_accessor.rs
@@ -18,7 +18,7 @@
 use arrow::datatypes::Int32Type;
 use arrow::{array::PrimitiveArray, util::bench_util::create_primitive_run_array};
 use arrow_array::ArrayAccessor;
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{Criterion, criterion_group, criterion_main};
 
 fn criterion_benchmark(c: &mut Criterion) {
     let mut group = c.benchmark_group("primitive_run_accessor");
diff --git a/arrow/benches/primitive_run_take.rs b/arrow/benches/primitive_run_take.rs
index 8e5dbced62bd..c394c37c6ccf 100644
--- a/arrow/benches/primitive_run_take.rs
+++ b/arrow/benches/primitive_run_take.rs
@@ -21,7 +21,7 @@ use arrow::datatypes::{Int32Type, Int64Type};
 use arrow::util::bench_util::*;
 use arrow::util::test_util::seedable_rng;
 use arrow_array::UInt32Array;
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{Criterion, criterion_group, criterion_main};
 use rand::Rng;
 use std::hint;
 
diff --git a/arrow/benches/row_format.rs b/arrow/benches/row_format.rs
index 4054ff0dda22..d67095ac2c43 100644
--- a/arrow/benches/row_format.rs
+++ b/arrow/benches/row_format.rs
@@ -28,8 +28,8 @@ use arrow::util::bench_util::{
     create_string_view_array_with_max_len,
 };
 use arrow::util::data_gen::create_random_array;
-use arrow_array::types::Int32Type;
 use arrow_array::Array;
+use arrow_array::types::Int32Type;
 use arrow_schema::{DataType, Field};
 use criterion::Criterion;
 use std::{hint, sync::Arc};
@@ -179,88 +179,104 @@ fn row_bench(c: &mut Criterion) {
         Arc::new(create_string_dict_array::(4096, 0., 100)) as ArrayRef,
         Arc::new(create_primitive_array::(4096, 0.)) as ArrayRef,
     ];
-    do_bench(c, "4096 4096 string_dictionary(20, 0.5), string_dictionary(30, 0), string_dictionary(100, 0), i64(0)", cols);
+    do_bench(
+        c,
+        "4096 4096 string_dictionary(20, 0.5), string_dictionary(30, 0), string_dictionary(100, 0), i64(0)",
+        cols,
+    );
 
     // List
-    let cols = vec![create_random_array(
-        &Field::new(
-            "list",
-            DataType::List(Arc::new(Field::new_list_field(DataType::UInt64, false))),
-            false,
-        ),
-        4096,
-        0.,
-        1.0,
-    )
-    .unwrap()];
+    let cols = vec![
+        create_random_array(
+            &Field::new(
+                "list",
+                DataType::List(Arc::new(Field::new_list_field(DataType::UInt64, false))),
+                false,
+            ),
+            4096,
+            0.,
+            1.0,
+        )
+        .unwrap(),
+    ];
     do_bench(c, "4096 list(0) of u64(0)", cols);
 
-    let cols = vec![create_random_array(
-        &Field::new(
-            "list",
-            DataType::LargeList(Arc::new(Field::new_list_field(DataType::UInt64, false))),
-            false,
-        ),
-        4096,
-        0.,
-        1.0,
-    )
-    .unwrap()];
+    let cols = vec![
+        create_random_array(
+            &Field::new(
+                "list",
+                DataType::LargeList(Arc::new(Field::new_list_field(DataType::UInt64, false))),
+                false,
+            ),
+            4096,
+            0.,
+            1.0,
+        )
+        .unwrap(),
+    ];
     do_bench(c, "4096 large_list(0) of u64(0)", cols);
 
-    let cols = vec![create_random_array(
-        &Field::new(
-            "list",
-            DataType::List(Arc::new(Field::new_list_field(DataType::UInt64, false))),
-            false,
-        ),
-        10,
-        0.,
-        1.0,
-    )
-    .unwrap()];
+    let cols = vec![
+        create_random_array(
+            &Field::new(
+                "list",
+                DataType::List(Arc::new(Field::new_list_field(DataType::UInt64, false))),
+                false,
+            ),
+            10,
+            0.,
+            1.0,
+        )
+        .unwrap(),
+    ];
     do_bench(c, "10 list(0) of u64(0)", cols);
 
-    let cols = vec![create_random_array(
-        &Field::new(
-            "list",
-            DataType::LargeList(Arc::new(Field::new_list_field(DataType::UInt64, false))),
-            false,
-        ),
-        10,
-        0.,
-        1.0,
-    )
-    .unwrap()];
+    let cols = vec![
+        create_random_array(
+            &Field::new(
+                "list",
+                DataType::LargeList(Arc::new(Field::new_list_field(DataType::UInt64, false))),
+                false,
+            ),
+            10,
+            0.,
+            1.0,
+        )
+        .unwrap(),
+    ];
     do_bench(c, "10 large_list(0) of u64(0)", cols);
 
-    let cols = vec![create_random_array(
-        &Field::new(
-            "list",
-            DataType::List(Arc::new(Field::new_list_field(DataType::UInt64, false))),
-            false,
-        ),
-        4096,
-        0.,
-        1.0,
-    )
-    .unwrap()
-    .slice(10, 20)];
+    let cols = vec![
+        create_random_array(
+            &Field::new(
+                "list",
+                DataType::List(Arc::new(Field::new_list_field(DataType::UInt64, false))),
+                false,
+            ),
+            4096,
+            0.,
+            1.0,
+        )
+        .unwrap()
+        .slice(10, 20),
+    ];
     do_bench(c, "4096 list(0) sliced to 10 of u64(0)", cols);
 
-    let cols = vec![create_random_array(
-        &Field::new(
-            "list",
-            DataType::LargeList(Arc::new(Field::new_list_field(DataType::UInt64, false))),
-            false,
-        ),
-        4096,
-        0.,
-        1.0,
-    )
-    .unwrap()
-    .slice(10, 20)];
+    let cols = vec![
+        create_random_array(
+            &Field::new(
+                "list",
+                DataType::LargeList(Arc::new(Field::new_list_field(DataType::UInt64, false))),
+                false,
+            ),
+            4096,
+            0.,
+            1.0,
+        )
+        .unwrap()
+        .slice(10, 20),
+    ];
     do_bench(c, "4096 large_list(0) sliced to 10 of u64(0)", cols);
 
     bench_iter(c);
diff --git a/arrow/benches/sort_kernel.rs b/arrow/benches/sort_kernel.rs
index 8fcd8a570daf..408d55b5cc6e 100644
--- a/arrow/benches/sort_kernel.rs
+++ b/arrow/benches/sort_kernel.rs
@@ -23,7 +23,7 @@ use std::sync::Arc;
 
 extern crate arrow;
 
-use arrow::compute::{lexsort, sort, sort_to_indices, SortColumn};
+use arrow::compute::{SortColumn, lexsort, sort, sort_to_indices};
 use arrow::datatypes::{Int16Type, Int32Type};
 use arrow::util::bench_util::*;
 use arrow::{array::*, datatypes::Float32Type};
diff --git a/arrow/benches/string_dictionary_builder.rs b/arrow/benches/string_dictionary_builder.rs
index a39fd5d03847..a47995efb0e5 100644
--- a/arrow/benches/string_dictionary_builder.rs
+++ b/arrow/benches/string_dictionary_builder.rs
@@ -17,8 +17,8 @@
 
 use arrow::array::StringDictionaryBuilder;
 use arrow::datatypes::Int32Type;
-use criterion::{criterion_group, criterion_main, Criterion};
-use rand::{rng, Rng};
+use criterion::{Criterion, criterion_group, criterion_main};
+use rand::{Rng, rng};
 
 /// Note: this is best effort, not all keys are necessarily present or unique
 fn build_strings(dict_size: usize, total_size: usize, key_len: usize) -> Vec {
diff --git a/arrow/benches/string_run_builder.rs b/arrow/benches/string_run_builder.rs
index b4457b74dada..ab4e645cf894 100644
--- a/arrow/benches/string_run_builder.rs
+++ b/arrow/benches/string_run_builder.rs
@@ -18,7 +18,7 @@
 use arrow::array::StringRunBuilder;
 use arrow::datatypes::Int32Type;
 use arrow::util::bench_util::create_string_array_for_runs;
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{Criterion, criterion_group, criterion_main};
 
 fn criterion_benchmark(c: &mut Criterion) {
     let mut group = c.benchmark_group("string_run_builder");
diff --git a/arrow/benches/string_run_iterator.rs b/arrow/benches/string_run_iterator.rs
index 9766f10b4d73..3008c09f09d4 100644
--- a/arrow/benches/string_run_iterator.rs
+++ b/arrow/benches/string_run_iterator.rs
@@ -17,8 +17,8 @@
 
 use arrow::array::{Int32RunArray, StringArray, StringRunBuilder};
 use arrow::datatypes::Int32Type;
-use criterion::{criterion_group, criterion_main, Criterion};
-use rand::{rng, Rng};
+use criterion::{Criterion, criterion_group, criterion_main};
+use rand::{Rng, rng};
 
 fn build_strings_runs(
     physical_array_len: usize,
diff --git a/arrow/benches/take_kernels.rs b/arrow/benches/take_kernels.rs
index 8f6f92a375e3..37b83a5e33ed 100644
--- a/arrow/benches/take_kernels.rs
+++ b/arrow/benches/take_kernels.rs
@@ -23,7 +23,7 @@ use rand::Rng;
 
 extern crate arrow;
 
-use arrow::compute::{take, TakeOptions};
+use arrow::compute::{TakeOptions, take};
 use arrow::datatypes::*;
 use arrow::util::test_util::seedable_rng;
 use arrow::{array::*, util::bench_util::*};
diff --git a/arrow/examples/collect.rs b/arrow/examples/collect.rs
index ced4640d600f..57b78a822ae6 100644
--- a/arrow/examples/collect.rs
+++ b/arrow/examples/collect.rs
@@ -20,7 +20,7 @@
 
 use arrow::array::Array;
 use arrow_array::types::Int32Type;
-use arrow_array::{Float32Array, Int32Array, Int8Array, ListArray};
+use arrow_array::{Float32Array, Int8Array, Int32Array, ListArray};
 
 fn main() {
     // Primitive Arrays
@@ -71,11 +71,13 @@ fn main() {
             .unwrap()
             .values()
     );
-    assert!(!list2
-        .as_any()
-        .downcast_ref::()
-        .unwrap()
-        .is_valid(1));
+    assert!(
+        !list2
+            .as_any()
+            .downcast_ref::()
+            .unwrap()
+            .is_valid(1)
+    );
     assert_eq!(
         &[6, 7],
         list3
diff --git a/arrow/examples/zero_copy_ipc.rs b/arrow/examples/zero_copy_ipc.rs
index 15fc477c59cf..0c80572cf468 100644
--- a/arrow/examples/zero_copy_ipc.rs
+++ b/arrow/examples/zero_copy_ipc.rs
@@ -20,14 +20,14 @@
 //! Zero copy in this case means the Arrow arrays refer directly to a user
 //! provided buffer or memory region.
 
-use arrow::array::{record_batch, RecordBatch};
+use arrow::array::{RecordBatch, record_batch};
 use arrow::error::Result;
 use arrow_buffer::Buffer;
 use arrow_cast::pretty::pretty_format_batches;
 use arrow_ipc::convert::fb_to_schema;
-use arrow_ipc::reader::{read_footer_length, FileDecoder};
+use arrow_ipc::reader::{FileDecoder, read_footer_length};
 use arrow_ipc::writer::FileWriter;
-use arrow_ipc::{root_as_footer, Block};
+use arrow_ipc::{Block, root_as_footer};
 use std::path::PathBuf;
 use std::sync::Arc;
diff --git a/arrow/src/array/mod.rs b/arrow/src/array/mod.rs
index 985ce70fcdb8..f95afc4928df 100644
--- a/arrow/src/array/mod.rs
+++ b/arrow/src/array/mod.rs
@@ -25,7 +25,7 @@
 pub use arrow_array::cast::*;
 pub use arrow_array::iterator::*;
 pub use arrow_array::*;
 pub use arrow_data::{
-    layout, ArrayData, ArrayDataBuilder, ArrayDataRef, BufferSpec, ByteView, DataTypeLayout,
+    ArrayData, ArrayDataBuilder, ArrayDataRef, BufferSpec, ByteView, DataTypeLayout, layout,
 };
 pub use arrow_data::transform::{Capacities, MutableArrayData};
 
@@ -35,4 +35,4 @@ pub use arrow_data::transform::{Capacities, MutableArrayData};
 pub use arrow_array::ffi::export_array_into_raw;
 
 // --------------------- Array's values comparison ---------------------
-pub use arrow_ord::ord::{make_comparator, DynComparator};
+pub use arrow_ord::ord::{DynComparator, make_comparator};
diff --git a/arrow/src/datatypes/mod.rs b/arrow/src/datatypes/mod.rs
index d41289d52e2a..4286128a76e1 100644
--- a/arrow/src/datatypes/mod.rs
+++ b/arrow/src/datatypes/mod.rs
@@ -24,7 +24,7 @@
 pub use arrow_array::types::*;
 pub use arrow_array::{ArrowNativeTypeOp, ArrowNumericType, ArrowPrimitiveType};
-pub use arrow_buffer::{i256, ArrowNativeType, ToByteSlice};
+pub use arrow_buffer::{ArrowNativeType, ToByteSlice, i256};
 pub use arrow_data::decimal::*;
 pub use arrow_schema::{
     DataType, Field, FieldRef, Fields, IntervalUnit, Schema, SchemaBuilder, SchemaRef, TimeUnit,
diff --git a/arrow/src/util/bench_util.rs b/arrow/src/util/bench_util.rs
index 1b7819001c9c..4bd648bc40ad 100644
--- a/arrow/src/util/bench_util.rs
+++ b/arrow/src/util/bench_util.rs
@@ -22,10 +22,10 @@ use crate::datatypes::*;
 use crate::util::test_util::seedable_rng;
 use arrow_buffer::{Buffer, IntervalMonthDayNano};
 use half::f16;
-use rand::distr::uniform::SampleUniform;
-use rand::rng;
 use rand::Rng;
 use rand::SeedableRng;
+use rand::distr::uniform::SampleUniform;
+use rand::rng;
 use rand::{
     distr::{Alphanumeric, Distribution, StandardUniform},
     prelude::StdRng,
diff --git a/arrow/src/util/data_gen.rs b/arrow/src/util/data_gen.rs
index 70af62e6b40d..89bbe4b1fbcb 100644
--- a/arrow/src/util/data_gen.rs
+++ b/arrow/src/util/data_gen.rs
@@ -20,8 +20,8 @@
 use std::sync::Arc;
 
 use rand::{
-    distr::uniform::{SampleRange, SampleUniform},
     Rng,
+    distr::uniform::{SampleRange, SampleUniform},
 };
 
 use crate::array::*;
@@ -118,7 +118,7 @@ pub fn create_random_array(
         Float16 => {
             return Err(ArrowError::NotYetImplemented(
                 "Float16 is not implemented".to_string(),
-            ))
+            ));
         }
         Float32 => Arc::new(create_primitive_array::(
            size,
@@ -174,7 +174,7 @@ pub fn create_random_array(
             _ => {
                 return Err(ArrowError::InvalidArgumentError(format!(
                     "Unsupported unit {unit:?} for Time32"
-                )))
+                )));
             }
         },
         Time64(unit) => match unit {
@@ -188,7 +188,7 @@ pub fn create_random_array(
             _ => {
                 return Err(ArrowError::InvalidArgumentError(format!(
                     "Unsupported unit {unit:?} for Time64"
-                )))
+                )));
             }
         },
         Utf8 => Arc::new(create_string_array::(size, primitive_null_density)),
@@ -228,7 +228,7 @@ pub fn create_random_array(
         other => {
             return Err(ArrowError::NotYetImplemented(format!(
                 "Generating random arrays not yet implemented for {other:?}"
-            )))
+            )));
         }
     })
 }
@@ -299,7 +299,7 @@ fn create_random_list_array(
         _ => {
            return Err(ArrowError::InvalidArgumentError(format!(
                 "Cannot create list array for field {field}"
-            )))
+            )));
         }
     };
 
@@ -337,7 +337,7 @@ fn create_random_struct_array(
         _ => {
             return Err(ArrowError::InvalidArgumentError(format!(
                 "Cannot create struct array for field {field}"
-            )))
+            )));
         }
     };
 
@@ -383,7 +383,7 @@ fn create_random_map_array(
         _ => {
             return Err(ArrowError::InvalidArgumentError(format!(
                 "Cannot create map array for field {field:?}"
-            )))
+            )));
         }
     };
 
diff --git a/arrow/src/util/test_util.rs b/arrow/src/util/test_util.rs
index 566ccc6ab536..dbcea03ee74d 100644
--- a/arrow/src/util/test_util.rs
+++ b/arrow/src/util/test_util.rs
@@ -17,7 +17,7 @@
 
 //! Utils to make testing easier
 
-use rand::{rngs::StdRng, Rng, SeedableRng};
+use rand::{Rng, SeedableRng, rngs::StdRng};
 use std::{env, error::Error, fs, io::Write, path::PathBuf};
 
 /// Returns a vector of size `n`, filled with randomly generated bytes.
@@ -216,26 +216,26 @@ mod tests {
         let non_existing = cwd.join("non-existing-dir").display().to_string();
         let non_existing_str = non_existing.as_str();
 
-        env::set_var(udf_env, non_existing_str);
+        unsafe { env::set_var(udf_env, non_existing_str) };
         let res = get_data_dir(udf_env, existing_str);
         assert!(res.is_err());
 
-        env::set_var(udf_env, "");
+        unsafe { env::set_var(udf_env, "") };
         let res = get_data_dir(udf_env, existing_str);
         assert!(res.is_ok());
         assert_eq!(res.unwrap(), existing_pb);
 
-        env::set_var(udf_env, " ");
+        unsafe { env::set_var(udf_env, " ") };
         let res = get_data_dir(udf_env, existing_str);
         assert!(res.is_ok());
         assert_eq!(res.unwrap(), existing_pb);
 
-        env::set_var(udf_env, existing_str);
+        unsafe { env::set_var(udf_env, existing_str) };
         let res = get_data_dir(udf_env, existing_str);
         assert!(res.is_ok());
         assert_eq!(res.unwrap(), existing_pb);
 
-        env::remove_var(udf_env);
+        unsafe { env::remove_var(udf_env) };
         let res = get_data_dir(udf_env, non_existing_str);
         assert!(res.is_err());
 
diff --git a/arrow/tests/arithmetic.rs b/arrow/tests/arithmetic.rs
index 59a162ef6dc0..cc6a97e123f8 100644
--- a/arrow/tests/arithmetic.rs
+++ b/arrow/tests/arithmetic.rs
@@ -16,7 +16,7 @@
 // under the License.
 
 use arrow_arith::numeric::{add, sub};
-use arrow_arith::temporal::{date_part, DatePart};
+use arrow_arith::temporal::{DatePart, date_part};
 use arrow_array::cast::AsArray;
 use arrow_array::temporal_conversions::as_datetime_with_timezone;
 use arrow_array::timezone::Tz;
diff --git a/arrow/tests/array_cast.rs b/arrow/tests/array_cast.rs
index 522687c3e493..3dcbfd970a2b 100644
--- a/arrow/tests/array_cast.rs
+++ b/arrow/tests/array_cast.rs
@@ -18,23 +18,23 @@
 use arrow_array::builder::{PrimitiveDictionaryBuilder, StringDictionaryBuilder, UnionBuilder};
 use arrow_array::cast::AsArray;
 use arrow_array::types::{
-    ArrowDictionaryKeyType, Decimal128Type, Decimal256Type, Decimal32Type, Decimal64Type,
-    Int16Type, Int32Type, Int64Type, Int8Type, TimestampMicrosecondType, UInt16Type, UInt32Type,
-    UInt64Type, UInt8Type,
+    ArrowDictionaryKeyType, Decimal32Type, Decimal64Type, Decimal128Type, Decimal256Type, Int8Type,
+    Int16Type, Int32Type, Int64Type, TimestampMicrosecondType, UInt8Type, UInt16Type, UInt32Type,
+    UInt64Type,
 };
 use arrow_array::{
     Array, ArrayRef, ArrowPrimitiveType, BinaryArray, BooleanArray, Date32Array, Date64Array,
-    Decimal128Array, Decimal256Array, Decimal32Array, Decimal64Array, DurationMicrosecondArray,
+    Decimal32Array, Decimal64Array, Decimal128Array, Decimal256Array, DurationMicrosecondArray,
     DurationMillisecondArray, DurationNanosecondArray, DurationSecondArray, FixedSizeBinaryArray,
-    FixedSizeListArray, Float16Array, Float32Array, Float64Array, Int16Array, Int32Array,
-    Int64Array, Int8Array, IntervalDayTimeArray, IntervalMonthDayNanoArray, IntervalYearMonthArray,
-    LargeBinaryArray, LargeListArray, LargeStringArray, ListArray, NullArray, PrimitiveArray,
-    StringArray, StructArray, Time32MillisecondArray, Time32SecondArray, Time64MicrosecondArray,
-    Time64NanosecondArray, TimestampMicrosecondArray, TimestampMillisecondArray,
-    TimestampNanosecondArray, TimestampSecondArray, UInt16Array, UInt32Array, UInt64Array,
-    UInt8Array, UnionArray,
+    FixedSizeListArray, Float16Array, Float32Array, Float64Array, Int8Array, Int16Array,
+    Int32Array, Int64Array, IntervalDayTimeArray, IntervalMonthDayNanoArray,
+    IntervalYearMonthArray, LargeBinaryArray, LargeListArray, LargeStringArray, ListArray,
+    NullArray, PrimitiveArray, StringArray, StructArray, Time32MillisecondArray, Time32SecondArray,
+    Time64MicrosecondArray, Time64NanosecondArray, TimestampMicrosecondArray,
+    TimestampMillisecondArray, TimestampNanosecondArray, TimestampSecondArray, UInt8Array,
+    UInt16Array, UInt32Array, UInt64Array, UnionArray,
 };
-use arrow_buffer::{i256, Buffer, IntervalDayTime, IntervalMonthDayNano};
+use arrow_buffer::{Buffer, IntervalDayTime, IntervalMonthDayNano, i256};
 use arrow_cast::pretty::pretty_format_columns;
 use arrow_cast::{can_cast_types, cast};
 use arrow_data::ArrayData;
@@ -164,13 +164,22 @@ fn test_can_cast_types() {
             // check for mismatch
             match (cast_result, reported_cast_ability) {
                 (Ok(_), false) => {
-                    panic!("Was able to cast array {:?} from {:?} to {:?} but can_cast_types reported false",
-                           array, array.data_type(), to_type)
+                    panic!(
+                        "Was able to cast array {:?} from {:?} to {:?} but can_cast_types reported false",
+                        array,
+                        array.data_type(),
+                        to_type
+                    )
                 }
                 (Err(e), true) => {
-                    panic!("Was not able to cast array {:?} from {:?} to {:?} but can_cast_types reported true. \
+                    panic!(
+                        "Was not able to cast array {:?} from {:?} to {:?} but can_cast_types reported true. \
                            Error was {:?}",
-                           array, array.data_type(), to_type, e)
+                        array,
+                        array.data_type(),
+                        to_type,
+                        e
+                    )
                 }
                 // otherwise it was a match
                 _ => {}
diff --git a/arrow/tests/array_equal.rs b/arrow/tests/array_equal.rs
index 94fb85030bf3..7fc8b0be7a3d 100644
--- a/arrow/tests/array_equal.rs
+++ b/arrow/tests/array_equal.rs
@@ -16,10 +16,10 @@
 // under the License.
 
 use arrow::array::{
-    make_array, Array, ArrayRef, BooleanArray, Decimal128Array, FixedSizeBinaryArray,
-    FixedSizeBinaryBuilder, FixedSizeListBuilder, GenericBinaryArray, GenericStringArray,
-    Int32Array, Int32Builder, Int64Builder, ListArray, ListBuilder, NullArray, OffsetSizeTrait,
-    StringArray, StringDictionaryBuilder, StructArray, UnionBuilder,
+    Array, ArrayRef, BooleanArray, Decimal128Array, FixedSizeBinaryArray, FixedSizeBinaryBuilder,
+    FixedSizeListBuilder, GenericBinaryArray, GenericStringArray, Int32Array, Int32Builder,
+    Int64Builder, ListArray, ListBuilder, NullArray, OffsetSizeTrait, StringArray,
+    StringDictionaryBuilder, StructArray, UnionBuilder, make_array,
 };
 use arrow::datatypes::{Int16Type, Int32Type};
 use arrow_array::builder::{StringBuilder, StringViewBuilder, StructBuilder};
diff --git a/arrow/tests/array_transform.rs b/arrow/tests/array_transform.rs
index c6de9f4a3417..511dc1e8bfcd 100644
--- a/arrow/tests/array_transform.rs
+++ b/arrow/tests/array_transform.rs
@@ -19,13 +19,13 @@ use arrow::array::{
     Array, ArrayRef, BooleanArray, Decimal128Array, DictionaryArray, FixedSizeBinaryArray,
     FixedSizeListBuilder, Int16Array, Int32Array, Int64Array, Int64Builder, ListArray, ListBuilder,
     MapBuilder, NullArray, StringArray, StringBuilder, StringDictionaryBuilder, StructArray,
-    UInt16Array, UInt16Builder, UInt8Array, UnionArray,
+    UInt8Array, UInt16Array, UInt16Builder, UnionArray,
 };
 use arrow::datatypes::Int16Type;
 use arrow_array::StringViewArray;
 use arrow_buffer::{Buffer, ScalarBuffer};
-use arrow_data::transform::MutableArrayData;
 use arrow_data::ArrayData;
+use arrow_data::transform::MutableArrayData;
 use arrow_schema::{DataType, Field, Fields, UnionFields};
 use std::sync::Arc;
diff --git a/arrow/tests/array_validation.rs b/arrow/tests/array_validation.rs
index e1f6944a93bb..66a7b7c45255 100644
--- a/arrow/tests/array_validation.rs
+++ b/arrow/tests/array_validation.rs
@@ -16,8 +16,8 @@
 // under the License.
 
 use arrow::array::{
-    make_array, Array, BooleanBuilder, Decimal128Builder, Int32Array, Int32Builder, Int64Array,
-    StringArray, StructBuilder, UInt64Array,
+    Array, BooleanBuilder, Decimal128Builder, Int32Array, Int32Builder, Int64Array, StringArray,
+    StructBuilder, UInt64Array, make_array,
 };
 use arrow_array::Decimal128Array;
 use arrow_buffer::{ArrowNativeType, Buffer};
@@ -1106,5 +1106,8 @@ fn test_sliced_array_child() {
     };
 
     let err = data.validate_values().unwrap_err();
-    assert_eq!(err.to_string(), "Invalid argument error: Offset invariant failure: offset at position 1 out of bounds: 3 > 2");
+    assert_eq!(
+        err.to_string(),
+        "Invalid argument error: Offset invariant failure: offset at position 1 out of bounds: 3 > 2"
+    );
 }
diff --git a/arrow/tests/shrink_to_fit.rs b/arrow/tests/shrink_to_fit.rs
index 5d7c2cf98bc9..1613fefeda98 100644
--- a/arrow/tests/shrink_to_fit.rs
+++ b/arrow/tests/shrink_to_fit.rs
@@ -50,7 +50,9 @@ fn test_shrink_to_fit_after_concat() {
     });
     let expected_len = num_concats * array_len;
     assert_eq!(bytes_used(concatenated.clone()), expected_len);
-    eprintln!("The concatenated array is {expected_len} B long. Amount of memory used by this thread: {bytes_allocated_by_this_thread} B");
+    eprintln!(
+        "The concatenated array is {expected_len} B long. Amount of memory used by this thread: {bytes_allocated_by_this_thread} B"
+    );
 
     assert!(
         expected_len <= bytes_allocated_by_this_thread,
@@ -91,8 +93,8 @@ fn bytes_used(array: ArrayRef) -> usize {
     use std::{
         alloc::Layout,
         sync::{
-            atomic::{AtomicUsize, Ordering::Relaxed},
             Arc,
+            atomic::{AtomicUsize, Ordering::Relaxed},
         },
     };
 
diff --git a/arrow/tests/timezone.rs b/arrow/tests/timezone.rs
index d0db1d76e422..7b5ec8fbecb0 100644
--- a/arrow/tests/timezone.rs
+++ b/arrow/tests/timezone.rs
@@ -58,7 +58,7 @@ fn test_parse_timezone_invalid() {
         ),
         (
             "2023-01-01 04:05:06.789 +07:30:00",
-            "Parser error: Invalid timezone \"+07:30:00\": failed to parse timezone"
+            "Parser error: Invalid timezone \"+07:30:00\": failed to parse timezone",
         ),
         (
             // Sunday, 12 March 2023, 02:00:00 clocks are turned forward 1 hour to
diff --git a/parquet-geospatial/Cargo.toml b/parquet-geospatial/Cargo.toml
index 0a28ed3c5bcd..ea19f9c9b49f 100644
--- a/parquet-geospatial/Cargo.toml
+++ b/parquet-geospatial/Cargo.toml
@@ -27,7 +27,7 @@ repository = { workspace = true }
 authors = { workspace = true }
 keywords = ["arrow", "parquet", "geometry", "geography"]
 readme = "README.md"
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [dependencies]
diff --git a/parquet-variant-compute/Cargo.toml b/parquet-variant-compute/Cargo.toml
index 1dfc1ec80f3a..cadfa1cbb63a 100644
--- a/parquet-variant-compute/Cargo.toml
+++ b/parquet-variant-compute/Cargo.toml
@@ -26,7 +26,7 @@ homepage = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 keywords = ["arrow", "parquet", "variant"]
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 
diff --git a/parquet-variant-json/Cargo.toml b/parquet-variant-json/Cargo.toml
index aef736d10735..e85704c4148d 100644
--- a/parquet-variant-json/Cargo.toml
+++ b/parquet-variant-json/Cargo.toml
@@ -27,7 +27,7 @@ repository = { workspace = true }
 authors = { workspace = true }
 keywords = ["arrow", "parquet", "variant"]
 readme = "../parquet-variant/README.md"
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 
diff --git a/parquet-variant/Cargo.toml b/parquet-variant/Cargo.toml
index b1985e5f35fc..f1282e8cdab3 100644
--- a/parquet-variant/Cargo.toml
+++ b/parquet-variant/Cargo.toml
@@ -27,7 +27,7 @@ repository = { workspace = true }
 authors = { workspace = true }
 keywords = ["arrow", "parquet", "variant"]
 readme = "README.md"
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [dependencies]
diff --git a/parquet/Cargo.toml b/parquet/Cargo.toml
index 8f76f96d842b..aa0071ca38e5 100644
--- a/parquet/Cargo.toml
+++ b/parquet/Cargo.toml
@@ -25,7 +25,7 @@ repository = { workspace = true }
 authors = { workspace = true }
 keywords = ["arrow", "parquet", "hadoop"]
 readme = "README.md"
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [target.'cfg(target_arch = "wasm32")'.dependencies]
diff --git a/parquet_derive/Cargo.toml b/parquet_derive/Cargo.toml
index 9e41e3107447..033d5173402e 100644
--- a/parquet_derive/Cargo.toml
+++ b/parquet_derive/Cargo.toml
@@ -25,7 +25,7 @@ repository = { workspace = true }
 authors = { workspace = true }
 keywords = ["parquet"]
 readme = "README.md"
-edition = "2024"
+edition = { workspace = true }
 rust-version = { workspace = true }
 
 [lib]
diff --git a/parquet_derive_test/Cargo.toml b/parquet_derive_test/Cargo.toml
index 53b2d52fde13..168d10891d3c 100644
--- a/parquet_derive_test/Cargo.toml
+++ b/parquet_derive_test/Cargo.toml
@@ -24,7 +24,7 @@ homepage = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 keywords = [ "parquet" ]
-edition = "2024"
+edition = { workspace = true }
 publish = false
 rust-version = { workspace = true }
 
diff --git a/rustfmt.toml b/rustfmt.toml
index 585c1b612978..bc9377059f7d 100644
--- a/rustfmt.toml
+++ b/rustfmt.toml
@@ -15,4 +15,4 @@
 # specific language governing permissions and limitations
 # under the License.
 
-edition = "2021"
+style_edition = "2024"