Skip to content

Commit b073ff5

Browse files
committed
chore: cargo fmt
1 parent 1fd2f95 commit b073ff5

File tree

1 file changed

+37
-39
lines changed

1 file changed

+37
-39
lines changed

crates/iceberg/src/writer/file_writer/parquet_writer.rs

Lines changed: 37 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,11 @@
1919
2020
use std::collections::hash_map::Entry;
2121
use std::collections::HashMap;
22+
use std::ops::Deref;
2223
use std::sync::atomic::AtomicI64;
2324
use std::sync::Arc;
24-
use std::ops::Deref;
2525

26-
use arrow_array::{Float32Array, Float64Array, ArrayRef, StructArray, ListArray, MapArray};
26+
use arrow_array::{ArrayRef, Float32Array, Float64Array, ListArray, MapArray, StructArray};
2727
use arrow_schema::{DataType, SchemaRef as ArrowSchemaRef};
2828
use bytes::Bytes;
2929
use futures::future::BoxFuture;
@@ -791,12 +791,12 @@ mod tests {
791791
use std::sync::Arc;
792792

793793
use anyhow::Result;
794-
use arrow_array::types::{Int64Type, Float32Type};
794+
use arrow_array::builder::{Float32Builder, Int32Builder, MapBuilder};
795+
use arrow_array::types::{Float32Type, Int64Type};
795796
use arrow_array::{
796797
Array, ArrayRef, BooleanArray, Decimal128Array, Float32Array, Int32Array, Int64Array,
797798
ListArray, RecordBatch, StructArray,
798799
};
799-
use arrow_array::builder::{MapBuilder, Int32Builder, Float32Builder};
800800
use arrow_schema::{DataType, Field, Fields, SchemaRef as ArrowSchemaRef};
801801
use arrow_select::concat::concat_batches;
802802
use parquet::arrow::PARQUET_FIELD_ID_META_KEY;
@@ -1088,11 +1088,8 @@ mod tests {
10881088
None,
10891089
)) as ArrayRef;
10901090

1091-
let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![
1092-
float_32_col,
1093-
float_64_col,
1094-
])
1095-
.unwrap();
1091+
let to_write =
1092+
RecordBatch::try_new(arrow_schema.clone(), vec![float_32_col, float_64_col]).unwrap();
10961093

10971094
// write data
10981095
let mut pw = ParquetWriterBuilder::new(
@@ -1120,23 +1117,14 @@ mod tests {
11201117

11211118
// check data file
11221119
assert_eq!(data_file.record_count(), 4);
1123-
assert_eq!(
1124-
*data_file.value_counts(),
1125-
HashMap::from([(0, 4), (1, 4)])
1126-
);
1120+
assert_eq!(*data_file.value_counts(), HashMap::from([(0, 4), (1, 4)]));
11271121
assert_eq!(
11281122
*data_file.lower_bounds(),
1129-
HashMap::from([
1130-
(0, Datum::float(1.0)),
1131-
(1, Datum::double(1.0)),
1132-
])
1123+
HashMap::from([(0, Datum::float(1.0)), (1, Datum::double(1.0)),])
11331124
);
11341125
assert_eq!(
11351126
*data_file.upper_bounds(),
1136-
HashMap::from([
1137-
(0, Datum::float(2.0)),
1138-
(1, Datum::double(2.0)),
1139-
])
1127+
HashMap::from([(0, Datum::float(2.0)), (1, Datum::double(2.0)),])
11401128
);
11411129
assert_eq!(
11421130
*data_file.null_value_counts(),
@@ -1265,23 +1253,14 @@ mod tests {
12651253

12661254
// check data file
12671255
assert_eq!(data_file.record_count(), 4);
1268-
assert_eq!(
1269-
*data_file.value_counts(),
1270-
HashMap::from([(4, 4), (7, 4)])
1271-
);
1256+
assert_eq!(*data_file.value_counts(), HashMap::from([(4, 4), (7, 4)]));
12721257
assert_eq!(
12731258
*data_file.lower_bounds(),
1274-
HashMap::from([
1275-
(4, Datum::float(1.0)),
1276-
(7, Datum::float(1.0)),
1277-
])
1259+
HashMap::from([(4, Datum::float(1.0)), (7, Datum::float(1.0)),])
12781260
);
12791261
assert_eq!(
12801262
*data_file.upper_bounds(),
1281-
HashMap::from([
1282-
(4, Datum::float(2.0)),
1283-
(7, Datum::float(2.0)),
1284-
])
1263+
HashMap::from([(4, Datum::float(2.0)), (7, Datum::float(2.0)),])
12851264
);
12861265
assert_eq!(
12871266
*data_file.null_value_counts(),
@@ -1580,8 +1559,11 @@ mod tests {
15801559
None,
15811560
)) as ArrayRef;
15821561

1583-
let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![map_array, struct_list_float_field_col])
1584-
.expect("Could not form record batch");
1562+
let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![
1563+
map_array,
1564+
struct_list_float_field_col,
1565+
])
1566+
.expect("Could not form record batch");
15851567

15861568
// write data
15871569
let mut pw = ParquetWriterBuilder::new(
@@ -1614,20 +1596,36 @@ mod tests {
16141596

16151597
// check data file
16161598
assert_eq!(data_file.record_count(), 4);
1617-
assert_eq!(*data_file.value_counts(), HashMap::from([(1, 4), (2, 4), (6, 4), (7, 4)]));
1599+
assert_eq!(
1600+
*data_file.value_counts(),
1601+
HashMap::from([(1, 4), (2, 4), (6, 4), (7, 4)])
1602+
);
16181603
assert_eq!(
16191604
*data_file.lower_bounds(),
1620-
HashMap::from([(1, Datum::int(1)), (2, Datum::float(1.0)), (6, Datum::int(1)), (7, Datum::float(1.0))])
1605+
HashMap::from([
1606+
(1, Datum::int(1)),
1607+
(2, Datum::float(1.0)),
1608+
(6, Datum::int(1)),
1609+
(7, Datum::float(1.0))
1610+
])
16211611
);
16221612
assert_eq!(
16231613
*data_file.upper_bounds(),
1624-
HashMap::from([(1, Datum::int(4)), (2, Datum::float(2.0)), (6, Datum::int(4)), (7, Datum::float(2.0))])
1614+
HashMap::from([
1615+
(1, Datum::int(4)),
1616+
(2, Datum::float(2.0)),
1617+
(6, Datum::int(4)),
1618+
(7, Datum::float(2.0))
1619+
])
16251620
);
16261621
assert_eq!(
16271622
*data_file.null_value_counts(),
16281623
HashMap::from([(1, 0), (2, 0), (6, 0), (7, 0)])
16291624
);
1630-
assert_eq!(*data_file.nan_value_counts(), HashMap::from([(2, 1), (7, 1)]));
1625+
assert_eq!(
1626+
*data_file.nan_value_counts(),
1627+
HashMap::from([(2, 1), (7, 1)])
1628+
);
16311629

16321630
// check the written file
16331631
let expect_batch = concat_batches(&arrow_schema, vec![&to_write]).unwrap();

0 commit comments

Comments (0)