Support writing GeospatialStatistics in Parquet writer #8524
@@ -29,3 +29,4 @@
 pub mod bounding;
 pub mod interval;
+pub mod testing;
@@ -0,0 +1,66 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

//! Testing utilities for geospatial Parquet types

/// Build well-known binary representing a point with the given XY coordinate
pub fn wkb_point_xy(x: f64, y: f64) -> Vec<u8> {
    let mut item: [u8; 21] = [0; 21];
    item[0] = 0x01;
    item[1] = 0x01;
    item[5..13].copy_from_slice(x.to_le_bytes().as_slice());
    item[13..21].copy_from_slice(y.to_le_bytes().as_slice());
    item.to_vec()
}
Comment on lines +21 to +28:

It's not a huge deal for XY and XYZM points, but if we want more complex helpers for more complex geometries, I think it would be more maintainable and more understandable for future people to use an existing crate to generate the WKB buffers. (In my own projects I use …)

Reply: Definitely! The …
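As a rough illustration of that trade-off, a hand-rolled helper for even a simple 2D linestring would follow the same manual byte-layout pattern as `wkb_point_xy`, and the bookkeeping grows quickly for more complex geometries. The sketch below is a hypothetical addition for illustration only, not code from this PR:

```rust
/// Hypothetical helper (not in this PR): build little-endian WKB for a 2D
/// linestring, in the same hand-rolled style as `wkb_point_xy` above.
pub fn wkb_linestring_xy(coords: &[(f64, f64)]) -> Vec<u8> {
    // 1 byte byte-order marker + 4 bytes geometry type + 4 bytes point count
    // + 16 bytes (two little-endian f64 values) per coordinate
    let mut item = Vec::with_capacity(9 + coords.len() * 16);
    item.push(0x01); // 0x01 = little-endian byte order
    item.extend_from_slice(&2u32.to_le_bytes()); // geometry type 2 = LineString
    item.extend_from_slice(&(coords.len() as u32).to_le_bytes());
    for (x, y) in coords {
        item.extend_from_slice(&x.to_le_bytes());
        item.extend_from_slice(&y.to_le_bytes());
    }
    item
}
```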
/// Build well-known binary representing a point with the given XYZM coordinate
pub fn wkb_point_xyzm(x: f64, y: f64, z: f64, m: f64) -> Vec<u8> {
    let mut item: [u8; 37] = [0; 37];
    item[0] = 0x01;
    item[1..5].copy_from_slice(3001_u32.to_le_bytes().as_slice());
    item[5..13].copy_from_slice(x.to_le_bytes().as_slice());
    item[13..21].copy_from_slice(y.to_le_bytes().as_slice());
    item[21..29].copy_from_slice(z.to_le_bytes().as_slice());
    item[29..37].copy_from_slice(m.to_le_bytes().as_slice());
    item.to_vec()
}

#[cfg(test)]
mod test {
    use wkb::reader::Wkb;

    use super::*;

    #[test]
    fn test_wkb_item() {
        let bytes = wkb_point_xy(1.0, 2.0);
        let geometry = Wkb::try_new(&bytes).unwrap();
        let mut wkt = String::new();
        wkt::to_wkt::write_geometry(&mut wkt, &geometry).unwrap();
        assert_eq!(wkt, "POINT(1 2)");
    }

    #[test]
    fn test_wkb_point_xyzm() {
        let bytes = wkb_point_xyzm(1.0, 2.0, 3.0, 4.0);
        let geometry = Wkb::try_new(&bytes).unwrap();
        let mut wkt = String::new();
        wkt::to_wkt::write_geometry(&mut wkt, &geometry).unwrap();
        assert_eq!(wkt, "POINT ZM(1 2 3 4)");
    }
}
@@ -45,6 +45,7 @@ arrow-data = { workspace = true, optional = true }
 arrow-schema = { workspace = true, optional = true }
 arrow-select = { workspace = true, optional = true }
 arrow-ipc = { workspace = true, optional = true }
+parquet-geospatial = { workspace = true, optional = true }
 parquet-variant = { workspace = true, optional = true }
 parquet-variant-json = { workspace = true, optional = true }
 parquet-variant-compute = { workspace = true, optional = true }
@@ -131,6 +132,8 @@ flate2-rust_backened = ["flate2/rust_backend"]
 flate2-zlib-rs = ["flate2/zlib-rs"]
 # Enable parquet variant support
 variant_experimental = ["arrow", "parquet-variant", "parquet-variant-json", "parquet-variant-compute"]
+# Enable geospatial support
+geospatial = ["parquet-geospatial"]
Review comment: Could you please also add the new feature flag to the main crate readme as well? https://github.com/apache/arrow-rs/blob/main/parquet/README.md#feature-flags

 [[example]]
@@ -228,11 +228,18 @@
         options: ArrowWriterOptions,
     ) -> Result<Self> {
         let mut props = options.properties;
-        let mut converter = ArrowSchemaConverter::new().with_coerce_types(props.coerce_types());
-        if let Some(schema_root) = &options.schema_root {
-            converter = converter.schema_root(schema_root);
-        }
-        let schema = converter.convert(&arrow_schema)?;
+        let schema = if let Some(parquet_schema) = options.schema_descr {
+            parquet_schema.clone()
+        } else {
+            let mut converter = ArrowSchemaConverter::new().with_coerce_types(props.coerce_types());
+            if let Some(schema_root) = &options.schema_root {
+                converter = converter.schema_root(schema_root);
+            }
+
+            converter.convert(&arrow_schema)?
+        };
+
         if !options.skip_arrow_metadata {
             // add serialized arrow schema
             add_encoded_arrow_schema_to_metadata(&arrow_schema, &mut props);
@@ -457,6 +464,7 @@
 pub struct ArrowWriterOptions {
     properties: WriterProperties,
     skip_arrow_metadata: bool,
     schema_root: Option<String>,
+    schema_descr: Option<SchemaDescriptor>,
 }

 impl ArrowWriterOptions {
@@ -490,6 +498,18 @@
             ..self
         }
     }
+
+    /// Explicitly specify the Parquet schema to be used
+    ///
+    /// If omitted (the default), the [`ArrowSchemaConverter`] is used to compute the
+    /// Parquet [`SchemaDescriptor`]. This may be used when the [`SchemaDescriptor`] is
+    /// already known or must be calculated using custom logic.
+    pub fn with_parquet_schema(self, schema_descr: SchemaDescriptor) -> Self {
+        Self {
+            schema_descr: Some(schema_descr),
+            ..self
+        }
+    }
 }

Review comment on the new method: this is a nice API addition I think

 /// A single column chunk produced by [`ArrowColumnWriter`]
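For orientation, the snippet below sketches how a downstream user might combine the new `with_parquet_schema` option with `ArrowWriter::try_new_with_options`. It mirrors the test added later in this diff, but the import paths, the function name, and the error plumbing are assumptions rather than code from this PR:

```rust
use std::sync::Arc;

use arrow::array::{ArrayRef, Int32Array};
use arrow::datatypes::{DataType, Field, Schema};
use arrow::record_batch::RecordBatch;
use parquet::arrow::arrow_writer::{ArrowWriter, ArrowWriterOptions};
use parquet::schema::types::{SchemaDescriptor, Type};

// Hypothetical usage sketch: write an int32 Arrow column into an INT64
// Parquet column by supplying the Parquet schema explicitly.
fn write_with_explicit_parquet_schema() -> parquet::errors::Result<Vec<u8>> {
    // Arrow schema for the batches handed to the writer
    let arrow_schema = Arc::new(Schema::new(vec![Field::new(
        "integers",
        DataType::Int32,
        true,
    )]));

    // Hand-built Parquet schema that stores the same column as INT64
    let parquet_schema = Type::group_type_builder("root")
        .with_fields(vec![Type::primitive_type_builder(
            "integers",
            parquet::basic::Type::INT64,
        )
        .build()?
        .into()])
        .build()?;
    let schema_descr = SchemaDescriptor::new(parquet_schema.into());

    // The new option: bypass ArrowSchemaConverter and use the explicit schema
    let options = ArrowWriterOptions::new().with_parquet_schema(schema_descr);

    let mut buf = Vec::new();
    let mut writer =
        ArrowWriter::try_new_with_options(&mut buf, arrow_schema.clone(), options)?;
    let batch = RecordBatch::try_new(
        arrow_schema,
        vec![Arc::new(Int32Array::from(vec![1, 2, 3, 4])) as ArrayRef],
    )?;
    writer.write(&batch)?;
    writer.close()?;
    Ok(buf)
}
```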
@@ -1513,7 +1533,7 @@
     use crate::file::page_index::column_index::ColumnIndexMetaData;
     use crate::file::reader::SerializedPageReader;
     use crate::parquet_thrift::{ReadThrift, ThriftSliceInputProtocol};
-    use crate::schema::types::ColumnPath;
+    use crate::schema::types::{ColumnPath, Type};
     use arrow::datatypes::ToByteSlice;
     use arrow::datatypes::{DataType, Schema};
     use arrow::error::Result as ArrowResult;
@@ -4135,6 +4155,69 @@
         }
     }

+    #[test]
+    fn test_arrow_writer_explicit_schema() {
+        // Write an int32 array using explicit int64 storage
+        let batch_schema = Arc::new(Schema::new(vec![Field::new(
+            "integers",
+            DataType::Int32,
+            true,
+        )]));
+        let parquet_schema = Type::group_type_builder("root")
+            .with_fields(vec![Type::primitive_type_builder(
+                "integers",
+                crate::basic::Type::INT64,
+            )
+            .build()
+            .unwrap()
+            .into()])
+            .build()
+            .unwrap();
+        let parquet_schema_descr = SchemaDescriptor::new(parquet_schema.into());
+
+        let batch = RecordBatch::try_new(
+            batch_schema.clone(),
+            vec![Arc::new(Int32Array::from(vec![1, 2, 3, 4])) as _],
+        )
+        .unwrap();
+
+        let explicit_schema_options =
+            ArrowWriterOptions::new().with_parquet_schema(parquet_schema_descr);
+        let mut buf = Vec::with_capacity(1024);
+        let mut writer = ArrowWriter::try_new_with_options(
+            &mut buf,
+            batch_schema.clone(),
+            explicit_schema_options,
+        )
+        .unwrap();
+        writer.write(&batch).unwrap();
+        writer.close().unwrap();
+
+        let bytes = Bytes::from(buf);
+        let reader_builder = ParquetRecordBatchReaderBuilder::try_new(bytes).unwrap();
+
+        let expected_schema = Arc::new(Schema::new(vec![Field::new(
+            "integers",
+            DataType::Int64,
+            true,
+        )]));
+        assert_eq!(reader_builder.schema(), &expected_schema);
+
+        let batches = reader_builder
+            .build()
+            .unwrap()
+            .collect::<Result<Vec<_>, ArrowError>>()
+            .unwrap();
+        assert_eq!(batches.len(), 1);
+
+        let expected_batch = RecordBatch::try_new(
+            expected_schema.clone(),
+            vec![Arc::new(Int64Array::from(vec![1, 2, 3, 4])) as _],
+        )
+        .unwrap();
+        assert_eq!(batches[0], expected_batch);
+    }
+
     #[test]
     fn mismatched_schemas() {
         let batch_schema = Schema::new(vec![Field::new("count", DataType::Int32, false)]);