@@ -255,12 +255,14 @@ where
 
 #[cfg(test)]
 mod test {
+    use std::collections::HashMap;
     use std::sync::Arc;
 
     use arrow_array::{Int32Array, Int64Array, RecordBatch, StringArray};
     use arrow_schema::{DataType, Field, Schema as ArrowSchema};
     use arrow_select::concat::concat_batches;
     use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;
+    use parquet::arrow::PARQUET_FIELD_ID_META_KEY;
     use parquet::file::properties::WriterProperties;
     use tempfile::TempDir;
 
@@ -318,7 +320,7 @@ mod test {
                 location_gen.clone(),
                 file_name_gen.clone(),
             );
-            DataFileWriterBuilder::new(pw.clone(), None, None)
+            DataFileWriterBuilder::new(pw.clone(), None, 0)
         };
         let position_delete_writer_builder = {
             let pw = ParquetWriterBuilder::new(
@@ -331,7 +333,7 @@ mod test {
             SortPositionDeleteWriterBuilder::new(pw.clone(), 100, None, None)
         };
         let equality_delete_writer_builder = {
-            let config = EqualityDeleteWriterConfig::new(vec![1, 2], schema, None, None)?;
+            let config = EqualityDeleteWriterConfig::new(vec![1, 2], schema, None, 0)?;
             let pw = ParquetWriterBuilder::new(
                 WriterProperties::builder().build(),
                 arrow_schema_to_schema(config.projected_arrow_schema_ref())
@@ -355,9 +357,18 @@ mod test {
 
         // write data
         let schema = Arc::new(ArrowSchema::new(vec![
-            Field::new("id", DataType::Int64, true),
-            Field::new("data", DataType::Utf8, true),
-            Field::new("op", DataType::Int32, false),
+            Field::new("id", DataType::Int64, true).with_metadata(HashMap::from([(
+                PARQUET_FIELD_ID_META_KEY.to_string(),
+                1.to_string(),
+            )])),
+            Field::new("data", DataType::Utf8, true).with_metadata(HashMap::from([(
+                PARQUET_FIELD_ID_META_KEY.to_string(),
+                2.to_string(),
+            )])),
+            Field::new("op", DataType::Int32, false).with_metadata(HashMap::from([(
+                PARQUET_FIELD_ID_META_KEY.to_string(),
+                3.to_string(),
+            )])),
         ]));
         {
             let id_array = Int64Array::from(vec![1, 2, 1, 3, 2, 3, 1]);
@@ -388,8 +399,14 @@ mod test {
         assert_eq!(data_files.len(), 3);
         // data file
         let data_schema = Arc::new(ArrowSchema::new(vec![
-            Field::new("id", DataType::Int64, true),
-            Field::new("data", DataType::Utf8, true),
+            Field::new("id", DataType::Int64, true).with_metadata(HashMap::from([(
+                PARQUET_FIELD_ID_META_KEY.to_string(),
+                1.to_string(),
+            )])),
+            Field::new("data", DataType::Utf8, true).with_metadata(HashMap::from([(
+                PARQUET_FIELD_ID_META_KEY.to_string(),
+                2.to_string(),
+            )])),
         ]));
         let data_file = data_files
             .iter()
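For context on the pattern this diff introduces: each Arrow `Field` in the test schemas now carries an Iceberg field id in its metadata under `PARQUET_FIELD_ID_META_KEY`, so the Parquet writer can map Arrow columns back to Iceberg schema field ids. A minimal standalone sketch of that tagging follows; the `with_field_id` helper is hypothetical (the test inlines the metadata map), but the key and the `with_metadata` call are exactly what the diff uses.

use std::collections::HashMap;
use std::sync::Arc;

use arrow_schema::{DataType, Field, Schema as ArrowSchema};
use parquet::arrow::PARQUET_FIELD_ID_META_KEY;

// Hypothetical helper mirroring what the test does inline: attach an Iceberg
// field id to an Arrow field via the Parquet metadata key ("PARQUET:field_id").
fn with_field_id(field: Field, id: i32) -> Field {
    field.with_metadata(HashMap::from([(
        PARQUET_FIELD_ID_META_KEY.to_string(),
        id.to_string(),
    )]))
}

fn main() {
    // Same shape as the test's data-file schema: field ids 1 and 2.
    let schema = Arc::new(ArrowSchema::new(vec![
        with_field_id(Field::new("id", DataType::Int64, true), 1),
        with_field_id(Field::new("data", DataType::Utf8, true), 2),
    ]));
    for field in schema.fields() {
        println!("{}: {:?}", field.name(), field.metadata());
    }
}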