@@ -21,6 +21,7 @@ use crate::handlers::http::fetch_schema;
 use actix_web::http::header::ContentType;
 use actix_web::web::{self, Json};
 use actix_web::{Either, FromRequest, HttpRequest, HttpResponse, Responder};
+use arrow_array::RecordBatch;
 use bytes::Bytes;
 use chrono::{DateTime, Utc};
 use datafusion::common::tree_node::TreeNode;
@@ -248,7 +249,6 @@ async fn handle_streaming_query(
         }
         Either::Right(stream) => stream,
     };
-    let fields = fields.clone();
     let total_time = format!("{:?}", time.elapsed());
     let time = time.elapsed().as_secs_f64();
     QUERY_EXECUTE_TIME
@@ -266,22 +266,10 @@ async fn handle_streaming_query(
         .to_string();

         // stream the records without fields
-        let records_stream = records_stream.map(move |batch_result| match batch_result {
-            Ok(batch) => {
-                let response = QueryResponse {
-                    records: vec![batch],
-                    fields: Vec::new(),
-                    fill_null: send_null,
-                    with_fields: false,
-                }
-                .to_json()
-                .unwrap_or_else(|e| {
-                    error!("Failed to parse record batch into JSON: {}", e);
-                    json!({})
-                });
-                Ok(Bytes::from(format!("{}\n", response)))
-            }
-            Err(e) => Err(actix_web::error::ErrorInternalServerError(e)),
+        let mut batch_processor = create_batch_processor(send_null);
+        let records_stream = records_stream.map(move |batch_result| {
+            let batch_result = batch_result.map_err(QueryError::from);
+            batch_processor(batch_result)
         });

         // Combine the initial fields chunk with the records stream
@@ -292,24 +280,9 @@ async fn handle_streaming_query(
         Box::pin(fields_chunk.chain(records_stream))
             as Pin<Box<dyn Stream<Item = Result<Bytes, actix_web::Error>>>>
     } else {
-        let stream = records_stream.map(move |batch_result| match batch_result {
-            Ok(batch) => {
-                let response = QueryResponse {
-                    records: vec![batch],
-                    fields: fields.clone(),
-                    fill_null: send_null,
-                    with_fields,
-                }
-                .to_json()
-                .unwrap_or_else(|e| {
-                    error!("Failed to parse record batch into JSON: {}", e);
-                    json!({})
-                });
-                Ok(Bytes::from(format!("{}\n", response)))
-            }
-            Err(e) => Err(actix_web::error::ErrorInternalServerError(e)),
-        });
-
+        let mut batch_processor = create_batch_processor(send_null);
+        let stream = records_stream
+            .map(move |batch_result| batch_processor(batch_result.map_err(QueryError::from)));
         Box::pin(stream) as Pin<Box<dyn Stream<Item = Result<Bytes, actix_web::Error>>>>
     };

@@ -319,6 +292,27 @@ async fn handle_streaming_query(
         .streaming(stream))
 }

+fn create_batch_processor(
+    send_null: bool,
+) -> impl FnMut(Result<RecordBatch, QueryError>) -> Result<Bytes, actix_web::Error> {
+    move |batch_result| match batch_result {
+        Ok(batch) => {
+            let response = QueryResponse {
+                records: vec![batch],
+                fields: Vec::new(),
+                fill_null: send_null,
+                with_fields: false,
+            }
+            .to_json()
+            .map_err(|e| {
+                error!("Failed to parse record batch into JSON: {}", e);
+                actix_web::error::ErrorInternalServerError(e)
+            })?;
+            Ok(Bytes::from(format!("{}\n", response)))
+        }
+        Err(e) => Err(actix_web::error::ErrorInternalServerError(e)),
+    }
+}
 pub async fn get_counts(
     req: HttpRequest,
     counts_request: Json<CountsRequest>,
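
For context, the refactor replaces two duplicated `map` closures with a single factory, `create_batch_processor`, whose returned `FnMut` closure serializes each batch and is reused in both stream arms, with JSON errors now surfaced as HTTP 500s instead of being swallowed as `json!({})`. Below is a minimal, self-contained sketch of that closure-factory-over-a-stream pattern. It is not the PR's code: `create_line_processor`, `LineError`, and the toy `Vec<u64>` batches are hypothetical stand-ins for the crate's `create_batch_processor`, `QueryError`, and Arrow `RecordBatch` types, and only the `futures`, `bytes`, and `serde_json` crates are assumed as dependencies.

use bytes::Bytes;
use futures::executor::block_on;
use futures::stream::{self, StreamExt};

// Hypothetical stand-in for QueryError.
#[derive(Debug)]
struct LineError(String);

// Factory mirroring the shape of `create_batch_processor`: build the per-batch
// serializer once, then reuse the returned closure in every stream arm instead
// of duplicating the `match` inside each `map`.
fn create_line_processor(
    send_null: bool,
) -> impl FnMut(Result<Vec<u64>, LineError>) -> Result<Bytes, String> {
    move |batch_result| match batch_result {
        // Serialize one batch into a newline-terminated JSON chunk.
        Ok(rows) => {
            let json = serde_json::json!({ "records": rows, "fill_null": send_null });
            Ok(Bytes::from(format!("{json}\n")))
        }
        // Propagate the error instead of silently emitting an empty object.
        Err(e) => Err(format!("failed to process batch: {e:?}")),
    }
}

fn main() {
    block_on(async {
        // A fake record-batch stream: two good batches and one failure.
        let batches = stream::iter(vec![
            Ok(vec![1, 2, 3]),
            Err(LineError("upstream failure".into())),
            Ok(vec![4, 5]),
        ]);

        // Same shape as the handler: one processor instance, mapped over the stream.
        let mut processor = create_line_processor(true);
        let chunks: Vec<_> = batches.map(move |b| processor(b)).collect().await;

        for chunk in chunks {
            println!("{chunk:?}");
        }
    });
}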