Skip to content

Commit 97148bd

Browse files
authored
Fix clippy lints found by Clippy in Rust 1.78 (#10353)
* clippy: unused code * clippy: clone from * fix: more clippy * more * fix another
1 parent d4da80b commit 97148bd

File tree

15 files changed

+13
-148
lines changed

15 files changed

+13
-148
lines changed

datafusion-cli/src/exec.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ pub async fn exec_from_lines(
8181
Ok(_) => {}
8282
Err(err) => eprintln!("{err}"),
8383
}
84-
query = "".to_owned();
84+
query = "".to_string();
8585
} else {
8686
query.push('\n');
8787
}

datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -203,13 +203,9 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
203203
Arc::new(builder.finish())
204204
}
205205

206-
fn build_primitive_array<T: ArrowPrimitiveType + Resolver>(
207-
&self,
208-
rows: RecordSlice,
209-
col_name: &str,
210-
) -> ArrayRef
206+
fn build_primitive_array<T>(&self, rows: RecordSlice, col_name: &str) -> ArrayRef
211207
where
212-
T: ArrowNumericType,
208+
T: ArrowNumericType + Resolver,
213209
T::Native: num_traits::cast::NumCast,
214210
{
215211
Arc::new(

datafusion/core/src/datasource/file_format/json.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@ impl BatchSerializer for JsonSerializer {
219219
pub struct JsonSink {
220220
/// Config options for writing data
221221
config: FileSinkConfig,
222-
///
222+
/// Writer options for underlying Json writer
223223
writer_options: JsonWriterOptions,
224224
}
225225

datafusion/core/src/datasource/file_format/parquet.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -536,7 +536,7 @@ async fn fetch_statistics(
536536
pub struct ParquetSink {
537537
/// Config options for writing data
538538
config: FileSinkConfig,
539-
///
539+
/// Underlying parquet options
540540
parquet_options: TableParquetOptions,
541541
/// File metadata from successfully produced parquet files. The Mutex is only used
542542
/// to allow inserting to HashMap from behind borrowed reference in DataSink::write_all.

datafusion/core/src/datasource/physical_plan/file_stream.rs

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -519,16 +519,13 @@ mod tests {
519519
use std::sync::Arc;
520520

521521
use super::*;
522-
use crate::datasource::file_format::write::BatchSerializer;
523522
use crate::datasource::object_store::ObjectStoreUrl;
524523
use crate::prelude::SessionContext;
525524
use crate::test::{make_partition, object_store::register_test_store};
526525

527526
use arrow_schema::Schema;
528527
use datafusion_common::{internal_err, Statistics};
529528

530-
use bytes::Bytes;
531-
532529
/// Test `FileOpener` which will simulate errors during file opening or scanning
533530
#[derive(Default)]
534531
struct TestOpener {
@@ -974,14 +971,4 @@ mod tests {
974971

975972
Ok(())
976973
}
977-
978-
struct TestSerializer {
979-
bytes: Bytes,
980-
}
981-
982-
impl BatchSerializer for TestSerializer {
983-
fn serialize(&self, _batch: RecordBatch, _initial: bool) -> Result<Bytes> {
984-
Ok(self.bytes.clone())
985-
}
986-
}
987974
}

datafusion/core/src/execution/context/avro.rs

Lines changed: 0 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -57,29 +57,3 @@ impl SessionContext {
5757
Ok(())
5858
}
5959
}
60-
61-
#[cfg(test)]
62-
mod tests {
63-
use super::*;
64-
65-
use async_trait::async_trait;
66-
67-
// Test for compilation error when calling read_* functions from an #[async_trait] function.
68-
// See https://github.com/apache/datafusion/issues/1154
69-
#[async_trait]
70-
trait CallReadTrait {
71-
async fn call_read_avro(&self) -> DataFrame;
72-
}
73-
74-
struct CallRead {}
75-
76-
#[async_trait]
77-
impl CallReadTrait for CallRead {
78-
async fn call_read_avro(&self) -> DataFrame {
79-
let ctx = SessionContext::new();
80-
ctx.read_avro("dummy", AvroReadOptions::default())
81-
.await
82-
.unwrap()
83-
}
84-
}
85-
}

datafusion/core/src/execution/context/csv.rs

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,6 @@ mod tests {
9090
use crate::assert_batches_eq;
9191
use crate::test_util::{plan_and_collect, populate_csv_partitions};
9292

93-
use async_trait::async_trait;
9493
use tempfile::TempDir;
9594

9695
#[tokio::test]
@@ -125,21 +124,4 @@ mod tests {
125124

126125
Ok(())
127126
}
128-
129-
// Test for compilation error when calling read_* functions from an #[async_trait] function.
130-
// See https://github.com/apache/datafusion/issues/1154
131-
#[async_trait]
132-
trait CallReadTrait {
133-
async fn call_read_csv(&self) -> DataFrame;
134-
}
135-
136-
struct CallRead {}
137-
138-
#[async_trait]
139-
impl CallReadTrait for CallRead {
140-
async fn call_read_csv(&self) -> DataFrame {
141-
let ctx = SessionContext::new();
142-
ctx.read_csv("dummy", CsvReadOptions::new()).await.unwrap()
143-
}
144-
}
145127
}

datafusion/core/src/execution/context/parquet.rs

Lines changed: 0 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,6 @@ mod tests {
8484
use datafusion_common::config::TableParquetOptions;
8585
use datafusion_execution::config::SessionConfig;
8686

87-
use async_trait::async_trait;
8887
use tempfile::tempdir;
8988

9089
#[tokio::test]
@@ -331,23 +330,4 @@ mod tests {
331330
assert_eq!(total_rows, 5);
332331
Ok(())
333332
}
334-
335-
// Test for compilation error when calling read_* functions from an #[async_trait] function.
336-
// See https://github.com/apache/datafusion/issues/1154
337-
#[async_trait]
338-
trait CallReadTrait {
339-
async fn call_read_parquet(&self) -> DataFrame;
340-
}
341-
342-
struct CallRead {}
343-
344-
#[async_trait]
345-
impl CallReadTrait for CallRead {
346-
async fn call_read_parquet(&self) -> DataFrame {
347-
let ctx = SessionContext::new();
348-
ctx.read_parquet("dummy", ParquetReadOptions::default())
349-
.await
350-
.unwrap()
351-
}
352-
}
353333
}

datafusion/core/src/physical_optimizer/enforce_distribution.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -411,7 +411,7 @@ fn adjust_input_keys_ordering(
411411
} else {
412412
// By default, push down the parent requirements to children
413413
for child in requirements.children.iter_mut() {
414-
child.data = requirements.data.clone();
414+
child.data.clone_from(&requirements.data);
415415
}
416416
}
417417
Ok(Transformed::yes(requirements))

datafusion/core/src/physical_planner.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2035,7 +2035,7 @@ impl DefaultPhysicalPlanner {
20352035
let config = &session_state.config_options().explain;
20362036

20372037
if !config.physical_plan_only {
2038-
stringified_plans = e.stringified_plans.clone();
2038+
stringified_plans.clone_from(&e.stringified_plans);
20392039
if e.logical_optimization_succeeded {
20402040
stringified_plans.push(e.plan.to_stringified(FinalLogicalPlan));
20412041
}

0 commit comments

Comments (0)