
Commit 186b0b5

fix some clippy warnings (#1277)
1 parent e3a682d commit 186b0b5

File tree

8 files changed: +13 -33 lines changed


datafusion/src/datasource/file_format/json.rs

+1 -9

@@ -38,19 +38,11 @@ use crate::physical_plan::ExecutionPlan;
 use crate::physical_plan::Statistics;
 
 /// New line delimited JSON `FileFormat` implementation.
-#[derive(Debug)]
+#[derive(Debug, Default)]
 pub struct JsonFormat {
     schema_infer_max_rec: Option<usize>,
 }
 
-impl Default for JsonFormat {
-    fn default() -> Self {
-        Self {
-            schema_infer_max_rec: None,
-        }
-    }
-}
-
 impl JsonFormat {
     /// Set a limit in terms of records to scan to infer the schema
     /// - defaults to `None` (no limit)
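
Note: the change above works because `Option<usize>::default()` is `None`, so the hand-written `impl Default` produced exactly what `#[derive(Default)]` generates (the kind of duplication clippy's derivable-impls style lints point out). A minimal standalone sketch of the pattern, using an illustrative struct rather than the real DataFusion type:

// Illustrative sketch only; `JsonishFormat` is a stand-in, not the real type.
#[derive(Debug, Default)]
struct JsonishFormat {
    // Option<usize>::default() is None, so deriving Default matches the
    // removed manual impl field for field.
    schema_infer_max_rec: Option<usize>,
}

fn main() {
    let fmt = JsonishFormat::default();
    assert_eq!(fmt.schema_infer_max_rec, None);
}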

datafusion/src/optimizer/common_subexpr_eliminate.rs

+2 -2

@@ -224,13 +224,13 @@ fn optimize(plan: &LogicalPlan, execution_props: &ExecutionProps) -> Result<Logi
 fn to_arrays(
     expr: &[Expr],
     input: &LogicalPlan,
-    mut expr_set: &mut ExprSet,
+    expr_set: &mut ExprSet,
 ) -> Result<Vec<Vec<(usize, String)>>> {
     expr.iter()
         .map(|e| {
             let data_type = e.get_type(input.schema())?;
             let mut id_array = vec![];
-            expr_to_identifier(e, &mut expr_set, &mut id_array, data_type)?;
+            expr_to_identifier(e, expr_set, &mut id_array, data_type)?;
 
             Ok(id_array)
         })
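
Note: `expr_set` is already a `&mut ExprSet`, so wrapping it in another `&mut` only builds a `&mut &mut ExprSet` that the compiler coerces back down, and that extra borrow is what forced the `mut` on the parameter binding. A small sketch of the same pattern with illustrative types (not the real DataFusion ones):

use std::collections::HashMap;

// Stand-in for ExprSet; illustrative only.
type Counts = HashMap<String, usize>;

fn record(id: &str, set: &mut Counts) {
    *set.entry(id.to_string()).or_insert(0) += 1;
}

// Before: `fn collect(ids: &[&str], mut set: &mut Counts)` calling `record(id, &mut set)`.
fn collect(ids: &[&str], set: &mut Counts) {
    for id in ids {
        // `set` is already a mutable reference; pass it through as-is.
        record(id, set);
    }
}

fn main() {
    let mut counts = Counts::new();
    collect(&["a", "b", "a"], &mut counts);
    assert_eq!(counts["a"], 2);
}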

datafusion/src/physical_plan/crypto_expressions.rs

+3 -5

@@ -65,8 +65,7 @@ fn digest_process(
             DataType::LargeUtf8 => digest_algorithm.digest_array::<i64>(a.as_ref()),
             other => Err(DataFusionError::Internal(format!(
                 "Unsupported data type {:?} for function {}",
-                other,
-                digest_algorithm.to_string(),
+                other, digest_algorithm,
             ))),
         },
         ColumnarValue::Scalar(scalar) => match scalar {
@@ -75,8 +74,7 @@ fn digest_process(
             }
             other => Err(DataFusionError::Internal(format!(
                 "Unsupported data type {:?} for function {}",
-                other,
-                digest_algorithm.to_string(),
+                other, digest_algorithm,
             ))),
         },
     }
@@ -244,7 +242,7 @@ pub fn md5(args: &[ColumnarValue]) -> Result<ColumnarValue> {
         return Err(DataFusionError::Internal(format!(
             "{:?} args were supplied but {} takes exactly one argument",
             args.len(),
-            DigestAlgorithm::Md5.to_string(),
+            DigestAlgorithm::Md5,
         )));
     }
     let value = digest_process(&args[0], DigestAlgorithm::Md5)?;
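
Note: `format!` already routes `{}` arguments through `Display`, so calling `.to_string()` on them first just allocates a temporary `String` that is formatted and dropped; passing the value itself is equivalent (roughly what a lint along the lines of `clippy::to_string_in_format_args` warns about). A self-contained sketch with a made-up enum standing in for `DigestAlgorithm`:

use std::fmt;

// Illustrative stand-in for DigestAlgorithm; not the real DataFusion type.
#[derive(Debug, Clone, Copy)]
enum Algo {
    Md5,
}

impl fmt::Display for Algo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "md5")
    }
}

fn main() {
    let algo = Algo::Md5;
    // Redundant: `{}` already uses Display, so the extra String is wasted work.
    let before = format!("unsupported data type for function {}", algo.to_string());
    // Equivalent output, no intermediate allocation.
    let after = format!("unsupported data type for function {}", algo);
    assert_eq!(before, after);
}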

datafusion/src/physical_plan/planner.rs

+1 -8

@@ -260,18 +260,11 @@ pub trait ExtensionPlanner {
 
 /// Default single node physical query planner that converts a
 /// `LogicalPlan` to an `ExecutionPlan` suitable for execution.
+#[derive(Default)]
 pub struct DefaultPhysicalPlanner {
     extension_planners: Vec<Arc<dyn ExtensionPlanner + Send + Sync>>,
 }
 
-impl Default for DefaultPhysicalPlanner {
-    fn default() -> Self {
-        Self {
-            extension_planners: vec![],
-        }
-    }
-}
-
 #[async_trait]
 impl PhysicalPlanner for DefaultPhysicalPlanner {
     /// Create a physical plan from a logical plan

datafusion/src/physical_plan/string_expressions.rs

+1 -1

@@ -119,7 +119,7 @@ where
     // first map is the iterator, second is for the `Option<_>`
     Ok(string_array
         .iter()
-        .map(|string| string.map(|string| op(string)))
+        .map(|string| string.map(|s| op(s)))
         .collect())
 }

datafusion/src/scalar.rs

+1 -1

@@ -788,7 +788,7 @@ impl ScalarValue {
             (0..fields.len()).map(|_| Vec::new()).collect();
 
         // Iterate over scalars to populate the column scalars for each row
-        for scalar in scalars.into_iter() {
+        for scalar in scalars {
            if let ScalarValue::Struct(values, fields) = scalar {
                match values {
                    Some(values) => {
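
Note: a `for` loop already calls `IntoIterator::into_iter` on the collection it is given, so the explicit `.into_iter()` changes nothing: the `Vec` is consumed and each element is moved out either way. Minimal sketch:

fn main() {
    let scalars = vec![1i64, 2, 3];
    let mut sum = 0;
    // Before: `for scalar in scalars.into_iter() { ... }`; same desugaring.
    for scalar in scalars {
        sum += scalar; // `scalar` is an owned i64 in both spellings
    }
    assert_eq!(sum, 6);
}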

datafusion/src/sql/planner.rs

+2 -5

@@ -1066,8 +1066,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
                 .map_err(|_: DataFusionError| {
                     DataFusionError::Plan(format!(
                         "Invalid identifier '{}' for schema {}",
-                        col,
-                        schema.to_string()
+                        col, schema
                     ))
                 }),
             _ => Err(DataFusionError::Internal("Not a column".to_string())),
@@ -1852,9 +1851,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
             .iter()
             .rev()
             .zip(columns)
-            .map(|(ident, column_name)| {
-                format!(r#"{} = '{}'"#, column_name, ident.to_string())
-            })
+            .map(|(ident, column_name)| format!(r#"{} = '{}'"#, column_name, ident))
             .collect::<Vec<_>>()
             .join(" AND ");

datafusion/src/test_util.rs

+2 -2

@@ -199,7 +199,7 @@ fn get_data_dir(udf_env: &str, submodule_data: &str) -> Result<PathBuf, Box<dyn
     } else {
         return Err(format!(
             "the data dir `{}` defined by env {} not found",
-            pb.display().to_string(),
+            pb.display(),
             udf_env
         )
         .into());
@@ -222,7 +222,7 @@ fn get_data_dir(udf_env: &str, submodule_data: &str) -> Result<PathBuf, Box<dyn
             "env `{}` is undefined or has empty value, and the pre-defined data dir `{}` not found\n\
             HINT: try running `git submodule update --init`",
             udf_env,
-            pb.display().to_string(),
+            pb.display(),
         ).into())
     }
 }
