Make nightly clippy happy #4515

Merged (2 commits) on Dec 29, 2022
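This PR inlines format arguments across DataFusion's benchmarks and examples, which is the change clippy's `uninlined_format_args` lint suggests; nightly clippy had started flagging these call sites. Rust 1.58 stabilized capturing identifiers directly in format strings, so a plain variable passed positionally to a `format!`-family macro can move into the braces. A minimal sketch of the transformation applied throughout this diff (the variable and its value are illustrative, not from the PR):

    fn main() {
        let opt = vec!["--iterations", "3"]; // stand-in for a parsed Opt struct
        // Before: positional argument; nightly clippy flags this under
        // `clippy::uninlined_format_args`
        println!("Running benchmarks with the following options: {:?}", opt);
        // After: the identifier is captured directly in the format string
        println!("Running benchmarks with the following options: {opt:?}");
    }

Both lines print the same output; the change is purely stylistic, which is why the diff below touches only format strings.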
4 changes: 2 additions & 2 deletions benchmarks/src/bin/h2o.rs
@@ -55,7 +55,7 @@ struct GroupBy {
 #[tokio::main]
 async fn main() -> Result<()> {
     let opt = Opt::from_args();
-    println!("Running benchmarks with the following options: {:?}", opt);
+    println!("Running benchmarks with the following options: {opt:?}");
     match opt {
         Opt::GroupBy(config) => group_by(&config).await,
     }
@@ -107,7 +107,7 @@ async fn group_by(opt: &GroupBy) -> Result<()> {
         _ => unimplemented!(),
     };
 
-    println!("Executing {}", sql);
+    println!("Executing {sql}");
     let start = Instant::now();
     let df = ctx.sql(sql).await?;
     let batches = df.collect().await?;
6 changes: 3 additions & 3 deletions benchmarks/src/bin/nyctaxi.rs
@@ -66,7 +66,7 @@ struct Opt {
 #[tokio::main]
 async fn main() -> Result<()> {
     let opt = Opt::from_args();
-    println!("Running benchmarks with the following options: {:?}", opt);
+    println!("Running benchmarks with the following options: {opt:?}");
 
     let config = SessionConfig::new()
         .with_target_partitions(opt.partitions)
@@ -86,7 +86,7 @@ async fn main() -> Result<()> {
             .await?
         }
         other => {
-            println!("Invalid file format '{}'", other);
+            println!("Invalid file format '{other}'");
             process::exit(-1);
         }
     }
@@ -102,7 +102,7 @@ async fn datafusion_sql_benchmarks(
     let mut queries = HashMap::new();
     queries.insert("fare_amt_by_passenger", "SELECT passenger_count, MIN(fare_amount), MAX(fare_amount), SUM(fare_amount) FROM tripdata GROUP BY passenger_count");
     for (name, sql) in &queries {
-        println!("Executing '{}'", name);
+        println!("Executing '{name}'");
         for i in 0..iterations {
            let start = Instant::now();
            execute_sql(ctx, sql, debug).await?;
6 changes: 3 additions & 3 deletions benchmarks/src/bin/parquet_filter_pushdown.rs
@@ -67,7 +67,7 @@ struct Opt {
 #[tokio::main]
 async fn main() -> Result<()> {
     let opt: Opt = Opt::from_args();
-    println!("Running benchmarks with the following options: {:?}", opt);
+    println!("Running benchmarks with the following options: {opt:?}");
 
     let path = opt.path.join("logs.parquet");
 
@@ -137,9 +137,9 @@ async fn run_benchmarks(opt: Opt, test_file: &TestParquetFile) -> Result<()> {
     ];
 
     for filter_expr in &filter_matrix {
-        println!("Executing with filter '{}'", filter_expr);
+        println!("Executing with filter '{filter_expr}'");
         for scan_options in &scan_options_matrix {
-            println!("Using scan options {:?}", scan_options);
+            println!("Using scan options {scan_options:?}");
            for i in 0..opt.iterations {
                let start = Instant::now();
29 changes: 13 additions & 16 deletions benchmarks/src/bin/tpch.rs
@@ -169,8 +169,7 @@ async fn main() -> Result<()> {
         "zstd" => Compression::ZSTD,
         other => {
             return Err(DataFusionError::NotImplemented(format!(
-                "Invalid compression format: {}",
-                other
+                "Invalid compression format: {other}"
             )));
         }
     };
@@ -193,7 +192,7 @@ const TPCH_QUERY_END_ID: usize = 22;
 async fn benchmark_datafusion(
     opt: DataFusionBenchmarkOpt,
 ) -> Result<Vec<Vec<RecordBatch>>> {
-    println!("Running benchmarks with the following options: {:?}", opt);
+    println!("Running benchmarks with the following options: {opt:?}");
     let query_range = match opt.query {
         Some(query_id) => query_id..=query_id,
         None => TPCH_QUERY_START_ID..=TPCH_QUERY_END_ID,
@@ -257,14 +256,13 @@ async fn benchmark_query(
         millis.push(elapsed);
         let row_count = result.iter().map(|b| b.num_rows()).sum();
         println!(
-            "Query {} iteration {} took {:.1} ms and returned {} rows",
-            query_id, i, elapsed, row_count
+            "Query {query_id} iteration {i} took {elapsed:.1} ms and returned {row_count} rows"
         );
         benchmark_run.add_result(elapsed, row_count);
     }
 
     let avg = millis.iter().sum::<f64>() / millis.len() as f64;
-    println!("Query {} avg time: {:.2} ms", query_id, avg);
+    println!("Query {query_id} avg time: {avg:.2} ms");
 
     Ok((benchmark_run, result))
 }
@@ -286,7 +284,7 @@ async fn register_tables(
     };
 
     if opt.mem_table {
-        println!("Loading table '{}' into memory", table);
+        println!("Loading table '{table}' into memory");
         let start = Instant::now();
         let memtable =
             MemTable::load(table_provider, Some(opt.partitions), &ctx.state())
@@ -328,12 +326,12 @@ async fn execute_query(
     let (state, plan) = plan.into_parts();
 
     if debug {
-        println!("=== Logical plan ===\n{:?}\n", plan);
+        println!("=== Logical plan ===\n{plan:?}\n");
     }
 
     let plan = state.optimize(&plan)?;
     if debug {
-        println!("=== Optimized logical plan ===\n{:?}\n", plan);
+        println!("=== Optimized logical plan ===\n{plan:?}\n");
     }
     let physical_plan = state.create_physical_plan(&plan).await?;
     if debug {
@@ -378,7 +376,7 @@ async fn get_table(
     match table_format {
         // dbgen creates .tbl ('|' delimited) files without header
         "tbl" => {
-            let path = format!("{}/{}.tbl", path, table);
+            let path = format!("{path}/{table}.tbl");
 
             let format = CsvFormat::default()
                 .with_delimiter(b'|')
@@ -387,15 +385,15 @@ async fn get_table(
             (Arc::new(format), path, ".tbl")
         }
         "csv" => {
-            let path = format!("{}/{}", path, table);
+            let path = format!("{path}/{table}");
             let format = CsvFormat::default()
                 .with_delimiter(b',')
                 .with_has_header(true);
 
             (Arc::new(format), path, DEFAULT_CSV_EXTENSION)
         }
         "parquet" => {
-            let path = format!("{}/{}", path, table);
+            let path = format!("{path}/{table}");
             let format = ParquetFormat::default().with_enable_pruning(Some(true));
 
             (Arc::new(format), path, DEFAULT_PARQUET_EXTENSION)
@@ -651,8 +649,8 @@ mod tests {
     }
 
     let possibilities = vec![
-        format!("expected-plans/q{}.txt", query),
-        format!("benchmarks/expected-plans/q{}.txt", query),
+        format!("expected-plans/q{query}.txt"),
+        format!("benchmarks/expected-plans/q{query}.txt"),
     ];
 
     let mut found = false;
@@ -661,8 +659,7 @@ mod tests {
     if let Ok(expected) = read_text_file(path) {
         assert_eq!(expected, actual,
             // generate output that is easier to copy/paste/update
-            "\n\nMismatch of expected content in: {:?}\nExpected:\n\n{}\n\nActual:\n\n{}\n\n",
-            path, expected, actual);
+            "\n\nMismatch of expected content in: {path:?}\nExpected:\n\n{expected}\n\nActual:\n\n{actual}\n\n");
         found = true;
         break;
     }
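One detail visible across these hunks, worth spelling out (standard Rust behavior, not something the PR changes): format-string capture works only for plain identifiers. Expressions such as field accesses, tuple indexing, or method calls still require a positional argument or a local binding first, which is why captures compose with specifiers like `{elapsed:.1}` above but a call like `format!("{}", parts.0)` stays positional. A small sketch with made-up values:

    fn main() {
        let elapsed = 12.345_f64;
        // Captured identifiers compose with format specs (precision, Debug, ...)
        println!("Query took {elapsed:.1} ms"); // prints "Query took 12.3 ms"

        let run = ("tpch", 1);
        // `{run.0}` would not compile: expressions cannot be captured inline,
        // so either keep the positional form...
        println!("Benchmark: {}", run.0);
        // ...or bind a local first, then capture it
        let name = run.0;
        println!("Benchmark: {name}");
    }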
16 changes: 7 additions & 9 deletions benchmarks/src/tpch.rs
@@ -292,8 +292,8 @@ pub fn get_answer_schema(n: usize) -> Schema {
 pub fn get_query_sql(query: usize) -> Result<Vec<String>> {
     if query > 0 && query < 23 {
         let possibilities = vec![
-            format!("queries/q{}.sql", query),
-            format!("benchmarks/queries/q{}.sql", query),
+            format!("queries/q{query}.sql"),
+            format!("benchmarks/queries/q{query}.sql"),
         ];
         let mut errors = vec![];
         for filename in possibilities {
@@ -306,12 +306,11 @@ pub fn get_query_sql(query: usize) -> Result<Vec<String>> {
                     .map(|s| s.to_string())
                     .collect());
                 }
-                Err(e) => errors.push(format!("{}: {}", filename, e)),
+                Err(e) => errors.push(format!("{filename}: {e}")),
             };
         }
         Err(DataFusionError::Plan(format!(
-            "invalid query. Could not find query: {:?}",
-            errors
+            "invalid query. Could not find query: {errors:?}"
         )))
     } else {
         Err(DataFusionError::Plan(
@@ -334,7 +333,7 @@ pub async fn convert_tbl(
     let start = Instant::now();
     let schema = get_tpch_table_schema(table);
 
-    let input_path = format!("{}/{}.tbl", input_path, table);
+    let input_path = format!("{input_path}/{table}.tbl");
     let options = CsvReadOptions::new()
         .schema(&schema)
         .has_header(false)
@@ -372,8 +371,7 @@ pub async fn convert_tbl(
         }
         other => {
             return Err(DataFusionError::NotImplemented(format!(
-                "Invalid output format: {}",
-                other
+                "Invalid output format: {other}"
             )));
         }
     }
@@ -447,7 +445,7 @@ fn col_to_scalar(column: &ArrayRef, row_index: usize) -> ScalarValue {
             let array = as_string_array(column).unwrap();
             ScalarValue::Utf8(Some(array.value(row_index).to_string()))
         }
-        other => panic!("unexpected data type in benchmark: {}", other),
+        other => panic!("unexpected data type in benchmark: {other}"),
     }
 }
2 changes: 1 addition & 1 deletion datafusion-examples/examples/csv_sql.rs
@@ -30,7 +30,7 @@ async fn main() -> Result<()> {
     // register csv file with the execution context
     ctx.register_csv(
         "aggregate_test_100",
-        &format!("{}/csv/aggregate_test_100.csv", testdata),
+        &format!("{testdata}/csv/aggregate_test_100.csv"),
         CsvReadOptions::new(),
     )
     .await?;
2 changes: 1 addition & 1 deletion datafusion-examples/examples/dataframe.rs
@@ -29,7 +29,7 @@ async fn main() -> Result<()> {
 
     let testdata = datafusion::test_util::parquet_test_data();
 
-    let filename = &format!("{}/alltypes_plain.parquet", testdata);
+    let filename = &format!("{testdata}/alltypes_plain.parquet");
 
     // define the query using the DataFrame trait
     let df = ctx
4 changes: 2 additions & 2 deletions datafusion-examples/examples/deserialize_to_struct.rs
@@ -26,7 +26,7 @@ use serde::Deserialize;
 #[tokio::main]
 async fn main() -> Result<()> {
     let data_list = Data::new().await?;
-    println!("{:#?}", data_list);
+    println!("{data_list:#?}");
     Ok(())
 }
 
@@ -48,7 +48,7 @@ impl Data {
 
         ctx.register_parquet(
             "alltypes_plain",
-            &format!("{}/alltypes_plain.parquet", testdata),
+            &format!("{testdata}/alltypes_plain.parquet"),
             ParquetReadOptions::default(),
         )
         .await?;
6 changes: 3 additions & 3 deletions datafusion-examples/examples/flight_client.rs
@@ -41,12 +41,12 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     let request = tonic::Request::new(FlightDescriptor {
         r#type: flight_descriptor::DescriptorType::Path as i32,
         cmd: vec![],
-        path: vec![format!("{}/alltypes_plain.parquet", testdata)],
+        path: vec![format!("{testdata}/alltypes_plain.parquet")],
     });
 
     let schema_result = client.get_schema(request).await?.into_inner();
     let schema = Schema::try_from(&schema_result)?;
-    println!("Schema: {:?}", schema);
+    println!("Schema: {schema:?}");
 
     // Call do_get to execute a SQL query and receive results
     let request = tonic::Request::new(Ticket {
@@ -59,7 +59,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     let flight_data = stream.message().await?.unwrap();
     // convert FlightData to a stream
     let schema = Arc::new(Schema::try_from(&flight_data)?);
-    println!("Schema: {:?}", schema);
+    println!("Schema: {schema:?}");
 
     // all the remaining stream messages should be dictionary and record batches
     let mut results = vec![];
10 changes: 5 additions & 5 deletions datafusion-examples/examples/flight_server.rs
@@ -77,7 +77,7 @@ impl FlightService for FlightServiceImpl {
         let ticket = request.into_inner();
         match std::str::from_utf8(&ticket.ticket) {
             Ok(sql) => {
-                println!("do_get: {}", sql);
+                println!("do_get: {sql}");
 
                 // create local execution context
                 let ctx = SessionContext::new();
@@ -87,7 +87,7 @@ impl FlightService for FlightServiceImpl {
                 // register parquet file with the execution context
                 ctx.register_parquet(
                     "alltypes_plain",
-                    &format!("{}/alltypes_plain.parquet", testdata),
+                    &format!("{testdata}/alltypes_plain.parquet"),
                     ParquetReadOptions::default(),
                 )
                 .await
@@ -131,7 +131,7 @@ impl FlightService for FlightServiceImpl {
 
                 Ok(Response::new(Box::pin(output) as Self::DoGetStream))
             }
-            Err(e) => Err(Status::invalid_argument(format!("Invalid ticket: {:?}", e))),
+            Err(e) => Err(Status::invalid_argument(format!("Invalid ticket: {e:?}"))),
         }
     }
 
@@ -186,7 +186,7 @@ impl FlightService for FlightServiceImpl {
 }
 
 fn to_tonic_err(e: datafusion::error::DataFusionError) -> Status {
-    Status::internal(format!("{:?}", e))
+    Status::internal(format!("{e:?}"))
 }
 
 /// This example shows how to wrap DataFusion with `FlightService` to support looking up schema information for
@@ -199,7 +199,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     let svc = FlightServiceServer::new(service);
 
-    println!("Listening on {:?}", addr);
+    println!("Listening on {addr:?}");
 
     Server::builder().add_service(svc).serve(addr).await?;
2 changes: 1 addition & 1 deletion datafusion-examples/examples/parquet_sql.rs
@@ -30,7 +30,7 @@ async fn main() -> Result<()> {
     // register parquet file with the execution context
     ctx.register_parquet(
         "alltypes_plain",
-        &format!("{}/alltypes_plain.parquet", testdata),
+        &format!("{testdata}/alltypes_plain.parquet"),
         ParquetReadOptions::default(),
     )
     .await?;
2 changes: 1 addition & 1 deletion datafusion-examples/examples/parquet_sql_multiple_files.rs
@@ -40,7 +40,7 @@ async fn main() -> Result<()> {
     // for the query
     ctx.register_listing_table(
         "my_table",
-        &format!("file://{}", testdata),
+        &format!("file://{testdata}"),
         listing_options,
         None,
         None,
5 changes: 1 addition & 4 deletions datafusion-examples/examples/query-aws-s3.rs
@@ -51,10 +51,7 @@ async fn main() -> Result<()> {
     // cannot query the parquet files from this bucket because the path contains a whitespace
     // and we don't support that yet
     // https://github.com/apache/arrow-rs/issues/2799
-    let path = format!(
-        "s3://{}/csv_backup/yellow_tripdata_2022-02.csv",
-        bucket_name
-    );
+    let path = format!("s3://{bucket_name}/csv_backup/yellow_tripdata_2022-02.csv");
     ctx.register_csv("trips", &path, CsvReadOptions::default())
         .await?;
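To reproduce the warnings this PR silences, running `cargo +nightly clippy` at the workspace root should surface the `uninlined_format_args` suggestions, and `cargo clippy --fix` can apply most of them mechanically. This assumes the lint fires under the nightly toolchain's default lint levels; the exact clippy version used by CI at the time is not shown on this page.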