
Commit e0d14c4

support min, max for decimal data type
1 parent e3a682d commit e0d14c4

File tree

17 files changed: +814 -55 lines
@@ -0,0 +1,54 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use datafusion::arrow::datatypes::{DataType, Field, Schema};
+use datafusion::error::Result;
+use datafusion::prelude::*;
+use std::sync::Arc;
+
+/// This example demonstrates executing a simple query against an Arrow data source (CSV)
+/// with a decimal column, and fetching the results
+#[tokio::main]
+async fn main() -> Result<()> {
+    // create local execution context
+    let mut ctx = ExecutionContext::new();
+
+    let testdata = datafusion::test_util::arrow_test_data();
+
+    // schema with decimal type
+    let schema = Arc::new(Schema::new(vec![
+        Field::new("c1", DataType::Decimal(10, 6), false),
+        Field::new("c2", DataType::Float64, false),
+        Field::new("c3", DataType::Boolean, false),
+    ]));
+
+    // register csv file with the execution context
+    ctx.register_csv(
+        "aggregate_simple",
+        &format!("{}/csv/aggregate_simple.csv", testdata),
+        CsvReadOptions::new().schema(&schema),
+    )
+    .await?;
+
+    // execute the query
+    let df = ctx.sql("select c1 from aggregate_simple").await?;
+
+    // print the results
+    df.show().await?;
+
+    Ok(())
+}
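The example above only projects the decimal column. As a minimal sketch of how the min/max support added by this commit could be exercised, the query in the example could be swapped for an aggregate over the same registered table (assuming the ctx and aggregate_simple registration shown above):

    // Sketch: aggregate the decimal column instead of projecting it;
    // min/max over Decimal is what this commit adds support for.
    let df = ctx
        .sql("select min(c1), max(c1) from aggregate_simple")
        .await?;
    df.show().await?;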

datafusion/Cargo.toml
+4 -2

@@ -52,8 +52,10 @@ avro = ["avro-rs", "num-traits"]
 [dependencies]
 ahash = "0.7"
 hashbrown = { version = "0.11", features = ["raw"] }
-arrow = { version = "6.2.0", features = ["prettyprint"] }
-parquet = { version = "6.2.0", features = ["arrow"] }
+arrow = { path = "/Users/kliu3/Documents/github/arrow-rs/arrow", features = ["prettyprint"] }
+#arrow = { version = "6.2.0", features = ["prettyprint"] }
+#parquet = { version = "6.2.0", features = ["arrow"] }
+parquet = { path = "/Users/kliu3/Documents/github/arrow-rs/parquet", features = ["arrow"] }
 sqlparser = "0.12"
 paste = "^1.0"
 num_cpus = "1.13.0"

datafusion/src/execution/context.rs
+25

@@ -3895,6 +3895,31 @@ mod tests {
         Ok(())
     }

+    #[tokio::test]
+    async fn aggregate_decimal() -> Result<()> {
+        let mut ctx = ExecutionContext::new();
+        // schema with data
+        let schema = Arc::new(Schema::new(vec![
+            Field::new("c1", DataType::Decimal(10, 6), false),
+            Field::new("c2", DataType::Float64, false),
+            Field::new("c3", DataType::Boolean, false),
+        ]));
+
+        ctx.register_csv(
+            "aggregate_simple",
+            "tests/aggregate_simple.csv",
+            CsvReadOptions::new().schema(&schema),
+        )
+        .await?;
+
+        // decimal query
+        let result = plan_and_collect(&mut ctx, "select min(c1) from aggregate_simple")
+            .await
+            .unwrap();
+        println!("{:?}", result);
+        Ok(())
+    }
+
     #[tokio::test]
     async fn create_external_table_with_timestamps() {
         let mut ctx = ExecutionContext::new();
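The aggregate accumulator changes that actually implement min/max for decimals live in the other changed files, which are not shown in this view. As a rough, hypothetical illustration only (not the DataFusion code): an Arrow Decimal(precision, scale) value is backed by a 128-bit integer, and every value in a column shares the same scale, so tracking the minimum and maximum reduces to integer comparisons on the underlying i128 values.

// Hypothetical sketch, not the actual DataFusion implementation: track min/max
// over decimal values via the i128 representation that backs Arrow's Decimal type.
#[derive(Debug, Default)]
struct DecimalMinMax {
    min: Option<i128>,
    max: Option<i128>,
}

impl DecimalMinMax {
    fn update(&mut self, v: i128) {
        // Integer comparison is sufficient because every value in the column
        // carries the same scale.
        self.min = Some(self.min.map_or(v, |m| m.min(v)));
        self.max = Some(self.max.map_or(v, |m| m.max(v)));
    }
}

fn main() {
    // Decimal(10, 6) values 0.000002, 0.000001 and 0.000003 as scaled integers.
    let mut acc = DecimalMinMax::default();
    for v in [2_i128, 1, 3] {
        acc.update(v);
    }
    // Prints: min = Some(1), max = Some(3)
    println!("min = {:?}, max = {:?}", acc.min, acc.max);
}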
