Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merge dataframe and dataframe impl #1998

Merged
merged 10 commits into from
Mar 15, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 7 additions & 8 deletions ballista/rust/client/src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@ use datafusion::catalog::TableReference;
use datafusion::dataframe::DataFrame;
use datafusion::datasource::TableProvider;
use datafusion::error::{DataFusionError, Result};
use datafusion::execution::dataframe_impl::DataFrameImpl;
use datafusion::logical_plan::{CreateExternalTable, LogicalPlan, TableScan};
use datafusion::prelude::{
AvroReadOptions, CsvReadOptions, ExecutionConfig, ExecutionContext,
Expand Down Expand Up @@ -148,7 +147,7 @@ impl BallistaContext {
&self,
path: &str,
options: AvroReadOptions<'_>,
) -> Result<Arc<dyn DataFrame>> {
) -> Result<Arc<DataFrame>> {
// convert to absolute path because the executor likely has a different working directory
let path = PathBuf::from(path);
let path = fs::canonicalize(&path)?;
Expand All @@ -168,7 +167,7 @@ impl BallistaContext {

/// Create a DataFrame representing a Parquet table scan
/// TODO fetch schema from scheduler instead of resolving locally
pub async fn read_parquet(&self, path: &str) -> Result<Arc<dyn DataFrame>> {
pub async fn read_parquet(&self, path: &str) -> Result<Arc<DataFrame>> {
// convert to absolute path because the executor likely has a different working directory
let path = PathBuf::from(path);
let path = fs::canonicalize(&path)?;
Expand All @@ -192,7 +191,7 @@ impl BallistaContext {
&self,
path: &str,
options: CsvReadOptions<'_>,
) -> Result<Arc<dyn DataFrame>> {
) -> Result<Arc<DataFrame>> {
// convert to absolute path because the executor likely has a different working directory
let path = PathBuf::from(path);
let path = fs::canonicalize(&path)?;
Expand Down Expand Up @@ -291,7 +290,7 @@ impl BallistaContext {
///
/// This method is `async` because queries of type `CREATE EXTERNAL TABLE`
/// might require the schema to be inferred.
pub async fn sql(&self, sql: &str) -> Result<Arc<dyn DataFrame>> {
pub async fn sql(&self, sql: &str) -> Result<Arc<DataFrame>> {
let mut ctx = {
let state = self.state.lock();
create_df_ctx_with_ballista_query_planner::<LogicalPlanNode>(
Expand Down Expand Up @@ -342,16 +341,16 @@ impl BallistaContext {
.has_header(*has_header),
)
.await?;
Ok(Arc::new(DataFrameImpl::new(ctx.state, &plan)))
Ok(Arc::new(DataFrame::new(ctx.state, &plan)))
}
FileType::Parquet => {
self.register_parquet(name, location).await?;
Ok(Arc::new(DataFrameImpl::new(ctx.state, &plan)))
Ok(Arc::new(DataFrame::new(ctx.state, &plan)))
}
FileType::Avro => {
self.register_avro(name, location, AvroReadOptions::default())
.await?;
Ok(Arc::new(DataFrameImpl::new(ctx.state, &plan)))
Ok(Arc::new(DataFrame::new(ctx.state, &plan)))
}
_ => Err(DataFusionError::NotImplemented(format!(
"Unsupported file type {:?}.",
Expand Down
6 changes: 3 additions & 3 deletions datafusion-cli/src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ impl Context {
}

/// execute an SQL statement against the context
pub async fn sql(&mut self, sql: &str) -> Result<Arc<dyn DataFrame>> {
pub async fn sql(&mut self, sql: &str) -> Result<Arc<DataFrame>> {
match self {
Context::Local(datafusion) => datafusion.sql(sql).await,
Context::Remote(ballista) => ballista.sql(sql).await,
Expand All @@ -63,7 +63,7 @@ impl BallistaContext {
.map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
Ok(Self(BallistaContext::remote(host, port, &config)))
}
pub async fn sql(&mut self, sql: &str) -> Result<Arc<dyn DataFrame>> {
pub async fn sql(&mut self, sql: &str) -> Result<Arc<DataFrame>> {
self.0.sql(sql).await
}
}
Expand All @@ -78,7 +78,7 @@ impl BallistaContext {
.to_string(),
))
}
pub async fn sql(&mut self, _sql: &str) -> Result<Arc<dyn DataFrame>> {
pub async fn sql(&mut self, _sql: &str) -> Result<Arc<DataFrame>> {
unreachable!()
}
}
6 changes: 3 additions & 3 deletions datafusion-examples/examples/custom_datasource.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,12 @@
// under the License.

use async_trait::async_trait;
use datafusion::arrow::array::{Array, UInt64Builder, UInt8Builder};
use datafusion::arrow::array::{UInt64Builder, UInt8Builder};
use datafusion::arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::dataframe::DataFrame;
use datafusion::datasource::TableProvider;
use datafusion::error::{DataFusionError, Result};
use datafusion::execution::dataframe_impl::DataFrameImpl;
use datafusion::execution::runtime_env::RuntimeEnv;
use datafusion::logical_plan::{Expr, LogicalPlanBuilder};
use datafusion::physical_plan::expressions::PhysicalSortExpr;
Expand Down Expand Up @@ -66,7 +66,7 @@ async fn search_accounts(
.build()
.unwrap();

let mut dataframe = DataFrameImpl::new(ctx.state, &logical_plan)
let mut dataframe = DataFrame::new(ctx.state, &logical_plan)
.select_columns(&["id", "bank_account"])?;

if let Some(f) = filter {
Expand Down
Loading