Commit 256bc16 (1 parent: 2abe84a)
Co-authored-by: jakobbraun <[email protected]>
Showing 21 changed files with 696 additions and 628 deletions.
@@ -0,0 +1,29 @@
package com.exasol.spark

import org.apache.spark.sql.DataFrame

import com.holdenkarau.spark.testing.DataFrameSuiteBase
import org.scalatest.BeforeAndAfterEach

abstract class AbstractTableQueryIT
    extends BaseIntegrationTest
    with DataFrameSuiteBase
    with BeforeAndAfterEach {

  val tableName: String
  def createTable(): Unit

  override def beforeEach(): Unit = {
    createTable()
    ()
  }

  private[spark] def getDataFrame(query: Option[String] = None): DataFrame =
    spark.read
      .format("exasol")
      .option("host", jdbcHost)
      .option("port", jdbcPort)
      .option("query", query.fold(s"SELECT * FROM $tableName")(identity))
      .load()

}
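The suite above leaves tableName and createTable() to concrete suites and exposes getDataFrame, which reads either the whole table or a user-supplied query through the connector. As a hypothetical usage sketch (not part of this commit), a concrete suite could look like the following; it assumes, as BaseTableQueryIT below does, that BaseIntegrationTest provides exasolConnectionManager, jdbcHost, jdbcPort, and ScalaTest's test syntax. The CitiesIT name, SKETCH_SCHEMA, and the CITIES table are made up for illustration.

// Hypothetical usage sketch (not part of this commit): a concrete suite that
// reuses AbstractTableQueryIT. Class, schema, and table names are made up.
class CitiesIT extends AbstractTableQueryIT {

  override val tableName: String = "SKETCH_SCHEMA.CITIES"

  override def createTable(): Unit =
    exasolConnectionManager.withExecute(
      Seq(
        "DROP SCHEMA IF EXISTS SKETCH_SCHEMA CASCADE",
        "CREATE SCHEMA SKETCH_SCHEMA",
        s"CREATE OR REPLACE TABLE $tableName (NAME VARCHAR(100) UTF8, CITY VARCHAR(100) UTF8)",
        s"INSERT INTO $tableName VALUES ('Germany', 'Berlin')",
        "commit"
      )
    )

  test("reads the whole table when no query is given") {
    assert(getDataFrame().count() === 1)
  }

  test("reads a user-provided query") {
    val df = getDataFrame(Some(s"SELECT CITY FROM $tableName"))
    assert(df.collect().head.getString(0) === "Berlin")
  }

}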
@@ -0,0 +1,35 @@
package com.exasol.spark

class BaseTableQueryIT extends AbstractTableQueryIT {

  val schema = "TEST_SCHEMA"
  override val tableName: String = s"$schema.TEST_TABLE"
  override def createTable(): Unit =
    exasolConnectionManager.withExecute(
      // scalastyle:off nonascii
      Seq(
        s"DROP SCHEMA IF EXISTS $schema CASCADE",
        s"CREATE SCHEMA $schema",
        s"""|CREATE OR REPLACE TABLE $tableName (
            | ID INTEGER IDENTITY NOT NULL,
            | NAME VARCHAR(100) UTF8,
            | CITY VARCHAR(2000) UTF8,
            | DATE_INFO DATE,
            | UNICODE_COL VARCHAR(100) UTF8,
            | UPDATED_AT TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
            |)""".stripMargin,
        s"""|INSERT INTO $tableName (name, city, date_info, unicode_col)
            | VALUES ('Germany', 'Berlin', '2017-12-31', 'öäüß')
            |""".stripMargin,
        s"""|INSERT INTO $tableName (name, city, date_info, unicode_col)
            | VALUES ('France', 'Paris', '2018-01-01', '\u00d6')
            |""".stripMargin,
        s"""|INSERT INTO $tableName (name, city, date_info, unicode_col)
            | VALUES ('Portugal', 'Lisbon', '2018-10-01', '\u00d9')
            |""".stripMargin,
        "commit"
      )
      // scalastyle:on nonascii
    )

}
@@ -1,27 +1,15 @@
 package com.exasol.spark
 
-import com.holdenkarau.spark.testing.DataFrameSuiteBase
-
 /**
- * Test only required columns selection from queries.
+ * Tests column pruning for user queries.
  */
-class ColumnPruningIT extends BaseIntegrationTest with DataFrameSuiteBase {
-
-  test("returns only required columns in query") {
-    createDummyTable()
-
-    val df = spark.read
-      .format("com.exasol.spark")
-      .option("host", jdbcHost)
-      .option("port", jdbcPort)
-      .option("query", s"SELECT * FROM $EXA_SCHEMA.$EXA_TABLE")
-      .load()
-      .select("city")
+class ColumnPruningIT extends BaseTableQueryIT {
 
+  test("returns dataframe with selected columns") {
+    val df = getDataFrame().select("city")
     assert(df.columns.size === 1)
     assert(df.columns.head === "city")
-    val result = df.collect().map(x => x.getString(0)).toSet
-    assert(result === Set("Berlin", "Paris", "Lisbon"))
+    assert(df.collect().map(x => x.getString(0)) === Seq("Berlin", "Paris", "Lisbon"))
   }
 
 }
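A possible companion check (not part of this commit) would push the projection into the user query itself instead of pruning on the DataFrame side, using the query option that getDataFrame in AbstractTableQueryIT forwards to the connector. The suite name below is made up; the expected cities come from the rows inserted by BaseTableQueryIT.

// Hypothetical companion test (not part of this commit): select only the CITY
// column on the Exasol side via a user query and expect the same rows.
class ColumnPruningPushdownIT extends BaseTableQueryIT {

  test("returns the same cities when the projection is part of the user query") {
    val df = getDataFrame(Some(s"SELECT CITY FROM $tableName"))
    assert(df.columns.size === 1)
    assert(df.collect().map(_.getString(0)).toSet === Set("Berlin", "Paris", "Lisbon"))
  }

}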