[SPARK-35206][TESTS][SQL] Extract commonly used project path resolution into a function in SparkFunSuite

### What changes were proposed in this pull request?

Add a common function `getWorkspaceFilePath` (which resolves paths against the Spark home) to `SparkFunSuite`, and apply the function at the call sites it was extracted from.
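
For example, a call site in `SQLKeywordUtils` changes as follows (the snippet mirrors the diff below):

```scala
// Before: the suite resolved the resource path against the Spark home by hand.
// val docPath = java.nio.file.Paths.get(sparkHome, "docs", "sql-ref-ansi-compliance.md").toFile

// After: the suite calls the shared helper inherited from SparkFunSuite.
val docPath = getWorkspaceFilePath("docs", "sql-ref-ansi-compliance.md").toFile
```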

### Why are the changes needed?

Spark SQL has test suites that read resources when running tests. The logic for resolving resource paths is repeated across different suites, so extracting it into a single function eases code maintenance.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass existing tests.

Closes apache#32315 from Ngone51/extract-common-file-path.

Authored-by: yi.wu <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
Ngone51 authored and cloud-fan committed May 14, 2021
1 parent 68239d1 commit 94bd480
Showing 7 changed files with 25 additions and 16 deletions.
20 changes: 16 additions & 4 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -19,21 +19,22 @@ package org.apache.spark

// scalastyle:off
import java.io.File
+import java.nio.file.Path
import java.util.{Locale, TimeZone}

-import org.apache.log4j.spi.LoggingEvent

import scala.annotation.tailrec
+import scala.collection.mutable.ArrayBuffer

import org.apache.commons.io.FileUtils
import org.apache.log4j.{Appender, AppenderSkeleton, Level, Logger}
+import org.apache.log4j.spi.LoggingEvent
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, Failed, Outcome}
import org.scalatest.funsuite.AnyFunSuite

import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.util.{AccumulatorContext, Utils}

-import scala.collection.mutable.ArrayBuffer

/**
 * Base abstract class for all unit tests in Spark for handling common functionality.
 *
@@ -119,6 +120,17 @@ abstract class SparkFunSuite
    file
  }

+  /**
+   * Get a Path relative to the root project. It is assumed that a spark home is set.
+   */
+  protected final def getWorkspaceFilePath(first: String, more: String*): Path = {
+    if (!(sys.props.contains("spark.test.home") || sys.env.contains("SPARK_HOME"))) {
+      fail("spark.test.home or SPARK_HOME is not set.")
+    }
+    val sparkHome = sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
+    java.nio.file.Paths.get(sparkHome, first +: more: _*)
+  }
+
  /**
   * Note: this method doesn't support `BeforeAndAfter`. You must use `BeforeAndAfterEach` to
   * set up and tear down resources.
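As a usage illustration only (the suite name and the file resolved here are hypothetical, not part of this change), a test could resolve a workspace-relative file via the new helper like this:

```scala
import org.apache.spark.SparkFunSuite

// Hypothetical example suite: resolves a file relative to the Spark workspace root.
class WorkspacePathExampleSuite extends SparkFunSuite {

  test("resolve a workspace-relative file") {
    // getWorkspaceFilePath fails the test early if neither spark.test.home
    // nor SPARK_HOME is set; otherwise it prefixes the segments with that root.
    val readme = getWorkspaceFilePath("README.md").toFile
    assert(readme.getName === "README.md")
  }
}
```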
@@ -27,11 +27,11 @@ import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.catalyst.util.fileToString

-trait SQLKeywordUtils extends SQLHelper {
+trait SQLKeywordUtils extends SparkFunSuite with SQLHelper {

  val sqlSyntaxDefs = {
    val sqlBasePath = {
-      java.nio.file.Paths.get(sparkHome, "sql", "catalyst", "src", "main", "antlr4", "org",
+      getWorkspaceFilePath("sql", "catalyst", "src", "main", "antlr4", "org",
        "apache", "spark", "sql", "catalyst", "parser", "SqlBase.g4").toFile
    }
    fileToString(sqlBasePath).split("\n")
@@ -41,7 +41,7 @@ trait SQLKeywordUtils extends SQLHelper {
  // Spark default mode, and the SQL standard.
  val keywordsInDoc: Array[Array[String]] = {
    val docPath = {
-      java.nio.file.Paths.get(sparkHome, "docs", "sql-ref-ansi-compliance.md").toFile
+      getWorkspaceFilePath("docs", "sql-ref-ansi-compliance.md").toFile
    }
    fileToString(docPath).split("\n")
      .dropWhile(!_.startsWith("|Keyword|")).drop(2).takeWhile(_.startsWith("|"))
@@ -150,7 +150,7 @@ trait SQLKeywordUtils extends SQLHelper {
  }
}

-class SQLKeywordSuite extends SparkFunSuite with SQLKeywordUtils {
+class SQLKeywordSuite extends SQLKeywordUtils {
  test("all keywords are documented") {
    val documentedKeywords = keywordsInDoc.map(_.head).toSet
    if (allCandidateKeywords != documentedKeywords) {
@@ -16,11 +16,10 @@
 */
package org.apache.spark.sql.catalyst.parser

-import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.{SQLKeywordUtils, TableIdentifier}
import org.apache.spark.sql.internal.SQLConf

-class TableIdentifierParserSuite extends SparkFunSuite with SQLKeywordUtils {
+class TableIdentifierParserSuite extends SQLKeywordUtils {
  import CatalystSqlParser._

  // Add "$elem$", "$value$" & "$key$"
@@ -72,8 +72,7 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession {
    // We use a path based on Spark home for 2 reasons:
    // 1. Maven can't get correct resource directory when resources in other jars.
    // 2. We test subclasses in the hive-thriftserver module.
-    java.nio.file.Paths.get(sparkHome,
-      "sql", "core", "src", "test", "resources", "sql-functions").toFile
+    getWorkspaceFilePath("sql", "core", "src", "test", "resources", "sql-functions").toFile
  }

  private val resultFile = new File(baseResourcePath, "sql-expression-schema.md")
@@ -84,7 +84,7 @@ trait PlanStabilitySuite extends TPCDSBase with DisableAdaptiveExecutionSuite {

  protected val baseResourcePath = {
    // use the same way as `SQLQueryTestSuite` to get the resource path
-    java.nio.file.Paths.get("src", "test", "resources", "tpcds-plan-stability").toFile
+    getWorkspaceFilePath("sql", "core", "src", "test", "resources", "tpcds-plan-stability").toFile
  }

  private val referenceRegex = "#\\d+".r
@@ -131,8 +131,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
    // We use a path based on Spark home for 2 reasons:
    // 1. Maven can't get correct resource directory when resources in other jars.
    // 2. We test subclasses in the hive-thriftserver module.
-    java.nio.file.Paths.get(sparkHome,
-      "sql", "core", "src", "test", "resources", "sql-tests").toFile
+    getWorkspaceFilePath("sql", "core", "src", "test", "resources", "sql-tests").toFile
  }

  protected val inputFilePath = new File(baseResourcePath, "inputs").getAbsolutePath
@@ -79,7 +79,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp

  protected val baseResourcePath = {
    // use the same way as `SQLQueryTestSuite` to get the resource path
-    java.nio.file.Paths.get("src", "test", "resources", "tpcds-query-results")
+    getWorkspaceFilePath("sql", "core", "src", "test", "resources", "tpcds-query-results")
      .toFile.getAbsolutePath
  }

