Skip to content

Commit 319c179

Browse files
beliefer
authored and committed
[SPARK-37734][SQL][TESTS] Upgrade h2 from 1.4.195 to 2.0.204
### What changes were proposed in this pull request? This PR aims to upgrade `com.h2database` from 1.4.195 to 2.0.204. ### Why are the changes needed? Fix one vulnerability, ref: https://www.tenable.com/cve/CVE-2021-23463 ### Does this PR introduce _any_ user-facing change? 'No'. ### How was this patch tested? Jenkins test. Closes apache#35013 from beliefer/SPARK-37734. Authored-by: Jiaan Geng <[email protected]> Signed-off-by: Wenchen Fan <[email protected]>
1 parent 14d0893 commit 319c179

File tree

4 files changed

+23
-33
lines changed

4 files changed

+23
-33
lines changed

sql/core/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@
153153
<dependency>
154154
<groupId>com.h2database</groupId>
155155
<artifactId>h2</artifactId>
156-
<version>1.4.195</version>
156+
<version>2.0.204</version>
157157
<scope>test</scope>
158158
</dependency>
159159
<dependency>

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

+8-23
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ import java.util.{Calendar, GregorianCalendar, Properties, TimeZone}
2424

2525
import scala.collection.JavaConverters._
2626

27-
import org.h2.jdbc.JdbcSQLException
2827
import org.mockito.ArgumentMatchers._
2928
import org.mockito.Mockito._
3029
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
@@ -54,7 +53,8 @@ class JDBCSuite extends QueryTest
5453
val urlWithUserAndPass = "jdbc:h2:mem:testdb0;user=testUser;password=testPass"
5554
var conn: java.sql.Connection = null
5655

57-
val testBytes = Array[Byte](99.toByte, 134.toByte, 135.toByte, 200.toByte, 205.toByte)
56+
val testBytes = Array[Byte](99.toByte, 134.toByte, 135.toByte, 200.toByte, 205.toByte) ++
57+
Array.fill(15)(0.toByte)
5858

5959
val testH2Dialect = new JdbcDialect {
6060
override def canHandle(url: String): Boolean = url.startsWith("jdbc:h2")
@@ -87,7 +87,6 @@ class JDBCSuite extends QueryTest
8787
val properties = new Properties()
8888
properties.setProperty("user", "testUser")
8989
properties.setProperty("password", "testPass")
90-
properties.setProperty("rowId", "false")
9190

9291
conn = DriverManager.getConnection(url, properties)
9392
conn.prepareStatement("create schema test").executeUpdate()
@@ -162,7 +161,7 @@ class JDBCSuite extends QueryTest
162161
|OPTIONS (url '$url', dbtable 'TEST.STRTYPES', user 'testUser', password 'testPass')
163162
""".stripMargin.replaceAll("\n", " "))
164163

165-
conn.prepareStatement("create table test.timetypes (a TIME, b DATE, c TIMESTAMP)"
164+
conn.prepareStatement("create table test.timetypes (a TIME, b DATE, c TIMESTAMP(7))"
166165
).executeUpdate()
167166
conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
168167
+ "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
@@ -177,12 +176,12 @@ class JDBCSuite extends QueryTest
177176
""".stripMargin.replaceAll("\n", " "))
178177

179178
conn.prepareStatement("CREATE TABLE test.timezone (tz TIMESTAMP WITH TIME ZONE) " +
180-
"AS SELECT '1999-01-08 04:05:06.543543543 GMT-08:00'")
179+
"AS SELECT '1999-01-08 04:05:06.543543543-08:00'")
181180
.executeUpdate()
182181
conn.commit()
183182

184-
conn.prepareStatement("CREATE TABLE test.array (ar ARRAY) " +
185-
"AS SELECT '(1, 2, 3)'")
183+
conn.prepareStatement("CREATE TABLE test.array_table (ar Integer ARRAY) " +
184+
"AS SELECT ARRAY[1, 2, 3]")
186185
.executeUpdate()
187186
conn.commit()
188187

@@ -638,7 +637,7 @@ class JDBCSuite extends QueryTest
638637
assert(rows(0).getAs[Array[Byte]](0).sameElements(testBytes))
639638
assert(rows(0).getString(1).equals("Sensitive"))
640639
assert(rows(0).getString(2).equals("Insensitive"))
641-
assert(rows(0).getString(3).equals("Twenty-byte CHAR"))
640+
assert(rows(0).getString(3).equals("Twenty-byte CHAR "))
642641
assert(rows(0).getAs[Array[Byte]](4).sameElements(testBytes))
643642
assert(rows(0).getString(5).equals("I am a clob!"))
644643
}
@@ -729,20 +728,6 @@ class JDBCSuite extends QueryTest
729728
assert(math.abs(rows(0).getDouble(1) - 1.00000023841859331) < 1e-12)
730729
}
731730

732-
test("Pass extra properties via OPTIONS") {
733-
// We set rowId to false during setup, which means that _ROWID_ column should be absent from
734-
// all tables. If rowId is true (default), the query below doesn't throw an exception.
735-
intercept[JdbcSQLException] {
736-
sql(
737-
s"""
738-
|CREATE OR REPLACE TEMPORARY VIEW abc
739-
|USING org.apache.spark.sql.jdbc
740-
|OPTIONS (url '$url', dbtable '(SELECT _ROWID_ FROM test.people)',
741-
| user 'testUser', password 'testPass')
742-
""".stripMargin.replaceAll("\n", " "))
743-
}
744-
}
745-
746731
test("Remap types via JdbcDialects") {
747732
JdbcDialects.registerDialect(testH2Dialect)
748733
val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
@@ -1375,7 +1360,7 @@ class JDBCSuite extends QueryTest
13751360
}.getMessage
13761361
assert(e.contains("Unsupported type TIMESTAMP_WITH_TIMEZONE"))
13771362
e = intercept[SQLException] {
1378-
spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY", new Properties()).collect()
1363+
spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY_TABLE", new Properties()).collect()
13791364
}.getMessage
13801365
assert(e.contains("Unsupported type ARRAY"))
13811366
}

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -432,7 +432,7 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
432432
"PushedGroupByColumns: []"
433433
checkKeywordsExistsInExplain(df, expected_plan_fragment)
434434
}
435-
checkAnswer(df, Seq(Row(2, 1.0)))
435+
checkAnswer(df, Seq(Row(2, 1.5)))
436436
}
437437

438438
test("partitioned scan with aggregate push-down: complete push-down only") {

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala

+13-8
Original file line numberDiff line numberDiff line change
@@ -227,7 +227,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
227227
JdbcDialects.registerDialect(testH2Dialect)
228228
val df = spark.createDataFrame(sparkContext.parallelize(arr2x2), schema2)
229229

230-
val m = intercept[org.h2.jdbc.JdbcSQLException] {
230+
val m = intercept[org.h2.jdbc.JdbcSQLSyntaxErrorException] {
231231
df.write.option("createTableOptions", "ENGINE tableEngineName")
232232
.jdbc(url1, "TEST.CREATETBLOPTS", properties)
233233
}.getMessage
@@ -326,7 +326,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
326326
test("save errors if wrong user/password combination") {
327327
val df = spark.createDataFrame(sparkContext.parallelize(arr2x2), schema2)
328328

329-
val e = intercept[org.h2.jdbc.JdbcSQLException] {
329+
val e = intercept[org.h2.jdbc.JdbcSQLInvalidAuthorizationSpecException] {
330330
df.write.format("jdbc")
331331
.option("dbtable", "TEST.SAVETEST")
332332
.option("url", url1)
@@ -427,7 +427,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
427427
// verify the data types of the created table by reading the database catalog of H2
428428
val query =
429429
"""
430-
|(SELECT column_name, type_name, character_maximum_length
430+
|(SELECT column_name, data_type, character_maximum_length
431431
| FROM information_schema.columns WHERE table_name = 'DBCOLTYPETEST')
432432
""".stripMargin
433433
val rows = spark.read.jdbc(url1, query, properties).collect()
@@ -436,7 +436,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
436436
val typeName = row.getString(1)
437437
// For CHAR and VARCHAR, we also compare the max length
438438
if (typeName.contains("CHAR")) {
439-
val charMaxLength = row.getInt(2)
439+
val charMaxLength = row.getLong(2)
440440
assert(expectedTypes(row.getString(0)) == s"$typeName($charMaxLength)")
441441
} else {
442442
assert(expectedTypes(row.getString(0)) == typeName)
@@ -452,15 +452,18 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
452452
val df = spark.createDataFrame(sparkContext.parallelize(data), schema)
453453

454454
// out-of-order
455-
val expected1 = Map("id" -> "BIGINT", "first#name" -> "VARCHAR(123)", "city" -> "CHAR(20)")
455+
val expected1 =
456+
Map("id" -> "BIGINT", "first#name" -> "CHARACTER VARYING(123)", "city" -> "CHARACTER(20)")
456457
testUserSpecifiedColTypes(df, "`first#name` VARCHAR(123), id BIGINT, city CHAR(20)", expected1)
457458
// partial schema
458-
val expected2 = Map("id" -> "INTEGER", "first#name" -> "VARCHAR(123)", "city" -> "CHAR(20)")
459+
val expected2 =
460+
Map("id" -> "INTEGER", "first#name" -> "CHARACTER VARYING(123)", "city" -> "CHARACTER(20)")
459461
testUserSpecifiedColTypes(df, "`first#name` VARCHAR(123), city CHAR(20)", expected2)
460462

461463
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
462464
// should still respect the original column names
463-
val expected = Map("id" -> "INTEGER", "first#name" -> "VARCHAR(123)", "city" -> "CLOB")
465+
val expected = Map("id" -> "INTEGER", "first#name" -> "CHARACTER VARYING(123)",
466+
"city" -> "CHARACTER LARGE OBJECT(9223372036854775807)")
464467
testUserSpecifiedColTypes(df, "`FiRsT#NaMe` VARCHAR(123)", expected)
465468
}
466469

@@ -470,7 +473,9 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
470473
StructField("First#Name", StringType) ::
471474
StructField("city", StringType) :: Nil)
472475
val df = spark.createDataFrame(sparkContext.parallelize(data), schema)
473-
val expected = Map("id" -> "INTEGER", "First#Name" -> "VARCHAR(123)", "city" -> "CLOB")
476+
val expected =
477+
Map("id" -> "INTEGER", "First#Name" -> "CHARACTER VARYING(123)",
478+
"city" -> "CHARACTER LARGE OBJECT(9223372036854775807)")
474479
testUserSpecifiedColTypes(df, "`First#Name` VARCHAR(123)", expected)
475480
}
476481
}

0 commit comments

Comments
 (0)