Skip to content

Commit 2d76e44

Browse files
JoshRosen authored and rxin committed
[SPARK-11647] Attempt to reduce time/flakiness of Thriftserver CLI and SparkSubmit tests
This patch aims to reduce the test time and flakiness of HiveSparkSubmitSuite, SparkSubmitSuite, and CliSuite. Key changes: - Disable IO synchronization calls for Derby writes, since durability doesn't matter for tests. This was done for HiveCompatibilitySuite in apache#6651 and resulted in huge test speedups. - Add a few missing `--conf`s to disable various Spark UIs. The CliSuite, in particular, never disabled these UIs, leaving it prone to port-contention-related flakiness. - Fix two instances where tests defined `beforeAll()` methods which were never called because the appropriate traits were not mixed in. I updated these tests suites to extend `BeforeAndAfterEach` so that they play nicely with our `ResetSystemProperties` trait. Author: Josh Rosen <[email protected]> Closes apache#9623 from JoshRosen/SPARK-11647.
1 parent dd77e27 commit 2d76e44

File tree

4 files changed

+38
-18
lines changed

4 files changed

+38
-18
lines changed

core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,12 @@ import org.scalatest.BeforeAndAfterEach
3333
import org.apache.spark.SparkFunSuite
3434
import org.apache.spark.api.r.RUtils
3535
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
36+
import org.apache.spark.util.ResetSystemProperties
3637

37-
class RPackageUtilsSuite extends SparkFunSuite with BeforeAndAfterEach {
38+
class RPackageUtilsSuite
39+
extends SparkFunSuite
40+
with BeforeAndAfterEach
41+
with ResetSystemProperties {
3842

3943
private val main = MavenCoordinate("a", "b", "c")
4044
private val dep1 = MavenCoordinate("a", "dep1", "c")
@@ -60,11 +64,9 @@ class RPackageUtilsSuite extends SparkFunSuite with BeforeAndAfterEach {
6064
}
6165
}
6266

63-
def beforeAll() {
64-
System.setProperty("spark.testing", "true")
65-
}
66-
6767
override def beforeEach(): Unit = {
68+
super.beforeEach()
69+
System.setProperty("spark.testing", "true")
6870
lineBuffer.clear()
6971
}
7072

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import scala.collection.mutable.ArrayBuffer
2323

2424
import com.google.common.base.Charsets.UTF_8
2525
import com.google.common.io.ByteStreams
26-
import org.scalatest.Matchers
26+
import org.scalatest.{BeforeAndAfterEach, Matchers}
2727
import org.scalatest.concurrent.Timeouts
2828
import org.scalatest.time.SpanSugar._
2929

@@ -37,10 +37,12 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
3737
class SparkSubmitSuite
3838
extends SparkFunSuite
3939
with Matchers
40+
with BeforeAndAfterEach
4041
with ResetSystemProperties
4142
with Timeouts {
4243

43-
def beforeAll() {
44+
override def beforeEach() {
45+
super.beforeEach()
4446
System.setProperty("spark.testing", "true")
4547
}
4648

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ import scala.concurrent.{Await, Promise}
2727
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
2828

2929
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
30-
import org.scalatest.BeforeAndAfter
30+
import org.scalatest.BeforeAndAfterAll
3131

3232
import org.apache.spark.util.Utils
3333
import org.apache.spark.{Logging, SparkFunSuite}
@@ -36,21 +36,26 @@ import org.apache.spark.{Logging, SparkFunSuite}
3636
* A test suite for the `spark-sql` CLI tool. Note that all test cases share the same temporary
3737
* Hive metastore and warehouse.
3838
*/
39-
class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
39+
class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
4040
val warehousePath = Utils.createTempDir()
4141
val metastorePath = Utils.createTempDir()
4242
val scratchDirPath = Utils.createTempDir()
4343

44-
before {
44+
override def beforeAll(): Unit = {
45+
super.beforeAll()
4546
warehousePath.delete()
4647
metastorePath.delete()
4748
scratchDirPath.delete()
4849
}
4950

50-
after {
51-
warehousePath.delete()
52-
metastorePath.delete()
53-
scratchDirPath.delete()
51+
override def afterAll(): Unit = {
52+
try {
53+
warehousePath.delete()
54+
metastorePath.delete()
55+
scratchDirPath.delete()
56+
} finally {
57+
super.afterAll()
58+
}
5459
}
5560

5661
/**
@@ -79,6 +84,8 @@ class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
7984
val jdbcUrl = s"jdbc:derby:;databaseName=$metastorePath;create=true"
8085
s"""$cliScript
8186
| --master local
87+
| --driver-java-options -Dderby.system.durability=test
88+
| --conf spark.ui.enabled=false
8289
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$jdbcUrl
8390
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
8491
| --hiveconf ${ConfVars.SCRATCHDIR}=$scratchDirPath

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import java.util.Date
2323

2424
import scala.collection.mutable.ArrayBuffer
2525

26-
import org.scalatest.Matchers
26+
import org.scalatest.{BeforeAndAfterEach, Matchers}
2727
import org.scalatest.concurrent.Timeouts
2828
import org.scalatest.exceptions.TestFailedDueToTimeoutException
2929
import org.scalatest.time.SpanSugar._
@@ -42,14 +42,14 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
4242
class HiveSparkSubmitSuite
4343
extends SparkFunSuite
4444
with Matchers
45-
// This test suite sometimes gets extremely slow out of unknown reason on Jenkins. Here we
46-
// add a timestamp to provide more diagnosis information.
45+
with BeforeAndAfterEach
4746
with ResetSystemProperties
4847
with Timeouts {
4948

5049
// TODO: rewrite these or mark them as slow tests to be run sparingly
5150

52-
def beforeAll() {
51+
override def beforeEach() {
52+
super.beforeEach()
5353
System.setProperty("spark.testing", "true")
5454
}
5555

@@ -66,6 +66,7 @@ class HiveSparkSubmitSuite
6666
"--master", "local-cluster[2,1,1024]",
6767
"--conf", "spark.ui.enabled=false",
6868
"--conf", "spark.master.rest.enabled=false",
69+
"--driver-java-options", "-Dderby.system.durability=test",
6970
"--jars", jarsString,
7071
unusedJar.toString, "SparkSubmitClassA", "SparkSubmitClassB")
7172
runSparkSubmit(args)
@@ -79,6 +80,7 @@ class HiveSparkSubmitSuite
7980
"--master", "local-cluster[2,1,1024]",
8081
"--conf", "spark.ui.enabled=false",
8182
"--conf", "spark.master.rest.enabled=false",
83+
"--driver-java-options", "-Dderby.system.durability=test",
8284
unusedJar.toString)
8385
runSparkSubmit(args)
8486
}
@@ -93,6 +95,7 @@ class HiveSparkSubmitSuite
9395
val args = Seq(
9496
"--conf", "spark.ui.enabled=false",
9597
"--conf", "spark.master.rest.enabled=false",
98+
"--driver-java-options", "-Dderby.system.durability=test",
9699
"--class", "Main",
97100
testJar)
98101
runSparkSubmit(args)
@@ -104,6 +107,9 @@ class HiveSparkSubmitSuite
104107
"--class", SPARK_9757.getClass.getName.stripSuffix("$"),
105108
"--name", "SparkSQLConfTest",
106109
"--master", "local-cluster[2,1,1024]",
110+
"--conf", "spark.ui.enabled=false",
111+
"--conf", "spark.master.rest.enabled=false",
112+
"--driver-java-options", "-Dderby.system.durability=test",
107113
unusedJar.toString)
108114
runSparkSubmit(args)
109115
}
@@ -114,6 +120,9 @@ class HiveSparkSubmitSuite
114120
"--class", SPARK_11009.getClass.getName.stripSuffix("$"),
115121
"--name", "SparkSQLConfTest",
116122
"--master", "local-cluster[2,1,1024]",
123+
"--conf", "spark.ui.enabled=false",
124+
"--conf", "spark.master.rest.enabled=false",
125+
"--driver-java-options", "-Dderby.system.durability=test",
117126
unusedJar.toString)
118127
runSparkSubmit(args)
119128
}

0 commit comments

Comments (0)