Commit 4baf3b2 · randomf/spark-testing-base
Handle cross-compatibility issues: move the shared Spark context into separate before/after-1.5 source trees so the log level can be set at runtime, and change WrappedConfVar to be safe back to Spark 1.3.
1 parent fedc276 commit 4baf3b2

File tree

6 files changed: +132 −1 lines changed

build.sbt

Lines changed: 4 additions & 0 deletions

@@ -31,27 +31,31 @@ unmanagedSourceDirectories in Compile := {
   if (sparkVersion.value >= "1.6") Seq(
     (sourceDirectory in Compile)(_ / "1.6/scala"),
     (sourceDirectory in Compile)(_ / "1.5/scala"),
+    (sourceDirectory in Compile)(_ / "1.5/java"),
     (sourceDirectory in Compile)(_ / "1.4/scala"),
     (sourceDirectory in Compile)(_ / "1.4/java"),
     (sourceDirectory in Compile)(_ / "1.3/scala"),
     (sourceDirectory in Compile)(_ / "1.3/java")
   ).join.value
   else if (sparkVersion.value >= "1.5") Seq(
     (sourceDirectory in Compile)(_ / "1.5/scala"),
+    (sourceDirectory in Compile)(_ / "1.5/java"),
     (sourceDirectory in Compile)(_ / "1.4/scala"),
     (sourceDirectory in Compile)(_ / "1.4/java"),
     (sourceDirectory in Compile)(_ / "1.3/scala"),
     (sourceDirectory in Compile)(_ / "1.3/java")
   ).join.value
   else if (sparkVersion.value >= "1.4") Seq(
     (sourceDirectory in Compile)(_ / "pre-1.5/scala"),
+    (sourceDirectory in Compile)(_ / "pre-1.5/java"),
     (sourceDirectory in Compile)(_ / "1.4/scala"),
     (sourceDirectory in Compile)(_ / "1.4/java"),
     (sourceDirectory in Compile)(_ / "1.3/scala"),
     (sourceDirectory in Compile)(_ / "1.3/java")
   ).join.value
   else Seq(
     (sourceDirectory in Compile)(_ / "pre-1.5/scala"),
+    (sourceDirectory in Compile)(_ / "pre-1.5/java"),
     (sourceDirectory in Compile)(_ / "1.3/scala"),
     (sourceDirectory in Compile)(_ / "1.3/java"),
     (sourceDirectory in Compile)(_ / "1.3-only/scala")
Lines changed: 70 additions & 0 deletions

@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.holdenkarau.spark.testing;
+
+import org.apache.spark.*;
+import org.apache.spark.api.java.*;
+import org.junit.*;
+
+/** Shares a local `SparkContext` between all tests in a suite and closes it at the end */
+public class SharedJavaSparkContext implements SparkContextProvider {
+  private static transient SparkContext _sc;
+  private static transient JavaSparkContext _jsc;
+  protected boolean initialized = false;
+  private static SparkConf _conf = new SparkConf().setMaster("local[4]").setAppName("magic");
+
+  public SparkConf conf() {
+    return _conf;
+  }
+
+  public SparkContext sc() {
+    return _sc;
+  }
+
+  public JavaSparkContext jsc() {
+    return _jsc;
+  }
+
+  /**
+   * Hooks for setup code that needs to be executed/torn down in order with SparkContexts
+   */
+  void beforeAllTestCasesHook() {
+  }
+
+  static void afterAllTestCasesHook() {
+  }
+
+  @Before
+  public void runBefore() {
+    initialized = (_sc != null);
+
+    if (!initialized) {
+      _sc = new SparkContext(conf());
+      _jsc = new JavaSparkContext(_sc);
+
+      beforeAllTestCasesHook();
+    }
+  }
+
+  @AfterClass
+  static public void runAfterClass() {
+    LocalSparkContext$.MODULE$.stop(_sc);
+    _sc = null;
+    _jsc = null;
+    afterAllTestCasesHook();
+  }
+}
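
A usage sketch, not part of this commit: a JUnit suite picks up the shared context simply by extending SharedJavaSparkContext; runBefore() lazily creates the context before the first test and runAfterClass() stops it after the class. The class name, test name, and data below are illustrative.

import java.util.Arrays

import com.holdenkarau.spark.testing.SharedJavaSparkContext
import org.junit.Assert.assertEquals
import org.junit.Test

// Illustrative test against the class added above: jsc() is the shared
// JavaSparkContext created once for the whole class.
class SharedJavaSparkContextUsageTest extends SharedJavaSparkContext {
  @Test def parallelizeAndCount(): Unit = {
    val rdd = jsc().parallelize(Arrays.asList(1, 2, 3))
    assertEquals(3L, rdd.count())
  }
}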
Lines changed: 56 additions & 0 deletions

@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.holdenkarau.spark.testing
+
+import java.util.Date
+import org.apache.spark._
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.Suite
+
+/** Shares a local `SparkContext` between all tests in a suite and closes it at the end. */
+trait SharedSparkContext extends BeforeAndAfterAll with SparkContextProvider {
+  self: Suite =>
+
+  @transient private var _sc: SparkContext = _
+
+  override def sc: SparkContext = _sc
+
+  val appID = new Date().toString + math.floor(math.random * 10E4).toLong.toString
+
+  override val conf = new SparkConf().
+    setMaster("local[*]").
+    setAppName("test").
+    set("spark.ui.enabled", "false").
+    set("spark.app.id", appID)
+
+
+  override def beforeAll() {
+    _sc = new SparkContext(conf)
+
+    super.beforeAll()
+  }
+
+  override def afterAll() {
+    try {
+      LocalSparkContext.stop(_sc)
+      _sc = null
+    } finally {
+      super.afterAll()
+    }
+  }
+}
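
The Scala counterpart is used the same way from ScalaTest; a usage sketch, again not part of the commit, with an illustrative suite name and test body:

import com.holdenkarau.spark.testing.SharedSparkContext
import org.scalatest.FunSuite

// Illustrative suite: mixing in SharedSparkContext gives every test the
// suite-wide sc created in beforeAll and stopped in afterAll.
class SharedSparkContextUsageTest extends FunSuite with SharedSparkContext {
  test("shared context can parallelize a collection") {
    assert(sc.parallelize(Seq(1, 2, 3)).count() === 3L)
  }
}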

src/main/pre-1.5/scala/com/holdenkarau/spark/testing/WrappedConfVar.scala

Lines changed: 2 additions & 1 deletion

@@ -21,5 +21,6 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 
 private[testing] case class WrappedConfVar(cv: ConfVars) {
   val varname = cv.varname
-  def getDefaultExpr() = cv.getDefaultExpr
+  // Despite what warnings we may get here, this is the one we want for pre-1.5 versions.
+  def getDefaultExpr() = cv.defaultVal
 }
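
For context on why the wrapper exists: the Hive versions bundled with pre-1.5 Spark expose a config variable's default through the ConfVars.defaultVal field (which can trigger deprecation warnings, hence the comment above), while newer Hive offers getDefaultExpr(), so the wrapper gives callers one stable getDefaultExpr() either way. A usage sketch follows; the demo object and the choice of METASTOREWAREHOUSE are illustrative, and the code sits in the testing package because WrappedConfVar is private[testing]:

package com.holdenkarau.spark.testing

import org.apache.hadoop.hive.conf.HiveConf.ConfVars

// Illustrative only: shows the wrapper's uniform accessor in use.
object WrappedConfVarDemo {
  def main(args: Array[String]): Unit = {
    val warehouse = WrappedConfVar(ConfVars.METASTOREWAREHOUSE)
    // Prints e.g. "hive.metastore.warehouse.dir defaults to /user/hive/warehouse".
    println(warehouse.varname + " defaults to " + warehouse.getDefaultExpr())
  }
}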
