8000 More codacy inspired cleanup · randomf/spark-testing-base@cda4ee4 · GitHub
[go: up one dir, main page]

Skip to content

Commit cda4ee4

Browse files
committed
More codacy inspired cleanup
1 parent 48dadfc commit cda4ee4

File tree

8 files changed

+18
-15
lines changed

8 files changed

+18
-15
lines changed

src/main/1.3/scala/com/holdenkarau/spark/testing/DataframeGenerator.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -97,9 +97,9 @@ object DataframeGenerator {
9797
generator => (generator.columnName -> generator)).toMap
9898
(0 until fields.length).toList.map(index => {
9999
if (generatorMap.contains(fields(index).name)) {
100-
generatorMap.get(fields(index).name).get match {
101-
case gen: Column => gen.gen
102-
case list: ColumnList => getGenerator(fields(index).dataType, list.gen)
100+
generatorMap.get(fields(index).name) match {
101+
case Some(gen: Column) => gen.gen
102+
case Some(list: ColumnList) => getGenerator(fields(index).dataType, list.gen)
103103
}
104104
}
105105
else getGenerator(fields(index).dataType)

src/main/1.3/scala/com/holdenkarau/spark/testing/JavaRDDComparisons.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ object JavaRDDComparisons extends RDDComparisonsLike with JavaTestSuite {
4343
Option[(T, Integer, Integer)] = {
4444
implicit val ctag = Utils.fakeClassTag[T]
4545
compareRDD(expected.rdd, result.rdd).
46-
map(x => (x._1, Integer.valueOf(x._2), Integer.valueOf(x._3)))
46+
map{case(value, expectedCount, resultCount) => (value, Integer.valueOf(expectedCount), Integer.valueOf(resultCount))}
4747
}
4848

4949
}

src/main/1.3/scala/com/holdenkarau/spark/testing/SharedMiniCluster.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ trait SharedMiniCluster extends BeforeAndAfterAll
4949
_sc = new SparkContext(sparkConf)
5050
setup(_sc)
5151
} catch {
52-
case e: Exception =>
52+
case e: Throwable =>
5353
super.shutdownYARN()
5454
super.shutdownHDFS()
5555
throw e

src/main/1.3/scala/com/holdenkarau/spark/testing/StreamingActionBase.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ trait StreamingActionBase extends StreamingSuiteBase {
5757
try {
5858
outputStreamSSC.stop(stopSparkContext = false)
5959
} catch {
60-
case e: Exception =>
60+
case e: Throwable =>
6161
logError("Error stopping StreamingContext", e)
6262
}
6363
}

src/main/1.3/scala/com/holdenkarau/spark/testing/YARNCluster.scala

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -113,11 +113,13 @@ trait YARNClusterLike {
113113
if (configurationFile.exists()) {
114114
configurationFile.delete()
115115
}
116-
val configuration = yarnCluster.get.getConfig
117-
iterableAsScalaIterable(configuration).foreach { e =>
118-
sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
116+
val configuration = yarnCluster.map(_.getConfig)
117+
configuration.foreach{config =>
118+
iterableAsScalaIterable(config).foreach { e =>
119+
sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
120+
}
121+
config.writeXml(new FileOutputStream(configurationFile))
119122
}
120-
configuration.writeXml(new FileOutputStream(configurationFile))
121123
// Copy the system props
122124
val props = new Properties()
123125
sys.props.foreach { case (k, v) =>

src/test/1.3/scala/com/holdenkarau/spark/testing/SampleRDDTest.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ class SampleRDDTest extends FunSuite with SharedSparkContext with RDDComparisons
5656
val inputList = List("hi", "hi holden", "byez", "cheet oz", "murh bots bots")
5757
val inputRDD = sc.parallelize(inputList, 10)
5858
val tokenizedRDD = tokenize(inputRDD)
59-
val ordered = tokenizedRDD.sortBy(x => x.head)
59+
val ordered = tokenizedRDD.sortBy(x => x.headOption)
6060

6161
val expectedList = List(
6262
List("hi"), List("hi", "holden"), List("byez"), List("cheet", "oz"),

src/test/1.6/scala/com/holdenkarau/spark/testing/YARNClusterTest.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,9 @@ class YARNClusterTest extends FunSuite with BeforeAndAfterAll {
3333

3434
strRDD.saveAsTextFile(tmpDir.getAbsolutePath)
3535

36-
val readStr = sc.textFile(tmpDir.getAbsolutePath).collect().head
37-
assert(readStr === originalStr)
36+
val readStr = sc.textFile(tmpDir.getAbsolutePath).collect().headOption
37+
assert(readStr.isDefined)
38+
readStr.foreach(result => assert(result === originalStr))
3839
}
3940

4041
override def afterAll(): Unit = {

src/test/kafka/scala/com/holdenkarau/spark/testing/kafka/KafkaTestUtilsTest.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@ package com.holdenkarau.spark.testing.kafka
1818

1919
import java.util.Properties
2020

21+
import scala.collection.JavaConversions._
22+
2123
import kafka.consumer.ConsumerConfig
2224
import org.apache.spark.streaming.kafka.KafkaTestUtils
2325
import org.junit.runner.RunWith
@@ -58,8 +60,6 @@ class KafkaTestUtilsTest extends FunSuite with BeforeAndAfterAll {
5860
val consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps))
5961

6062
try {
61-
import scala.collection.JavaConversions._
62-
6363
val topicCountMap = Map(topic -> new Integer(1))
6464
val consumerMap = consumer.createMessageStreams(topicCountMap)
6565
val stream = consumerMap.get(topic).get(0)

0 commit comments

Comments (0)