Skip to content

Commit ce1572d

Browse files
committed
[MINOR] Resolve a number of miscellaneous build warnings
## What changes were proposed in this pull request?

This change resolves a number of build warnings that have accumulated, before 2.x. It does not address a large number of deprecation warnings, especially related to the Accumulator API. That will happen separately.

## How was this patch tested?

Jenkins

Author: Sean Owen <[email protected]>

Closes apache#13377 from srowen/BuildWarnings.
1 parent 472f161 commit ce1572d

File tree

8 files changed

+15
-6
lines changed

8 files changed

+15
-6
lines changed

common/unsafe/src/main/java/org/apache/spark/unsafe/Platform.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -155,8 +155,8 @@ public static long reallocateMemory(long address, long oldSize, long newSize) {
155155
@SuppressWarnings("unchecked")
156156
public static ByteBuffer allocateDirectBuffer(int size) {
157157
try {
158-
Class cls = Class.forName("java.nio.DirectByteBuffer");
159-
Constructor constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
158+
Class<?> cls = Class.forName("java.nio.DirectByteBuffer");
159+
Constructor<?> constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
160160
constructor.setAccessible(true);
161161
Field cleanerField = cls.getDeclaredField("cleaner");
162162
cleanerField.setAccessible(true);

mllib/src/test/java/org/apache/spark/mllib/fpm/JavaPrefixSpanSuite.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,9 @@ public void runPrefixSpanSaveLoad() {
7272

7373
try {
7474
model.save(spark.sparkContext(), outputPath);
75-
PrefixSpanModel newModel = PrefixSpanModel.load(spark.sparkContext(), outputPath);
75+
@SuppressWarnings("unchecked")
76+
PrefixSpanModel<Integer> newModel =
77+
(PrefixSpanModel<Integer>) PrefixSpanModel.load(spark.sparkContext(), outputPath);
7678
JavaRDD<FreqSequence<Integer>> freqSeqs = newModel.freqSequences().toJavaRDD();
7779
List<FreqSequence<Integer>> localFreqSeqs = freqSeqs.collect();
7880
Assert.assertEquals(5, localFreqSeqs.size());

mllib/src/test/scala/org/apache/spark/mllib/fpm/PrefixSpanSuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
*/
1717
package org.apache.spark.mllib.fpm
1818

19+
import scala.language.existentials
20+
1921
import org.apache.spark.SparkFunSuite
2022
import org.apache.spark.mllib.util.MLlibTestSparkContext
2123
import org.apache.spark.util.Utils

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -288,7 +288,7 @@ class ScalaReflectionSuite extends SparkFunSuite {
288288
assert(serializer.children.head.asInstanceOf[Literal].value === UTF8String.fromString("value"))
289289
assert(serializer.children.last.isInstanceOf[NewInstance])
290290
assert(serializer.children.last.asInstanceOf[NewInstance]
291-
.cls.isInstanceOf[Class[org.apache.spark.sql.catalyst.util.GenericArrayData]])
291+
.cls.isAssignableFrom(classOf[org.apache.spark.sql.catalyst.util.GenericArrayData]))
292292
}
293293

294294
private val dataTypeForComplexData = dataTypeFor[ComplexData]

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -365,7 +365,8 @@ class ExpressionEncoderSuite extends PlanTest with AnalysisTest {
365365
Arrays.deepEquals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
366366
case (b1: Array[_], b2: Array[_]) =>
367367
Arrays.equals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
368-
case (left: Comparable[Any], right: Comparable[Any]) => left.compareTo(right) == 0
368+
case (left: Comparable[_], right: Comparable[_]) =>
369+
left.asInstanceOf[Comparable[Any]].compareTo(right) == 0
369370
case _ => input == convertedBack
370371
}
371372

sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.sql
1919

20+
import scala.language.existentials
21+
2022
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
2123
import org.apache.spark.sql.catalyst.TableIdentifier
2224
import org.apache.spark.sql.execution.joins._

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ package org.apache.spark.sql.execution.datasources
1919

2020
import java.io.File
2121

22+
import scala.language.reflectiveCalls
23+
2224
import org.apache.hadoop.fs.Path
2325

2426
import org.apache.spark.sql.catalyst.util._

yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,14 +20,14 @@ import java.io.{DataOutputStream, File, FileOutputStream}
2020

2121
import scala.annotation.tailrec
2222
import scala.concurrent.duration._
23+
import scala.language.postfixOps
2324

2425
import org.apache.hadoop.fs.Path
2526
import org.apache.hadoop.yarn.api.records.ApplicationId
2627
import org.apache.hadoop.yarn.conf.YarnConfiguration
2728
import org.apache.hadoop.yarn.server.api.{ApplicationInitializationContext, ApplicationTerminationContext}
2829
import org.scalatest.{BeforeAndAfterEach, Matchers}
2930
import org.scalatest.concurrent.Eventually._
30-
import org.scalatest.concurrent.Timeouts
3131

3232
import org.apache.spark.SparkFunSuite
3333
import org.apache.spark.network.shuffle.ShuffleTestAccessor

0 commit comments

Comments (0)