Skip to content

Commit

Permalink
[SPARK-45344][CORE][SQL] Remove all Scala version string checks
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?
This PR removes all Scala version string checks that are no longer needed.

### Why are the changes needed?
These runtime version checks are obsolete: each one guarded a Scala 2.12-specific code path, and since only Scala 2.13 is supported now, the 2.13 branch can be kept unconditionally.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Pass GitHub Actions

### Was this patch authored or co-authored using generative AI tooling?
No

Closes #43133 from LuciferYang/SPARK-45344.

Authored-by: yangjie01 <yangjie01@baidu.com>
Signed-off-by: yangjie01 <yangjie01@baidu.com>
  • Loading branch information
LuciferYang committed Sep 28, 2023
1 parent 6d2ffaa commit 2cc1ee4
Show file tree
Hide file tree
Showing 4 changed files with 3 additions and 38 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ import javax.annotation.Nullable
import scala.collection.mutable.ArrayBuffer
import scala.jdk.CollectionConverters._
import scala.reflect.ClassTag
import scala.util.Properties
import scala.util.control.NonFatal

import com.esotericsoftware.kryo.{Kryo, KryoException, Serializer => KryoClassSerializer}
Expand Down Expand Up @@ -229,9 +228,7 @@ class KryoSerializer(conf: SparkConf)

kryo.register(None.getClass)
kryo.register(Nil.getClass)
if (Properties.versionNumberString.startsWith("2.13")) {
kryo.register(Utils.classForName("scala.collection.immutable.ArraySeq$ofRef"))
}
kryo.register(Utils.classForName("scala.collection.immutable.ArraySeq$ofRef"))
kryo.register(Utils.classForName("scala.collection.immutable.$colon$colon"))
kryo.register(Utils.classForName("scala.collection.immutable.Map$EmptyMap$"))
kryo.register(Utils.classForName("scala.math.Ordering$Reverse"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@ package org.apache.spark.deploy.rest

import java.lang.Boolean

import scala.util.Properties.versionNumberString

import org.json4s.jackson.JsonMethods._

import org.apache.spark.{SparkConf, SparkFunSuite}
Expand Down Expand Up @@ -235,34 +233,7 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
|}
""".stripMargin

private lazy val submitDriverRequestJson = if (versionNumberString.startsWith("2.12")) {
s"""
|{
| "action" : "CreateSubmissionRequest",
| "appArgs" : [ "two slices", "a hint of cinnamon" ],
| "appResource" : "honey-walnut-cherry.jar",
| "clientSparkVersion" : "1.2.3",
| "environmentVariables" : {
| "PATH" : "/dev/null"
| },
| "mainClass" : "org.apache.spark.examples.SparkPie",
| "sparkProperties" : {
| "spark.archives" : "fireballs.zip",
| "spark.driver.extraLibraryPath" : "pickle.jar",
| "spark.jars" : "mayonnaise.jar,ketchup.jar",
| "spark.driver.supervise" : "false",
| "spark.app.name" : "SparkPie",
| "spark.cores.max" : "10000",
| "spark.driver.memory" : "${Utils.DEFAULT_DRIVER_MEM_MB}m",
| "spark.files" : "fireball.png",
| "spark.driver.cores" : "180",
| "spark.driver.extraJavaOptions" : " -Dslices=5 -Dcolor=mostly_red",
| "spark.executor.memory" : "256m",
| "spark.driver.extraClassPath" : "food-coloring.jar"
| }
|}
""".stripMargin
} else {
private lazy val submitDriverRequestJson =
s"""
|{
| "action" : "CreateSubmissionRequest",
Expand All @@ -289,7 +260,6 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
| }
|}
""".stripMargin
}

private val submitDriverResponseJson =
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -553,7 +553,6 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
}

test("SPARK-43898: Register scala.collection.immutable.ArraySeq$ofRef for Scala 2.13") {
assume(scala.util.Properties.versionNumberString.startsWith("2.13"))
val conf = new SparkConf(false)
conf.set(KRYO_REGISTRATION_REQUIRED, true)
val ser = new KryoSerializer(conf).newInstance()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -162,9 +162,8 @@ class HiveSparkSubmitSuite
// Before the fix in SPARK-8470, this results in a MissingRequirementError because
// the HiveContext code mistakenly overrides the class loader that contains user classes.
// For more detail, see sql/hive/src/test/resources/regression-test-SPARK-8489/*scala.
// TODO: revisit for Scala 2.13 support
val version = Properties.versionNumberString match {
case v if v.startsWith("2.12") || v.startsWith("2.13") => v.substring(0, 4)
case v if v.startsWith("2.13") => v.substring(0, 4)
case x => throw new Exception(s"Unsupported Scala Version: $x")
}
val jarDir = getTestResourcePath("regression-test-SPARK-8489")
Expand Down

0 comments on commit 2cc1ee4

Please sign in to comment.