
Commit f04b745
Fix "Implicit definition should have explicit type"
jiyong-lee-dev committed Apr 29, 2024
1 parent 51e249e commit f04b745
Showing 72 changed files with 140 additions and 149 deletions.
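
The changes below address the Scala 3 migration warning "Implicit definition should have explicit type", which Scala 2.13 emits under the -Xsource:3 flag. An implicit whose type is inferred can silently change the type it is resolved at whenever its right-hand side changes, so each implicit val gains an explicit annotation. A minimal sketch of the pattern applied throughout (names here are illustrative, not taken from the diff):

    import org.json4s.{DefaultFormats, Formats}

    object Before {
      // Inferred type is the singleton DefaultFormats.type;
      // Scala 2.13 with -Xsource:3 warns here.
      implicit val formats = DefaultFormats
    }

    object After {
      // Explicit type: implicit resolution now depends on the declared
      // Formats, not on whatever the right-hand side happens to infer.
      implicit val formats: Formats = DefaultFormats
    }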
4 changes: 2 additions & 2 deletions R/pkg/tests/fulltests/test_client.R
@@ -37,7 +37,7 @@ test_that("multiple packages don't produce a warning", {

 test_that("sparkJars sparkPackages as character vectors", {
   args <- generateSparkSubmitArgs("", "", c("one.jar", "two.jar", "three.jar"), "",
-                                  c("com.databricks:spark-avro_2.12:2.0.1"))
+                                  c("com.databricks:spark-avro_2.13:2.0.1"))
   expect_match(args, "--jars one.jar,two.jar,three.jar")
-  expect_match(args, "--packages com.databricks:spark-avro_2.12:2.0.1")
+  expect_match(args, "--packages com.databricks:spark-avro_2.13:2.0.1")
 })
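
(The spark-avro_2.12 to spark-avro_2.13 coordinate change here, like the _2.12 to _2.13 artifactId renames in the pom.xml files below, tracks the Scala binary-version suffix of the build rather than the implicit-type fix itself; these hunks appear to come from the same branch being moved to Scala 2.13.)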
4 changes: 2 additions & 2 deletions common/kvstore/pom.xml
@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>

-  <artifactId>spark-kvstore_2.12</artifactId>
+  <artifactId>spark-kvstore_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Local DB</name>
   <url>https://spark.apache.org/</url>
4 changes: 2 additions & 2 deletions common/network-common/pom.xml
@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>

-  <artifactId>spark-network-common_2.12</artifactId>
+  <artifactId>spark-network-common_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Networking</name>
   <url>https://spark.apache.org/</url>
4 changes: 2 additions & 2 deletions common/network-shuffle/pom.xml
@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>

-  <artifactId>spark-network-shuffle_2.12</artifactId>
+  <artifactId>spark-network-shuffle_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Shuffle Streaming Service</name>
   <url>https://spark.apache.org/</url>
4 changes: 2 additions & 2 deletions common/unsafe/pom.xml
@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>

-  <artifactId>spark-unsafe_2.12</artifactId>
+  <artifactId>spark-unsafe_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Unsafe</name>
   <url>https://spark.apache.org/</url>
4 changes: 2 additions & 2 deletions connector/docker-integration-tests/pom.xml
@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>

-  <artifactId>spark-docker-integration-tests_2.12</artifactId>
+  <artifactId>spark-docker-integration-tests_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Docker Integration Tests</name>
   <url>https://spark.apache.org/</url>
@@ -21,14 +21,14 @@ import scala.collection.mutable.HashMap
 import scala.util.control.NonFatal

 import org.apache.kafka.common.TopicPartition
-import org.json4s.NoTypeHints
+import org.json4s.{Formats, NoTypeHints}
 import org.json4s.jackson.Serialization

 /**
  * Utilities for converting Kafka related objects to and from json.
  */
 private object JsonUtils {
-  private implicit val formats = Serialization.formats(NoTypeHints)
+  private implicit val formats: Formats = Serialization.formats(NoTypeHints)

   /**
    * Read TopicPartitions from json string
@@ -96,10 +96,8 @@ private object JsonUtils {
    */
   def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
     val result = new HashMap[String, HashMap[Int, Long]]()
-    implicit val order = new Ordering[TopicPartition] {
-      override def compare(x: TopicPartition, y: TopicPartition): Int = {
+    implicit val order: Ordering[TopicPartition] = (x: TopicPartition, y: TopicPartition) => {
       Ordering.Tuple2[String, Int].compare((x.topic, x.partition), (y.topic, y.partition))
     }
-    }
     val partitions = partitionOffsets.keySet.toSeq.sorted // sort for more determinism
     partitions.foreach { tp =>
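
Besides gaining the explicit Ordering[TopicPartition] annotation, the anonymous Ordering subclass above is rewritten as a lambda: since Scala 2.12, a function literal can implement any trait with a single abstract method (SAM), and Ordering's only abstract method is compare. A self-contained sketch of the same conversion (the demo values are illustrative):

    import org.apache.kafka.common.TopicPartition

    object OrderingSamDemo extends App {
      // The lambda implements Ordering.compare directly, replacing
      // `new Ordering[TopicPartition] { override def compare ... }`.
      implicit val order: Ordering[TopicPartition] = (x: TopicPartition, y: TopicPartition) =>
        Ordering.Tuple2[String, Int].compare((x.topic, x.partition), (y.topic, y.partition))

      val tps = Seq(new TopicPartition("t", 1), new TopicPartition("s", 2))
      println(tps.sorted.mkString(", ")) // sorted by (topic, partition): s-2, t-1
    }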
@@ -30,6 +30,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration._
 import scala.sys.process._

+import org.json4s.Formats
 import org.json4s.jackson.JsonMethods

 import org.apache.spark.{SparkConf, SparkContext}
@@ -340,7 +341,7 @@ private object FaultToleranceTest extends App with Logging {
 private class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File)
   extends Logging {

-  implicit val formats = org.json4s.DefaultFormats
+  implicit val formats: Formats = org.json4s.DefaultFormats
   var state: RecoveryState.Value = _
   var liveWorkerIPs: List[String] = _
   var numLiveApps = 0
@@ -383,7 +384,7 @@ private class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile
 private class TestWorkerInfo(val ip: String, val dockerId: DockerId, val logFile: File)
   extends Logging {

-  implicit val formats = org.json4s.DefaultFormats
+  implicit val formats: Formats = org.json4s.DefaultFormats

   logDebug("Created worker: " + this)
@@ -23,7 +23,7 @@ import java.nio.file.Files
 import scala.collection.mutable
 import scala.util.control.NonFatal

-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.json4s.jackson.JsonMethods.{compact, render}

 import org.apache.spark.SparkException
@@ -114,7 +114,7 @@ private[spark] object StandaloneResourceUtils extends Logging {
   private def writeResourceAllocationJson[T](
       allocations: Seq[T],
       jsonFile: File): Unit = {
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     val allocationJson = Extraction.decompose(allocations)
     Files.write(jsonFile.toPath, compact(render(allocationJson)).getBytes())
   }
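
All of the json4s hunks follow the same recipe: the implicit that extract/decompose look up is a Formats, so Formats is the type to declare; without the annotation the compiler infers the singleton type DefaultFormats.type, which still resolves but trips the warning. A small usage sketch (the Allocation class and JSON payload are made up for illustration):

    import org.json4s.{DefaultFormats, Formats}
    import org.json4s.jackson.JsonMethods.parse

    // Illustrative payload type, not from the Spark sources.
    case class Allocation(id: String, addresses: Seq[String])

    object ExtractDemo extends App {
      // extract[T] takes an implicit Formats; annotating the val as Formats
      // (rather than letting DefaultFormats.type be inferred) is the commit's pattern.
      implicit val formats: Formats = DefaultFormats

      val alloc = parse("""{"id":"gpu","addresses":["0","1"]}""").extract[Allocation]
      println(alloc) // Allocation(gpu,List(0, 1))
    }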
@@ -60,8 +60,6 @@ private[spark] class CoarseGrainedExecutorBackend(

   import CoarseGrainedExecutorBackend._

-  private implicit val formats = DefaultFormats
-
   private[spark] val stopping = new AtomicBoolean(false)
   var executor: Executor = null
   @volatile var driver: Option[RpcEndpointRef] = None
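
Here the warning is resolved by deletion rather than annotation: the private implicit val formats in CoarseGrainedExecutorBackend appears to have had no remaining users in the class, which also helps explain why the commit removes more lines (149) than it adds (140).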
@@ -19,7 +19,7 @@ package org.apache.spark.resource

 import scala.util.control.NonFatal

-import org.json4s.{DefaultFormats, Extraction, JValue}
+import org.json4s.{DefaultFormats, Extraction, JValue, Formats}
 import org.json4s.jackson.JsonMethods._

 import org.apache.spark.SparkException
@@ -69,7 +69,7 @@ private[spark] object ResourceInformation {
    * Parses a JSON string into a [[ResourceInformation]] instance.
    */
   def parseJson(json: String): ResourceInformation = {
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     try {
       parse(json).extract[ResourceInformationJson].toResourceInformation
     } catch {
@@ -80,7 +80,7 @@
   }

   def parseJson(json: JValue): ResourceInformation = {
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     try {
       json.extract[ResourceInformationJson].toResourceInformation
     } catch {
@@ -22,7 +22,7 @@ import java.util.Optional

 import scala.util.control.NonFatal

-import org.json4s.DefaultFormats
+import org.json4s.{DefaultFormats, Formats}
 import org.json4s.jackson.JsonMethods._

 import org.apache.spark.{SparkConf, SparkException}
@@ -252,7 +252,7 @@ private[spark] object ResourceUtils extends Logging {

   def parseAllocatedFromJsonFile(resourcesFile: String): Seq[ResourceAllocation] = {
     withResourcesJson[ResourceAllocation](resourcesFile) { json =>
-      implicit val formats = DefaultFormats
+      implicit val formats: Formats = DefaultFormats
       parse(json).extract[Seq[ResourceAllocation]]
     }
   }
@@ -31,7 +31,7 @@ private [spark] class JobDuration(val value: AtomicLong) extends Gauge[Long] {

 private[spark] class AppStatusSource extends Source {

-  override implicit val metricRegistry = new MetricRegistry()
+  override implicit val metricRegistry: MetricRegistry = new MetricRegistry()

   override val sourceName = "appStatus"
@@ -23,7 +23,7 @@ import java.util.concurrent.TimeUnit

 import scala.collection.JavaConverters._
 import scala.collection.mutable
-import scala.concurrent.{ExecutionContext, Future, TimeoutException}
+import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, Future, TimeoutException}
 import scala.util.Random
 import scala.util.control.NonFatal

@@ -94,7 +94,7 @@ class BlockManagerMasterEndpoint(

   private val askThreadPool =
     ThreadUtils.newDaemonCachedThreadPool("block-manager-ask-thread-pool", 100)
-  private implicit val askExecutionContext = ExecutionContext.fromExecutorService(askThreadPool)
+  private implicit val askExecutionContext: ExecutionContextExecutorService = ExecutionContext.fromExecutorService(askThreadPool)

   private val topologyMapper = {
     val topologyMapperClassName = conf.get(
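
For the thread-pool-backed contexts, the annotated type is ExecutionContextExecutorService because that is what ExecutionContext.fromExecutorService returns; plain ExecutionContext would also silence the warning but would hide the ExecutorService half of the interface (e.g. shutdown()). A minimal sketch (pool size and names are illustrative):

    import java.util.concurrent.Executors
    import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, Future}

    object EcDemo extends App {
      // fromExecutorService returns ExecutionContextExecutorService,
      // so that is the natural explicit type for the implicit.
      private implicit val ec: ExecutionContextExecutorService =
        ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(2))

      val f = Future { 21 * 2 } // picks up the implicit ec
      f.foreach(n => println(s"result: $n"))
      Thread.sleep(100) // crude wait so the demo can print
      ec.shutdown()     // available because the type is also an ExecutorService
    }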
@@ -17,7 +17,7 @@

 package org.apache.spark.storage

-import scala.concurrent.{ExecutionContext, Future}
+import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, Future}

 import org.apache.spark.{MapOutputTracker, SparkEnv}
 import org.apache.spark.internal.Logging
@@ -38,7 +38,7 @@ class BlockManagerStorageEndpoint(

   private val asyncThreadPool =
     ThreadUtils.newDaemonCachedThreadPool("block-manager-storage-async-thread-pool", 100)
-  private implicit val asyncExecutionContext = ExecutionContext.fromExecutorService(asyncThreadPool)
+  private implicit val asyncExecutionContext: ExecutionContextExecutorService = ExecutionContext.fromExecutorService(asyncThreadPool)

   // Operations that involve removing blocks may be slow and should be done asynchronously
   override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
@@ -42,7 +42,7 @@ import org.apache.spark.storage._
 abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[SortShuffleManager])
   extends SparkFunSuite with BeforeAndAfter with LocalSparkContext
 {
-  implicit val defaultTimeout = timeout(10.seconds)
+  implicit val defaultTimeout: PatienceConfiguration.Timeout = timeout(10.seconds)
   val conf = new SparkConf()
     .setMaster("local[2]")
     .setAppName("ContextCleanerSuite")
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -32,7 +32,7 @@ import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
 import org.apache.logging.log4j.{Level, LogManager}
-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.scalatest.concurrent.Eventually
 import org.scalatest.matchers.must.Matchers._

@@ -923,7 +923,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventually
     val scriptPath = createTempScriptWithExpectedOutput(dir, "gpuDiscoveryScript",
       """{"name": "gpu","addresses":["5", "6"]}""")

-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     val gpusAllocated =
       ResourceAllocation(DRIVER_GPU_ID, Seq("0", "1", "8"))
     val ja = Extraction.decompose(Seq(gpusAllocated))
@@ -29,6 +29,7 @@ import scala.concurrent.duration._
 import com.google.common.io.{ByteStreams, Files}
 import org.apache.commons.io.{FileUtils, IOUtils}
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
+import org.json4s.Formats
 import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods
 import org.json4s.jackson.JsonMethods._
@@ -380,7 +381,7 @@ abstract class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with

   test("incomplete apps get refreshed") {
     implicit val webDriver: WebDriver = new HtmlUnitDriver
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats

     // this test dir is explicitly deleted on successful runs; retained for diagnostics when
     // not
@@ -327,7 +327,7 @@ class MasterSuite extends SparkFunSuite
   }

   test("SPARK-46888: master should reject worker kill request if decommision is disabled") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val conf = new SparkConf()
       .set(DECOMMISSION_ENABLED, false)
       .set(MASTER_UI_DECOMMISSION_ALLOW_MODE, "ALLOW")
@@ -347,7 +347,7 @@ class MasterSuite extends SparkFunSuite
   }

   test("master/worker web ui available") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val conf = new SparkConf()
     val localCluster = LocalSparkCluster(2, 2, 512, conf)
     localCluster.start()
@@ -383,7 +383,7 @@ class MasterSuite extends SparkFunSuite
   }

   test("master/worker web ui available with reverseProxy") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val conf = new SparkConf()
     conf.set(UI_REVERSE_PROXY, true)
     val localCluster = LocalSparkCluster(2, 2, 512, conf)
@@ -419,7 +419,7 @@ class MasterSuite extends SparkFunSuite
   }

   test("master/worker web ui available behind front-end reverseProxy") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val reverseProxyUrl = "http://proxyhost:8080/path/to/spark"
     val conf = new SparkConf()
     conf.set(UI_REVERSE_PROXY, true)
@@ -23,7 +23,7 @@ import java.util.function.Supplier

 import scala.concurrent.duration._

-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
 import org.mockito.ArgumentMatchers.any
@@ -60,7 +60,7 @@ class WorkerSuite extends SparkFunSuite with Matchers with BeforeAndAfter {
   }
   def conf(opts: (String, String)*): SparkConf = new SparkConf(loadDefaults = false).setAll(opts)

-  implicit val formats = DefaultFormats
+  implicit val formats: Formats = DefaultFormats

   private var _worker: Worker = _
@@ -26,7 +26,7 @@ import java.util.concurrent.atomic.AtomicInteger
 import scala.collection.concurrent.TrieMap
 import scala.concurrent.duration._

-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.json4s.JsonAST.{JArray, JObject}
 import org.json4s.JsonDSL._
 import org.mockito.ArgumentMatchers.any
@@ -50,7 +50,7 @@ import org.apache.spark.util.{SerializableBuffer, ThreadUtils, Utils}
 class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
   with LocalSparkContext with MockitoSugar {

-  implicit val formats = DefaultFormats
+  implicit val formats: Formats = DefaultFormats

   test("parsing no resources") {
     val conf = new SparkConf
@@ -21,7 +21,7 @@ import java.io.File
 import java.nio.file.{Files => JavaFiles}
 import java.util.Optional

-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}

 import org.apache.spark.{LocalSparkContext, SparkConf, SparkException, SparkFunSuite}
 import org.apache.spark.TestUtils._
@@ -117,7 +117,7 @@ class ResourceUtilsSuite extends SparkFunSuite
     val conf = new SparkConf
     assume(!(Utils.isWindows))
     withTempDir { dir =>
-      implicit val formats = DefaultFormats
+      implicit val formats: Formats = DefaultFormats
       val fpgaAddrs = Seq("f1", "f2", "f3")
       val fpgaAllocation = ResourceAllocation(EXECUTOR_FPGA_ID, fpgaAddrs)
       val resourcesFile = createTempJsonFile(
@@ -146,7 +146,7 @@ class ResourceUtilsSuite extends SparkFunSuite
     val rpId = 1
     assume(!(Utils.isWindows))
     withTempDir { dir =>
-      implicit val formats = DefaultFormats
+      implicit val formats: Formats = DefaultFormats
       val fpgaAddrs = Seq("f1", "f2", "f3")
       val fpgaAllocation = ResourceAllocation(EXECUTOR_FPGA_ID, fpgaAddrs)
       val resourcesFile = createTempJsonFile(
(The remaining changed files of the 72 are not shown here.)
