
Update MockTaskContext to support new functions added in Spark-4.0 [databricks] #11972

Merged · 7 commits · Jan 17, 2025

Changes from 2 commits
New file (shims for Spark 3.2.0 through 3.5.4):
@@ -0,0 +1,135 @@
/*
 * Copyright (c) 2020-2025, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*** spark-rapids-shim-json-lines
{"spark": "320"}
{"spark": "321"}
{"spark": "321cdh"}
{"spark": "322"}
{"spark": "323"}
{"spark": "324"}
{"spark": "330"}
{"spark": "330cdh"}
{"spark": "330db"}
{"spark": "331"}
{"spark": "332"}
{"spark": "332cdh"}
{"spark": "332db"}
{"spark": "333"}
{"spark": "334"}
{"spark": "340"}
{"spark": "341"}
{"spark": "341db"}
{"spark": "342"}
{"spark": "343"}
{"spark": "344"}
{"spark": "350"}
{"spark": "350db143"}
{"spark": "351"}
{"spark": "352"}
{"spark": "353"}
{"spark": "354"}
spark-rapids-shim-json-lines ***/
package org.apache.spark.sql.rapids.metrics.source

import java.util
import java.util.Properties

import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer

import org.apache.spark.TaskContext
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.metrics.source.Source
import org.apache.spark.resource.ResourceInformation
import org.apache.spark.scheduler.TaskLocality
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.util.{AccumulatorV2, TaskCompletionListener, TaskFailureListener}

class MockTaskContext(taskAttemptId: Long, partitionId: Int) extends TaskContext {

  val listeners = new ListBuffer[TaskCompletionListener]

  override def isCompleted(): Boolean = false

  override def isInterrupted(): Boolean = false

  override def addTaskCompletionListener(listener: TaskCompletionListener): TaskContext = {
    listeners += listener
    this
  }

  override def addTaskFailureListener(listener: TaskFailureListener): TaskContext = this

  override def stageId(): Int = 1

  override def stageAttemptNumber(): Int = 1

  override def partitionId(): Int = partitionId

  override def attemptNumber(): Int = 1

  override def taskAttemptId(): Long = taskAttemptId

  override def getLocalProperty(key: String): String = null

  override def resources(): Map[String, ResourceInformation] = Map()

  override def resourcesJMap(): util.Map[String, ResourceInformation] = resources().asJava

  override def taskMetrics(): TaskMetrics = new TaskMetrics

  override def getMetricsSources(sourceName: String): Seq[Source] = Seq.empty

  override private[spark] def killTaskIfInterrupted(): Unit = {}

  override def getKillReason() = None

  override def taskMemoryManager() = null

  override private[spark] def registerAccumulator(a: AccumulatorV2[_, _]): Unit = {}

  override private[spark] def setFetchFailed(fetchFailed: FetchFailedException): Unit = {}

  override private[spark] def markInterrupted(reason: String): Unit = {}

  override private[spark] def markTaskFailed(error: Throwable): Unit = {}

  override private[spark] def markTaskCompleted(error: Option[Throwable]): Unit = {}

  override private[spark] def fetchFailed = None

  override private[spark] def getLocalProperties = new Properties()

  def cpus(): Int = 2

  def numPartitions(): Int = 2

  def taskLocality(): TaskLocality.TaskLocality = TaskLocality.ANY

  /**
   * Exposed so tests can invoke onTaskCompletion on all registered listeners.
   */
  def markTaskComplete(): Unit = {
    listeners.foreach(_.onTaskCompletion(this))
  }

  /**
   * This method was introduced in Spark 3.5.1. It is not shimmed; it is added to this common
   * class without the `override` keyword so the class also compiles against older Spark
   * versions that do not define it.
   */
  def isFailed(): Boolean = false
}
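
For reference, a minimal sketch of how a test could drive this mock's completion listeners. The object name, main-method harness, and assertion are illustrative assumptions, not code from this PR:

import org.apache.spark.TaskContext
import org.apache.spark.sql.rapids.metrics.source.MockTaskContext
import org.apache.spark.util.TaskCompletionListener

object MockTaskContextUsage {
  def main(args: Array[String]): Unit = {
    val ctx = new MockTaskContext(taskAttemptId = 42L, partitionId = 0)

    // Register a completion listener, as the RAPIDS code under test would.
    var completed = false
    ctx.addTaskCompletionListener(new TaskCompletionListener {
      override def onTaskCompletion(context: TaskContext): Unit = completed = true
    })

    // markTaskComplete() is the mock's hook for firing the registered listeners.
    ctx.markTaskComplete()
    assert(completed, "completion listener should have fired")
  }
}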
Modified file (Spark 4.0.0 shim):
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020-2023, NVIDIA CORPORATION.
+ * Copyright (c) 2025, NVIDIA CORPORATION.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,8 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/*** spark-rapids-shim-json-lines
+{"spark": "400"}
+spark-rapids-shim-json-lines ***/
 package org.apache.spark.sql.rapids.metrics.source
 
+import java.io.Closeable
 import java.util
 import java.util.Properties
 
@@ -84,6 +89,14 @@ class MockTaskContext(taskAttemptId: Long, partitionId: Int) extends TaskContext

   override private[spark] def getLocalProperties = new Properties()
 
+  override private[spark] def interruptible(): Boolean = false
+
+  override private[spark] def pendingInterrupt(
+      threadToInterrupt: Option[Thread], reason: String): Unit = {}
+
+  override private[spark] def createResourceUninterruptibly[T <: Closeable](
+      resourceBuilder: => T): T = resourceBuilder
+
   def cpus(): Int = 2
 
   def numPartitions(): Int = 2
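
To illustrate the new Spark 4.0 entry points, a hedged sketch of exercising them through this mock. Because the methods are private[spark], the caller must live in a package under org.apache.spark; the object name and main-method harness are assumptions, not part of the PR:

package org.apache.spark.sql.rapids.metrics.source

import java.io.{ByteArrayInputStream, Closeable}

object Spark400MockTaskContextUsage {
  def main(args: Array[String]): Unit = {
    val ctx = new MockTaskContext(taskAttemptId = 1L, partitionId = 0)

    // The mock never reports the task as interruptible.
    assert(!ctx.interruptible())

    // createResourceUninterruptibly simply evaluates the by-name builder.
    val stream: Closeable = ctx.createResourceUninterruptibly(
      new ByteArrayInputStream(Array[Byte](1, 2, 3)))
    stream.close()

    // pendingInterrupt is a no-op in the mock.
    ctx.pendingInterrupt(None, "shutdown requested")
  }
}

This sketch compiles only against the Spark 4.0.0 shim, since the other shims' MockTaskContext does not define these members.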