Add docstring to Dataset methods
EnricoMi committed Sep 25, 2023
1 parent 75b7387 commit 9a5ef1e
Showing 1 changed file with 52 additions and 0 deletions.
52 changes: 52 additions & 0 deletions sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2248,10 +2248,62 @@ class Dataset[T] private[sql](
observation.on(this, expr, exprs: _*)
}

/**
* Show metrics in the Spark UI. This adds a node to the Spark query plan
* that shows the values of the given SQLMetrics:
*
* {{{
* val metric = SQLMetrics.createMetric(sparkContext, "sum")
* val df = spark.range(10).as[Long]
* .map { v: Long => metric.add(v); v }
* .showMetrics(metric)
* .collect()
* }}}
*
* The query plan in the Spark UI then contains this node:
*
* ↓
* ╔═════════════╗
* ║ShowMetrics ║
* ║ ║
* ║ sum: 45 ║
* ╚══════╤══════╝
* ↓
*
*
* @group typedrel
* @since 4.0.0
*/
def showMetrics(metrics: SQLMetric*): Dataset[T] = withTypedPlan {
ShowMetrics(None, metrics.map(_.id), logicalPlan)
}

/**
* Show metrics in the Spark UI under the given label. This adds a node to the
* Spark query plan that shows the values of the given SQLMetrics:
*
* {{{
* val metric = SQLMetrics.createMetric(sparkContext, "sum")
* val df = spark.range(10).as[Long]
* .map { v: Long => metric.add(v); v }
* .showMetrics("My metrics", metric)
* .collect()
* }}}
*
* The query plan in the Spark UI then contains this node:
*
* ↓
* ╔═════════════╗
* ║My metrics ║
* ║ ║
* ║ sum: 45 ║
* ╚══════╤══════╝
* ↓
*
*
* @group typedrel
* @since 4.0.0
*/
def showMetrics(label: String, metrics: SQLMetric*): Dataset[T] = withTypedPlan {
ShowMetrics(Some(label), metrics.map(_.id), logicalPlan)
}
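For context, a minimal end-to-end sketch of how the two overloads added here could be combined, based on the Scaladoc examples above. Note that showMetrics is introduced by this commit and is not part of a released Spark API; the second metric ("count"), the application name, and the imports are illustrative assumptions.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.execution.metric.SQLMetrics

    val spark = SparkSession.builder().appName("ShowMetricsExample").getOrCreate()
    import spark.implicits._

    // Metrics created the same way as in the Scaladoc examples above.
    val sum   = SQLMetrics.createMetric(spark.sparkContext, "sum")
    val count = SQLMetrics.createMetric(spark.sparkContext, "count")  // illustrative extra metric

    val values = spark.range(10).as[Long]
      .map { v => sum.add(v); count.add(1); v }
      // Unlabelled overload: the plan node appears as "ShowMetrics" in the Spark UI.
      .showMetrics(sum)
      // Labelled overload: the plan node appears under the given label.
      .showMetrics("My metrics", sum, count)
      .collect()

Since each call wraps the current logical plan in its own ShowMetrics node, chaining both overloads adds two nodes to the query plan, each rendering the values of the metrics passed to it.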
