[KYUUBI #6541] [AUTHZ] Fix DataSourceV2RelationTableExtractor can't get the 'database' attribute if it's a Paimon plan.

# 🔍 Description
## Issue References 🔗

This pull request fixes #6541

## Describe Your Solution 🔧
Fix an issue where DataSourceV2RelationTableExtractor#table could not fetch the 'database' attribute, which caused Ranger permission checks to fail when using the Paimon catalog.
If the 'database' attribute is not resolved from the table, fall back to the v2 relation's identifier to complete it.
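
For illustration only (the actual patch is in the diff below), the fallback amounts to: keep the database already extracted from the table descriptor, otherwise derive it from the namespace of the relation's identifier. The `Table` case class and `resolveDatabase` helper in this sketch are simplified stand-ins, not Kyuubi's real types:

```scala
import org.apache.spark.sql.connector.catalog.Identifier

// Simplified stand-in for the authz plugin's table descriptor (hypothetical shape).
case class Table(catalog: Option[String], database: Option[String], table: String)

// Sketch of the fallback: prefer the database already resolved from the table,
// otherwise take it from the relation identifier's namespace.
def resolveDatabase(table: Table, identifier: Option[Identifier]): Option[String] =
  table.database.orElse(identifier.map(_.namespace().mkString(".")))

// A Paimon relation may leave `database` empty while its identifier still
// carries the namespace, e.g. Identifier.of(Array("paimon_ns"), "table1").
val paimonTable = Table(Some("paimon_catalog"), None, "table1")
val paimonId = Some(Identifier.of(Array("paimon_ns"), "table1"))
assert(resolveDatabase(paimonTable, paimonId).contains("paimon_ns"))
```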

## Types of changes 🔖

- [x] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚰️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests

---

# Checklist 📝

- [ ] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

**Be nice. Be informative.**

Closes #6544 from promising-forever/issues/6541.

Closes #6541

6549f85 [caoyu] Fix test failure, paimon-spark run on Scala 2.12.
c1a0921 [caoyu] Optimising the 'database' capture logic
69fb0bc [caoyu] PolicyJsonFileGenerator#genPolicies add paimonNamespace
c89c70b [caoyu] [KYUUBI #6541] [AUTHZ] Fix DataSourceV2RelationTableExtractor#table can't get the 'database' attribute if it's a Paimon plan.
77f121b [caoyu] [KYUUBI #6541] [AUTHZ] Fix DataSourceV2RelationTableExtractor#table can't get the 'database' attribute if it's a Paimon plan.
9cfb584 [caoyu] [KYUUBI #6541] [AUTHZ] Fix DataSourceV2RelationTableExtractor#table can't get the 'database' attribute if it's a Paimon plan.

Authored-by: caoyu <[email protected]>
Signed-off-by: Bowen Liang <[email protected]>
caoyu authored and bowenliang123 committed Jul 28, 2024
1 parent 8f37390 commit d9d2109
Showing 5 changed files with 52 additions and 3 deletions.
@@ -199,7 +199,15 @@ class DataSourceV2RelationTableExtractor extends TableExtractor {
         lookupExtractor[TableTableExtractor].apply(spark, v2Relation.table)
           .map { table =>
             val maybeOwner = TableExtractor.getOwner(v2Relation)
-            table.copy(catalog = maybeCatalog, owner = maybeOwner)
+            val maybeDatabase: Option[String] = table.database match {
+              case Some(x) => Some(x)
+              case None =>
+                val maybeIdentifier = invokeAs[Option[AnyRef]](v2Relation, "identifier")
+                maybeIdentifier.flatMap { id =>
+                  lookupExtractor[IdentifierTableExtractor].apply(spark, id)
+                }.flatMap(table => table.database)
+            }
+            table.copy(catalog = maybeCatalog, database = maybeDatabase, owner = maybeOwner)
           }
       case _ => None
     }
@@ -173,7 +173,7 @@ class PolicyJsonFileGenerator extends AnyFunSuite {
     name = "all - database, udf",
     description = "Policy for all - database, udf",
     resources = Map(
-      databaseRes(defaultDb, sparkCatalog, icebergNamespace, namespace1),
+      databaseRes(defaultDb, sparkCatalog, icebergNamespace, namespace1, paimonNamespace),
       allTableRes,
       allColumnRes),
     policyItems = List(
@@ -229,7 +229,7 @@
     "isAuditEnabled" : true,
     "resources" : {
       "database" : {
-        "values" : [ "default", "spark_catalog", "iceberg_ns", "ns1" ],
+        "values" : [ "default", "spark_catalog", "iceberg_ns", "ns1", "paimon_ns" ],
         "isExcludes" : false,
         "isRecursive" : false
       },
@@ -42,6 +42,7 @@ object RangerTestNamespace {
   val sparkCatalog = "spark_catalog"
   val icebergNamespace = "iceberg_ns"
   val hudiNamespace = "hudi_ns"
+  val paimonNamespace = "paimon_ns"
   val deltaNamespace = "delta_ns"
   val namespace1 = "ns1"
   val namespace2 = "ns2"
@@ -16,6 +16,8 @@
  */
 package org.apache.kyuubi.plugin.spark.authz.ranger
 
+import scala.util.Properties
+
 import org.scalatest.Outcome
 
 import org.apache.kyuubi.Utils
@@ -32,6 +34,7 @@ class PaimonCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   override protected val catalogImpl: String = "hive"
   private def isSupportedVersion = true
 
+  val scalaVersion: String = Properties.versionString
   val catalogV2 = "paimon_catalog"
   val namespace1 = "paimon_ns"
   val table1 = "table1"
@@ -81,4 +84,41 @@ class PaimonCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       doAs(admin, createTable)
     }
   }
+
+  test("[KYUUBI #6541] INSERT/SELECT TABLE") {
+    val tName = "t_paimon"
+
+    /**
+     * paimon-spark run on Scala 2.12.
+     */
+    if (scalaVersion.startsWith("version 2.12")) {
+      withCleanTmpResources(Seq((s"$catalogV2.$namespace1.$tName", "table"))) {
+
+        doAs(bob, sql(createTableSql(namespace1, tName)))
+
+        interceptEndsWith[AccessControlException] {
+          doAs(someone, sql(s"INSERT INTO $catalogV2.$namespace1.$tName VALUES (1, 'name_1')"))
+        }(s"does not have [update] privilege on [$namespace1/$tName]")
+        doAs(bob, sql(s"INSERT INTO $catalogV2.$namespace1.$tName VALUES (1, 'name_1')"))
+        doAs(bob, sql(s"INSERT INTO $catalogV2.$namespace1.$tName VALUES (1, 'name_2')"))
+
+        interceptEndsWith[AccessControlException] {
+          doAs(someone, sql(s"SELECT id FROM $catalogV2.$namespace1.$tName").show())
+        }(s"does not have [select] privilege on [$namespace1/$tName/id]")
+        doAs(bob, sql(s"SELECT name FROM $catalogV2.$namespace1.$tName").show())
+      }
+    }
+
+  }
+
+  def createTableSql(namespace: String, table: String): String =
+    s"""
+       |CREATE TABLE IF NOT EXISTS $catalogV2.$namespace.$table
+       |(id int, name string)
+       |USING paimon
+       |OPTIONS (
+       | 'primary-key' = 'id'
+       |)
+       |""".stripMargin
+
 }
