diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index 16a334be5..cc11ef89f 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -36,12 +36,11 @@ jobs:
           ./secrets/decrypt.sh secrets/secring.asc.gpg ~/.sbt/gpg/secring.asc
       - name: Run tests
         env:
-          TEST_OIDC_READ_CLIENT_ID: ${{ secrets.BLUEFIELD_CLIENT_ID_2 }}
-          TEST_OIDC_READ_CLIENT_SECRET: ${{ secrets.BLUEFIELD_CLIENT_SECRET_2 }}
-          TEST_OIDC_READ_TENANT: ${{ secrets.TEST_AAD_TENANT_BLUEFIELD_2 }}
           TEST_CLIENT_ID_BLUEFIELD: ${{ secrets.BLUEFIELD_CLIENT_ID_2 }}
           TEST_CLIENT_SECRET_BLUEFIELD: ${{ secrets.BLUEFIELD_CLIENT_SECRET_2 }}
           TEST_AAD_TENANT_BLUEFIELD: ${{ secrets.TEST_AAD_TENANT_BLUEFIELD_2 }}
+          TEST_PROJECT: "spark-datasource-bluefield-tests"
+          TEST_CLUSTER: "bluefield.cognitedata.com"
         run: |
           TEST="test +Test/compile"
           if [ ${{github.ref }} == "refs/heads/master" ]; then
diff --git a/src/test/scala/cognite/spark/v1/DataPointsRelationTest.scala b/src/test/scala/cognite/spark/v1/DataPointsRelationTest.scala
index 0c8b8d393..8000d0614 100644
--- a/src/test/scala/cognite/spark/v1/DataPointsRelationTest.scala
+++ b/src/test/scala/cognite/spark/v1/DataPointsRelationTest.scala
@@ -1061,7 +1061,6 @@ class DataPointsRelationTest
 
     val df = spark.read
       .format("cognite.spark.v1")
-      .option("apiKey", jetfiretest2ApiKey)
       .option("type", "datapoints")
       .option("collectMetrics", "true")
       .option("metricsPrefix", metricsPrefix)
diff --git a/src/test/scala/cognite/spark/v1/SdkV1RddTest.scala b/src/test/scala/cognite/spark/v1/SdkV1RddTest.scala
index 8b9b795dc..4c6d2f4ac 100644
--- a/src/test/scala/cognite/spark/v1/SdkV1RddTest.scala
+++ b/src/test/scala/cognite/spark/v1/SdkV1RddTest.scala
@@ -33,6 +33,13 @@ class SdkV1RddTest extends FlatSpec with Matchers with ParallelTestExecution wit
   implicit val implicitBackend: SttpBackend[IO, Any] = CdpConnector.retryingSttpBackend(3, 5)
 
   val sdkRdd = {
+    val readOidcCredentials = OAuth2.ClientCredentials(
+      tokenUri = uri"https://login.microsoftonline.com/fake-tenant/oauth2/v2.0/token",
+      clientId = "fake_client_id",
+      clientSecret = "fake_secret",
+      scopes = List("https://api.cognitedata.com/.default"),
+      cdfProjectName = "fake_project"
+    )
     val relationConfig = getDefaultConfig(
       CdfSparkAuth.OAuth2ClientCredentials(readOidcCredentials)(implicitBackend),
       readOidcCredentials.cdfProjectName
diff --git a/src/test/scala/cognite/spark/v1/SparkTest.scala b/src/test/scala/cognite/spark/v1/SparkTest.scala
index 836c73cc8..27557d85b 100644
--- a/src/test/scala/cognite/spark/v1/SparkTest.scala
+++ b/src/test/scala/cognite/spark/v1/SparkTest.scala
@@ -40,9 +40,10 @@ trait SparkTest {
     val clientId = sys.env("TEST_CLIENT_ID_BLUEFIELD")
     val clientSecret = sys.env("TEST_CLIENT_SECRET_BLUEFIELD")
     private val aadTenant = sys.env("TEST_AAD_TENANT_BLUEFIELD")
+    val project = sys.env("TEST_PROJECT")
+    val cluster = sys.env("TEST_CLUSTER")
     val tokenUri = s"https://login.microsoftonline.com/$aadTenant/oauth2/v2.0/token"
-    val project = "jetfiretest2"
-    val scopes = "https://api.cognitedata.com/.default"
+    val scopes = s"https://$cluster/.default"
   }
 
   val writeCredentials = OAuth2.ClientCredentials(
@@ -59,7 +60,7 @@ trait SparkTest {
   val writeClient: GenericClient[IO] = new GenericClient(
     applicationName = "jetfire-test",
     projectName = writeCredentials.cdfProjectName,
-    baseUrl = s"https://api.cognitedata.com",
+    baseUrl = s"https://${OIDCWrite.cluster}",
     authProvider = writeAuthProvider,
     apiVersion = None,
     clientTag = None,
@@ -84,42 +85,15 @@ trait SparkTest {
       .option("scopes", OIDCWrite.scopes)
   }
 
-  private val readClientId = System.getenv("TEST_OIDC_READ_CLIENT_ID")
-  // readClientSecret has to be renewed every 180 days at https://hub.cognite.com/open-industrial-data-211
-  private val readClientSecret = System.getenv("TEST_OIDC_READ_CLIENT_SECRET")
-  private val readAadTenant = System.getenv("TEST_OIDC_READ_TENANT")
-
-  assert(
-    readClientId != null && !readClientId.isEmpty,
-    "Environment variable \"TEST_OIDC_READ_CLIENT_ID\" was not set")
-  assert(
-    readClientSecret != null && !readClientSecret.isEmpty,
-    "Environment variable \"TEST_OIDC_READ_CLIENT_SECRET\" was not set")
-  assert(
-    readAadTenant != null && !readAadTenant.isEmpty,
-    "Environment variable \"TEST_OIDC_READ_TENANT\" was not set")
-
-  private val readTokenUri = s"https://login.microsoftonline.com/$readAadTenant/oauth2/v2.0/token"
-
-  val readOidcCredentials = OAuth2.ClientCredentials(
-    tokenUri = uri"$readTokenUri",
-    clientId = readClientId,
-    clientSecret = readClientSecret,
-    scopes = List("https://api.cognitedata.com/.default"),
-    cdfProjectName = "publicdata"
-  )
-
   def dataFrameReaderUsingOidc: DataFrameReader = spark.read
     .format("cognite.spark.v1")
-    .option("tokenUri", readTokenUri)
-    .option("clientId", readClientId)
-    .option("clientSecret", readClientSecret)
-    .option("project", "publicdata")
-    .option("scopes", "https://api.cognitedata.com/.default")
-
-  // not needed to run tests, only for replicating some problems specific to this tenant
-  lazy val jetfiretest2ApiKey = System.getenv("TEST_APU_KEY_JETFIRETEST2")
+    .option("tokenUri", OIDCWrite.tokenUri)
+    .option("clientId", OIDCWrite.clientId)
+    .option("clientSecret", OIDCWrite.clientSecret)
+    .option("project", OIDCWrite.project)
+    .option("scopes", OIDCWrite.scopes)
+  val testDataSetId = 86163806167772L