diff --git a/build.sbt b/build.sbt
index 1c4b70c5d8..fac44f06cd 100644
--- a/build.sbt
+++ b/build.sbt
@@ -25,6 +25,7 @@ val coreDependencies = Seq(
   "org.apache.spark" %% "spark-mllib" % sparkVersion % "compile",
   "org.apache.spark" %% "spark-avro" % sparkVersion % "provided",
   "org.apache.spark" %% "spark-tags" % sparkVersion % "test",
+  "com.globalmentor" % "hadoop-bare-naked-local-fs" % "0.1.0" % "test",
   "org.scalatest" %% "scalatest" % "3.2.14" % "test")
 val extraDependencies = Seq(
   "org.scalactic" %% "scalactic" % "3.2.14",
diff --git a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnamolyDetectionSuite.scala b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnamolyDetectionSuite.scala
index 7a4150a0d0..d0204c2365 100644
--- a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnamolyDetectionSuite.scala
+++ b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnamolyDetectionSuite.scala
@@ -4,7 +4,7 @@
 package com.microsoft.azure.synapse.ml.cognitive.anomaly
 
 import com.microsoft.azure.synapse.ml.Secrets
-import com.microsoft.azure.synapse.ml.core.test.base.TestBase
+import com.microsoft.azure.synapse.ml.core.test.base.{Flaky, TestBase}
 import com.microsoft.azure.synapse.ml.core.test.benchmarks.DatasetUtils
 import com.microsoft.azure.synapse.ml.core.test.fuzzing.{EstimatorFuzzing, TestObject, TransformerFuzzing}
 import org.apache.hadoop.conf.Configuration
@@ -62,7 +62,8 @@ trait MADTestUtils extends TestBase with AnomalyKey with StorageCredentials {
 
 }
 
-class SimpleFitMultivariateAnomalySuite extends EstimatorFuzzing[SimpleFitMultivariateAnomaly] with MADTestUtils {
+class SimpleFitMultivariateAnomalySuite extends EstimatorFuzzing[SimpleFitMultivariateAnomaly]
+  with MADTestUtils with Flaky {
 
   def simpleMultiAnomalyEstimator: SimpleFitMultivariateAnomaly = new SimpleFitMultivariateAnomaly()
     .setSubscriptionKey(anomalyKey)
diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/base/TestBase.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/base/TestBase.scala
index 56d1f7581f..41c4583593 100644
--- a/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/base/TestBase.scala
+++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/base/TestBase.scala
@@ -6,7 +6,9 @@ package com.microsoft.azure.synapse.ml.core.test.base
 import breeze.linalg.norm.Impl
 import breeze.linalg.{*, norm, DenseMatrix => BDM, DenseVector => BDV}
 import breeze.math.Field
+import com.globalmentor.apache.hadoop.fs.BareLocalFileSystem
 import org.apache.commons.io.FileUtils
+import org.apache.hadoop.fs.FileSystem
 import org.apache.spark._
 import org.apache.spark.ml._
 import org.apache.spark.sql.{DataFrame, _}
@@ -67,6 +69,8 @@ trait SparkSessionManagement {
       .config(sparkConfiguration)
       .getOrCreate()
     sess.sparkContext.setLogLevel(logLevel)
+    sess.sparkContext.hadoopConfiguration
+      .setClass("fs.file.impl", classOf[BareLocalFileSystem], classOf[FileSystem])
     sess
   }
 
diff --git a/pipeline.yaml b/pipeline.yaml
index 509aabb578..b7a1bc4532 100644
--- a/pipeline.yaml
+++ b/pipeline.yaml
@@ -93,7 +93,6 @@ jobs:
       source activate synapseml
       sbt packagePython
      sbt publishBlob publishDocs publishR publishPython uploadNotebooks
-      sbt publishSigned
       sbt genBuildInfo
       echo "##vso[task.uploadsummary]$(pwd)/target/Build.md"
     displayName: Publish Artifacts
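
For context, the core of this patch is the `fs.file.impl` override in `TestBase.scala`: registering `BareLocalFileSystem` (from the new `hadoop-bare-naked-local-fs` test dependency) as Hadoop's handler for the `file` scheme lets test Spark sessions read and write the local filesystem without Hadoop's native helpers (e.g. winutils on Windows). Below is a minimal standalone sketch of that technique, assuming a local-mode `SparkSession` and that dependency on the classpath; the builder settings and paths are illustrative, not taken from the repo.

```scala
import com.globalmentor.apache.hadoop.fs.BareLocalFileSystem
import org.apache.hadoop.fs.FileSystem
import org.apache.spark.sql.SparkSession

// Build a local-mode session (illustrative settings, not from the repo).
val spark = SparkSession.builder()
  .master("local[*]")
  .appName("bare-local-fs-demo")
  .getOrCreate()

// Same override as the TestBase change: serve the "file" scheme with
// BareLocalFileSystem instead of Hadoop's default LocalFileSystem.
spark.sparkContext.hadoopConfiguration
  .setClass("fs.file.impl", classOf[BareLocalFileSystem], classOf[FileSystem])

// Local reads and writes now go through BareLocalFileSystem, e.g.:
// spark.range(10).write.mode("overwrite").parquet("/tmp/bare-fs-demo")
```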