From 5c0b80361d951ae118e741c160d171381958c02d Mon Sep 17 00:00:00 2001 From: Markus Cozowicz Date: Thu, 26 Oct 2023 19:05:25 +0200 Subject: [PATCH] feat: add certified event feature categories (#2109) * add certified event feature categories * fix scala style * rename cognitiveservices to aiservices --- .../cognitive/anomaly/AnomalyDetection.scala | 8 ++--- .../MultivariateAnomalyDetection.scala | 8 ++--- .../ml/cognitive/bing/BingImageSearch.scala | 4 +-- .../synapse/ml/cognitive/face/Face.scala | 12 +++---- .../cognitive/form/FormOntologyLearner.scala | 6 ++-- .../ml/cognitive/form/FormRecognizer.scala | 18 +++++----- .../ml/cognitive/form/FormRecognizerV3.scala | 4 +-- .../ml/cognitive/language/AnalyzeText.scala | 4 +-- .../openai/OpenAIChatCompletion.scala | 9 +++-- .../cognitive/openai/OpenAICompletion.scala | 9 +++-- .../ml/cognitive/openai/OpenAIEmbedding.scala | 4 +-- .../ml/cognitive/openai/OpenAIPrompt.scala | 4 +-- .../ml/cognitive/search/AzureSearch.scala | 8 ++--- .../speech/SpeakerEmotionInference.scala | 4 +-- .../ml/cognitive/speech/SpeechToText.scala | 4 +-- .../ml/cognitive/speech/SpeechToTextSDK.scala | 12 +++---- .../ml/cognitive/speech/TextToSpeech.scala | 8 ++--- .../ml/cognitive/text/TextAnalytics.scala | 17 ++++----- .../translate/DocumentTranslator.scala | 4 +-- .../cognitive/translate/TextTranslator.scala | 14 ++++---- .../ml/cognitive/vision/ComputerVision.scala | 18 +++++----- .../ml/geospatial/CheckPointInPolygon.scala | 4 +-- .../synapse/ml/geospatial/Geocoders.scala | 6 ++-- .../synapse/ml/automl/FindBestModel.scala | 6 ++-- .../ml/automl/TuneHyperparameters.scala | 6 ++-- .../synapse/ml/causal/DoubleMLEstimator.scala | 6 ++-- .../ml/causal/OrthoForestDMLEstimator.scala | 6 ++-- .../OrthoForestVariableTransformer.scala | 4 +-- .../ml/causal/ResidualTransformer.scala | 4 +-- .../synapse/ml/explainers/ICEExplainer.scala | 4 +-- .../synapse/ml/explainers/ImageLIME.scala | 3 +- .../synapse/ml/explainers/ImageSHAP.scala | 3 +- 
.../synapse/ml/explainers/TabularLIME.scala | 3 +- .../synapse/ml/explainers/TabularSHAP.scala | 3 +- .../synapse/ml/explainers/TextLIME.scala | 3 +- .../synapse/ml/explainers/TextSHAP.scala | 3 +- .../synapse/ml/explainers/VectorLIME.scala | 3 +- .../synapse/ml/explainers/VectorSHAP.scala | 3 +- .../exploratory/AggregateBalanceMeasure.scala | 4 +-- .../DistributionBalanceMeasure.scala | 4 +-- .../exploratory/FeatureBalanceMeasure.scala | 4 +-- .../ml/featurize/CleanMissingData.scala | 6 ++-- .../synapse/ml/featurize/CountSelector.scala | 6 ++-- .../synapse/ml/featurize/DataConversion.scala | 4 +-- .../synapse/ml/featurize/Featurize.scala | 4 +-- .../synapse/ml/featurize/IndexToValue.scala | 4 +-- .../synapse/ml/featurize/ValueIndexer.scala | 6 ++-- .../ml/featurize/text/MultiNGram.scala | 4 +-- .../ml/featurize/text/PageSplitter.scala | 4 +-- .../ml/featurize/text/TextFeaturizer.scala | 4 +-- .../ml/image/SuperpixelTransformer.scala | 4 +-- .../azure/synapse/ml/image/UnrollImage.scala | 6 ++-- .../synapse/ml/io/http/HTTPTransformer.scala | 4 +-- .../azure/synapse/ml/io/http/Parsers.scala | 12 +++---- .../ml/io/http/SimpleHTTPTransformer.scala | 4 +-- .../ml/isolationforest/IsolationForest.scala | 10 +++--- .../synapse/ml/logging/FeatureNames.scala | 36 +++++++++++++++++++ .../synapse/ml/logging/SynapseMLLogging.scala | 26 +++++++------- .../azure/synapse/ml/nn/ConditionalKNN.scala | 6 ++-- .../microsoft/azure/synapse/ml/nn/KNN.scala | 6 ++-- .../ml/recommendation/RankingAdapter.scala | 6 ++-- .../ml/recommendation/RankingEvaluator.scala | 4 +-- .../RankingTrainValidationSplit.scala | 6 ++-- .../RecommendationIndexer.scala | 6 ++-- .../azure/synapse/ml/recommendation/SAR.scala | 4 +-- .../synapse/ml/recommendation/SARModel.scala | 4 +-- .../azure/synapse/ml/stages/Cacher.scala | 4 +-- .../synapse/ml/stages/ClassBalancer.scala | 6 ++-- .../azure/synapse/ml/stages/DropColumns.scala | 4 +-- .../synapse/ml/stages/EnsembleByKey.scala | 4 +-- 
.../azure/synapse/ml/stages/Explode.scala | 4 +-- .../azure/synapse/ml/stages/Lambda.scala | 4 +-- .../ml/stages/MiniBatchTransformer.scala | 10 +++--- .../ml/stages/MultiColumnAdapter.scala | 4 +-- .../ml/stages/PartitionConsolidator.scala | 4 +-- .../synapse/ml/stages/RenameColumn.scala | 4 +-- .../azure/synapse/ml/stages/Repartition.scala | 4 +-- .../synapse/ml/stages/SelectColumns.scala | 4 +-- .../ml/stages/StratifiedRepartition.scala | 4 +-- .../synapse/ml/stages/SummarizeData.scala | 4 +-- .../synapse/ml/stages/TextPreprocessor.scala | 4 +-- .../azure/synapse/ml/stages/Timer.scala | 6 ++-- .../synapse/ml/stages/UDFTransformer.scala | 4 +-- .../synapse/ml/stages/UnicodeNormalize.scala | 4 +-- .../ml/train/ComputeModelStatistics.scala | 4 +-- .../train/ComputePerInstanceStatistics.scala | 4 +-- .../synapse/ml/train/TrainClassifier.scala | 6 ++-- .../synapse/ml/train/TrainRegressor.scala | 6 ++-- .../synapse/ml/onnx/ImageFeaturizer.scala | 4 +-- .../azure/synapse/ml/onnx/ONNXModel.scala | 4 +-- .../ml/lightgbm/LightGBMClassifier.scala | 10 +++--- .../synapse/ml/lightgbm/LightGBMRanker.scala | 10 +++--- .../ml/lightgbm/LightGBMRegressor.scala | 10 +++--- .../synapse/ml/opencv/ImageSetAugmenter.scala | 4 +-- .../synapse/ml/opencv/ImageTransformer.scala | 4 +-- .../synapse/ml/policyeval/CressieRead.scala | 4 +-- .../ml/policyeval/CressieReadInterval.scala | 4 +-- .../azure/synapse/ml/policyeval/Ips.scala | 4 +-- .../azure/synapse/ml/policyeval/Snips.scala | 4 +-- .../azure/synapse/ml/vw/KahanSum.scala | 4 +-- .../azure/synapse/ml/vw/VectorZipper.scala | 4 +-- .../ml/vw/VowpalWabbitCSETransformer.scala | 4 +-- .../ml/vw/VowpalWabbitClassifier.scala | 6 ++-- .../ml/vw/VowpalWabbitContextualBandit.scala | 6 ++-- .../ml/vw/VowpalWabbitDSJsonTransformer.scala | 5 +-- .../ml/vw/VowpalWabbitFeaturizer.scala | 4 +-- .../synapse/ml/vw/VowpalWabbitGeneric.scala | 6 ++-- .../vw/VowpalWabbitGenericProgressive.scala | 4 +-- .../ml/vw/VowpalWabbitInteractions.scala | 4 +-- 
.../synapse/ml/vw/VowpalWabbitRegressor.scala | 6 ++-- 110 files changed, 353 insertions(+), 311 deletions(-) create mode 100644 core/src/main/scala/com/microsoft/azure/synapse/ml/logging/FeatureNames.scala diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/AnomalyDetection.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/AnomalyDetection.scala index cf60fbac63..6b8af39634 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/AnomalyDetection.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/AnomalyDetection.scala @@ -8,7 +8,7 @@ import com.microsoft.azure.synapse.ml.cognitive.anomaly.AnomalyDetectorProtocol. import com.microsoft.azure.synapse.ml.core.contracts.HasOutputCol import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions import com.microsoft.azure.synapse.ml.io.http.ErrorUtils -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import org.apache.http.entity.{AbstractHttpEntity, StringEntity} import org.apache.spark.injections.UDFUtils @@ -148,7 +148,7 @@ abstract class AnomalyDetectorBase(override val uid: String) extends CognitiveSe object DetectLastAnomaly extends ComplexParamsReadable[DetectLastAnomaly] with Serializable class DetectLastAnomaly(override val uid: String) extends AnomalyDetectorBase(uid) with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Anomaly) def this() = this(Identifiable.randomUID("DetectLastAnomaly")) @@ -165,7 +165,7 @@ class DetectLastAnomaly(override val uid: String) extends AnomalyDetectorBase(ui object DetectAnomalies extends ComplexParamsReadable[DetectAnomalies] with Serializable class DetectAnomalies(override val uid: String) extends AnomalyDetectorBase(uid) with SynapseMLLogging { - logClass() + 
logClass(FeatureNames.AiServices.Anomaly) def this() = this(Identifiable.randomUID("DetectAnomalies")) @@ -183,7 +183,7 @@ object SimpleDetectAnomalies extends ComplexParamsReadable[SimpleDetectAnomalies class SimpleDetectAnomalies(override val uid: String) extends AnomalyDetectorBase(uid) with HasOutputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Anomaly) def this() = this(Identifiable.randomUID("SimpleDetectAnomalies")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnomalyDetection.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnomalyDetection.scala index 168bd35213..f7ec8f0006 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnomalyDetection.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/anomaly/MultivariateAnomalyDetection.scala @@ -14,7 +14,7 @@ import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions import com.microsoft.azure.synapse.ml.io.http.HandlingUtils.{convertAndClose, sendWithRetries} import com.microsoft.azure.synapse.ml.io.http.RESTHelpers.{Client, retry} import com.microsoft.azure.synapse.ml.io.http._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.stages._ import com.microsoft.azure.synapse.ml.param.CognitiveServiceStructParam import org.apache.commons.io.IOUtils @@ -416,7 +416,7 @@ object SimpleFitMultivariateAnomaly extends ComplexParamsReadable[SimpleFitMulti class SimpleFitMultivariateAnomaly(override val uid: String) extends Estimator[SimpleDetectMultivariateAnomaly] with MADBase { - logClass() + logClass(FeatureNames.AiServices.Anomaly) def this() = this(Identifiable.randomUID("SimpleFitMultivariateAnomaly")) @@ -569,7 +569,7 @@ object SimpleDetectMultivariateAnomaly extends 
ComplexParamsReadable[SimpleDetec class SimpleDetectMultivariateAnomaly(override val uid: String) extends Model[SimpleDetectMultivariateAnomaly] with MADBase with HasHandler with DetectMAParams { - logClass() + logClass(FeatureNames.AiServices.Anomaly) def this() = this(Identifiable.randomUID("SimpleDetectMultivariateAnomaly")) @@ -654,7 +654,7 @@ class DetectLastMultivariateAnomaly(override val uid: String) extends CognitiveS with HasSetLocation with HasCognitiveServiceInput with HasBatchSize with ComplexParamsWritable with Wrappable with HasErrorCol with SynapseMLLogging with DetectMAParams { - logClass() + logClass(FeatureNames.AiServices.Anomaly) def this() = this(Identifiable.randomUID("DetectLastMultivariateAnomaly")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/bing/BingImageSearch.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/bing/BingImageSearch.scala index 2dcc06218a..661c115f8c 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/bing/BingImageSearch.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/bing/BingImageSearch.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.cognitive.bing import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.core.utils.AsyncUtils -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.azure.synapse.ml.stages.Lambda import org.apache.commons.io.IOUtils @@ -67,7 +67,7 @@ object BingImageSearch extends ComplexParamsReadable[BingImageSearch] with Seria class BingImageSearch(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with HasInternalJsonOutputParser with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.BingImage) 
override protected lazy val pyInternalWrapper = true diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/face/Face.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/face/Face.scala index 0182d6885b..3c984ff26b 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/face/Face.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/face/Face.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.cognitive.face import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.cognitive.vision.HasImageUrl -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import org.apache.http.entity.{AbstractHttpEntity, StringEntity} import org.apache.spark.ml.ComplexParamsReadable @@ -21,7 +21,7 @@ class DetectFace(override val uid: String) extends CognitiveServicesBase(uid) with HasImageUrl with HasServiceParams with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Face) def this() = this(Identifiable.randomUID("DetectFace")) @@ -100,7 +100,7 @@ class FindSimilarFace(override val uid: String) with HasMaxNumOfCandidatesReturned with HasFaceIds with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Face) def this() = this(Identifiable.randomUID("FindSimilarFace")) @@ -189,7 +189,7 @@ class GroupFaces(override val uid: String) extends CognitiveServicesBase(uid) with HasServiceParams with HasFaceIds with HasSetLocation with HasCognitiveServiceInput with HasInternalJsonOutputParser with SynapseMLLogging with HasSetLinkedService { - logClass() + 
logClass(FeatureNames.AiServices.Face) def this() = this(Identifiable.randomUID("GroupFaces")) @@ -212,7 +212,7 @@ class IdentifyFaces(override val uid: String) with HasMaxNumOfCandidatesReturned with HasFaceIds with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Face) def this() = this(Identifiable.randomUID("IdentifyFaces")) @@ -281,7 +281,7 @@ class VerifyFaces(override val uid: String) extends CognitiveServicesBase(uid) with HasServiceParams with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Face) def this() = this(Identifiable.randomUID("VerifyFaces")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormOntologyLearner.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormOntologyLearner.scala index 4236b0f5c4..89ee15621b 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormOntologyLearner.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormOntologyLearner.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.cognitive.form import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.DataTypeParam import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.param.ParamMap @@ -42,7 +42,7 @@ object FormOntologyLearner extends DefaultParamsReadable[FormOntologyLearner] { class FormOntologyLearner(override val uid: String) extends Estimator[FormOntologyTransformer] with SynapseMLLogging with 
DefaultParamsWritable with HasInputCol with HasOutputCol with Wrappable { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("FormOntologyLearner")) @@ -87,7 +87,7 @@ object FormOntologyTransformer extends ComplexParamsReadable[FormOntologyTransfo class FormOntologyTransformer(override val uid: String) extends Model[FormOntologyTransformer] with SynapseMLLogging with ComplexParamsWritable with HasInputCol with HasOutputCol with Wrappable { - logClass() + logClass(FeatureNames.AiServices.Form) val ontology: DataTypeParam = new DataTypeParam( parent = this, diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizer.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizer.scala index 70a07027ab..fd21a8747c 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizer.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.cognitive.form import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.cognitive.vision.{BasicAsyncReply, HasImageInput, ReadLine} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.azure.synapse.ml.stages.UDFTransformer import org.apache.http.client.methods.{HttpGet, HttpRequestBase} @@ -187,7 +187,7 @@ object AnalyzeLayout extends ComplexParamsReadable[AnalyzeLayout] class AnalyzeLayout(override val uid: String) extends FormRecognizerBase(uid) with SynapseMLLogging with HasPages { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("AnalyzeLayout")) @@ -220,7 +220,7 @@ object AnalyzeReceipts extends ComplexParamsReadable[AnalyzeReceipts] class 
AnalyzeReceipts(override val uid: String) extends FormRecognizerBase(uid) with SynapseMLLogging with HasPages with HasTextDetails with HasLocale { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("AnalyzeReceipts")) @@ -234,7 +234,7 @@ object AnalyzeBusinessCards extends ComplexParamsReadable[AnalyzeBusinessCards] class AnalyzeBusinessCards(override val uid: String) extends FormRecognizerBase(uid) with SynapseMLLogging with HasPages with HasTextDetails with HasLocale { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("AnalyzeBusinessCards")) @@ -248,7 +248,7 @@ object AnalyzeInvoices extends ComplexParamsReadable[AnalyzeInvoices] class AnalyzeInvoices(override val uid: String) extends FormRecognizerBase(uid) with SynapseMLLogging with HasPages with HasTextDetails with HasLocale { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("AnalyzeInvoices")) @@ -262,7 +262,7 @@ object AnalyzeIDDocuments extends ComplexParamsReadable[AnalyzeIDDocuments] class AnalyzeIDDocuments(override val uid: String) extends FormRecognizerBase(uid) with SynapseMLLogging with HasPages with HasTextDetails { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("AnalyzeIDDocuments")) @@ -277,7 +277,7 @@ object ListCustomModels extends ComplexParamsReadable[ListCustomModels] class ListCustomModels(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with HasSetLinkedService with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("ListCustomModels")) @@ -302,7 +302,7 @@ object GetCustomModel extends ComplexParamsReadable[GetCustomModel] class GetCustomModel(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with 
HasInternalJsonOutputParser with HasSetLocation with HasSetLinkedService with SynapseMLLogging with HasModelID { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("GetCustomModel")) @@ -330,7 +330,7 @@ object AnalyzeCustomModel extends ComplexParamsReadable[AnalyzeCustomModel] class AnalyzeCustomModel(override val uid: String) extends FormRecognizerBase(uid) with SynapseMLLogging with HasTextDetails with HasModelID { - logClass() + logClass(FeatureNames.AiServices.Form) def this() = this(Identifiable.randomUID("AnalyzeCustomModel")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizerV3.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizerV3.scala index 3b9dbd36ac..6162d15b84 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizerV3.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/form/FormRecognizerV3.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.cognitive.form import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.cognitive.vision.{BasicAsyncReply, HasImageInput} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import org.apache.http.entity.{AbstractHttpEntity, ByteArrayEntity, ContentType, StringEntity} import org.apache.spark.ml.ComplexParamsReadable @@ -38,7 +38,7 @@ class AnalyzeDocument(override val uid: String) extends CognitiveServicesBaseNoH with HasCognitiveServiceInput with HasInternalJsonOutputParser with BasicAsyncReply with HasPrebuiltModelID with HasPages with HasLocale with HasAPIVersion with HasImageInput with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Form) setDefault(apiVersion 
-> Left("2022-08-31")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/language/AnalyzeText.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/language/AnalyzeText.scala index d4847b8bb0..eaca384ea2 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/language/AnalyzeText.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/language/AnalyzeText.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.cognitive.language import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.cognitive.text.{TADocument, TextAnalyticsAutoBatch} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.azure.synapse.ml.stages.{FixedMiniBatchTransformer, FlattenBatch, HasBatchSize, UDFTransformer} import org.apache.http.entity.{AbstractHttpEntity, StringEntity} @@ -127,7 +127,7 @@ class AnalyzeText(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with HasAPIVersion with HasCountryHint with TextAnalyticsAutoBatch with HasBatchSize with AnalyzeTextTaskParameters with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Language) def this() = this(Identifiable.randomUID("AnalyzeText")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIChatCompletion.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIChatCompletion.scala index 1af328474e..4e6270d448 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIChatCompletion.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIChatCompletion.scala @@ -3,10 +3,9 @@ package 
com.microsoft.azure.synapse.ml.cognitive.openai -import com.microsoft.azure.synapse.ml.cognitive.{ - CognitiveServicesBase, HasCognitiveServiceInput, HasInternalJsonOutputParser -} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.cognitive.{CognitiveServicesBase, HasCognitiveServiceInput, + HasInternalJsonOutputParser} +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.AnyJsonFormat.anyFormat import org.apache.http.entity.{AbstractHttpEntity, ContentType, StringEntity} import org.apache.spark.ml.ComplexParamsReadable @@ -24,7 +23,7 @@ object OpenAIChatCompletion extends ComplexParamsReadable[OpenAIChatCompletion] class OpenAIChatCompletion(override val uid: String) extends CognitiveServicesBase(uid) with HasOpenAITextParams with HasCognitiveServiceInput with HasInternalJsonOutputParser with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.OpenAI) val messagesCol: Param[String] = new Param[String]( this, "messagesCol", "The column messages to generate chat completions for," + diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAICompletion.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAICompletion.scala index 50fa491167..9069aa479b 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAICompletion.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAICompletion.scala @@ -3,10 +3,9 @@ package com.microsoft.azure.synapse.ml.cognitive.openai -import com.microsoft.azure.synapse.ml.cognitive.{ - CognitiveServicesBase, HasCognitiveServiceInput, HasInternalJsonOutputParser -} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.cognitive.{CognitiveServicesBase, + HasCognitiveServiceInput, HasInternalJsonOutputParser} +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.AnyJsonFormat.anyFormat import org.apache.http.entity.{AbstractHttpEntity, ContentType, StringEntity} import org.apache.spark.ml.ComplexParamsReadable @@ -23,7 +22,7 @@ object OpenAICompletion extends ComplexParamsReadable[OpenAICompletion] class OpenAICompletion(override val uid: String) extends CognitiveServicesBase(uid) with HasOpenAITextParams with HasPromptInputs with HasCognitiveServiceInput with HasInternalJsonOutputParser with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.OpenAI) def this() = this(Identifiable.randomUID("OpenAICompletion")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIEmbedding.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIEmbedding.scala index c241d7e33f..cb0c828931 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIEmbedding.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIEmbedding.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.cognitive.openai import com.microsoft.azure.synapse.ml.cognitive.{CognitiveServicesBase, HasCognitiveServiceInput, HasServiceParams} import com.microsoft.azure.synapse.ml.core.contracts.HasInputCol import com.microsoft.azure.synapse.ml.io.http.JSONOutputParser -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import org.apache.http.entity.{AbstractHttpEntity, ContentType, StringEntity} import org.apache.spark.ml.ComplexParamsReadable @@ -24,7 +24,7 @@ object OpenAIEmbedding extends ComplexParamsReadable[OpenAIEmbedding] class OpenAIEmbedding (override val uid: String) extends CognitiveServicesBase(uid) with HasOpenAISharedParams with 
HasCognitiveServiceInput with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.OpenAI) def this() = this(Identifiable.randomUID("OpenAIEmbedding")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIPrompt.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIPrompt.scala index 87531a8221..7be5ad2a66 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIPrompt.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/openai/OpenAIPrompt.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.core.contracts.HasOutputCol import com.microsoft.azure.synapse.ml.core.spark.Functions import com.microsoft.azure.synapse.ml.io.http.{ConcurrencyParams, HasErrorCol, HasURL} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.StringStringMapParam import org.apache.spark.ml.param.{BooleanParam, Param, ParamMap, ParamValidators} import org.apache.spark.ml.util.Identifiable @@ -26,7 +26,7 @@ class OpenAIPrompt(override val uid: String) extends Transformer with HasSubscriptionKey with HasAADToken with HasCustomAuthHeader with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.OpenAI) def this() = this(Identifiable.randomUID("OpenAIPrompt")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/search/AzureSearch.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/search/AzureSearch.scala index 54764d6404..00786333bc 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/search/AzureSearch.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/search/AzureSearch.scala @@ -4,11 +4,11 @@ package 
com.microsoft.azure.synapse.ml.cognitive.search import com.microsoft.azure.synapse.ml.cognitive.search.AzureSearchProtocol._ -import com.microsoft.azure.synapse.ml.cognitive.{CognitiveServicesBase, - HasCognitiveServiceInput, HasInternalJsonOutputParser, HasServiceParams} +import com.microsoft.azure.synapse.ml.cognitive.{CognitiveServicesBase, HasCognitiveServiceInput, + HasInternalJsonOutputParser, HasServiceParams} import com.microsoft.azure.synapse.ml.io.http.{ErrorUtils, SimpleHTTPTransformer} import com.microsoft.azure.synapse.ml.io.powerbi.StreamMaterializer -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.stages.{FixedMiniBatchTransformer, HasBatchSize, Lambda} import org.apache.http.Consts import org.apache.http.entity.{AbstractHttpEntity, ContentType, StringEntity} @@ -90,7 +90,7 @@ class AddDocuments(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasActionCol with HasServiceName with HasIndexName with HasBatchSize with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Search) def this() = this(Identifiable.randomUID("AddDocuments")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeakerEmotionInference.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeakerEmotionInference.scala index 7884901f34..1fec4df278 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeakerEmotionInference.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeakerEmotionInference.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.cognitive.speech import com.microsoft.azure.synapse.ml.cognitive._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.azure.synapse.ml.stages.Lambda import org.apache.http.client.methods.HttpRequestBase @@ -23,7 +23,7 @@ class SpeakerEmotionInference(override val uid: String) extends CognitiveServicesBase(uid) with HasLocaleCol with HasVoiceNameCol with HasTextCol with HasSetLocation with HasCognitiveServiceInput with HasInternalJsonOutputParser with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Speech) def this() = this(Identifiable.randomUID(classOf[SpeakerEmotionInference].getSimpleName)) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToText.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToText.scala index c35201dc73..448a7de00a 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToText.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToText.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.cognitive.speech import com.microsoft.azure.synapse.ml.cognitive._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import org.apache.http.entity.{AbstractHttpEntity, ByteArrayEntity} import org.apache.spark.ml.ComplexParamsReadable @@ -23,7 +23,7 @@ object SpeechToText extends ComplexParamsReadable[SpeechToText] with Serializabl class SpeechToText(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedServiceUsingLocation { - logClass() + logClass(FeatureNames.AiServices.Speech) def this() = this(Identifiable.randomUID("SpeechToText")) diff --git 
a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToTextSDK.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToTextSDK.scala index 3796fb6f49..7179ce7a8c 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToTextSDK.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/SpeechToTextSDK.scala @@ -10,14 +10,12 @@ import com.microsoft.azure.synapse.ml.core.contracts.HasOutputCol import com.microsoft.azure.synapse.ml.core.schema.{DatasetExtensions, SparkBindings} import com.microsoft.azure.synapse.ml.core.utils.OsUtils import com.microsoft.azure.synapse.ml.io.http.HasURL -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.cognitiveservices.speech._ import com.microsoft.cognitiveservices.speech.audio._ -import com.microsoft.cognitiveservices.speech.transcription.{ - Conversation, ConversationTranscriber, - ConversationTranscriptionEventArgs, Participant -} +import com.microsoft.cognitiveservices.speech.transcription.{Conversation, ConversationTranscriber, + ConversationTranscriptionEventArgs, Participant} import com.microsoft.cognitiveservices.speech.util.EventHandler import org.apache.commons.io.FilenameUtils import org.apache.hadoop.fs.Path @@ -438,7 +436,7 @@ abstract class SpeechSDKBase extends Transformer } class SpeechToTextSDK(override val uid: String) extends SpeechSDKBase with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Speech) override type ResponseType = SpeechResponse @@ -511,7 +509,7 @@ class SpeechToTextSDK(override val uid: String) extends SpeechSDKBase with Synap object ConversationTranscription extends ComplexParamsReadable[ConversationTranscription] class ConversationTranscription(override val uid: String) extends 
SpeechSDKBase with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Speech) override type ResponseType = TranscriptionResponse diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/TextToSpeech.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/TextToSpeech.scala index 6e16c9f1d8..61aa2f9bae 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/TextToSpeech.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/speech/TextToSpeech.scala @@ -3,11 +3,11 @@ package com.microsoft.azure.synapse.ml.cognitive.speech -import com.microsoft.azure.synapse.ml.cognitive.{HasServiceParams, - HasSetLinkedServiceUsingLocation, HasSetLocation, HasSubscriptionKey} +import com.microsoft.azure.synapse.ml.cognitive.{HasServiceParams, HasSetLinkedServiceUsingLocation, HasSetLocation, + HasSubscriptionKey} import com.microsoft.azure.synapse.ml.core.env.StreamUtilities.using import com.microsoft.azure.synapse.ml.io.http.{HasErrorCol, HasURL} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.cognitiveservices.speech._ import org.apache.hadoop.fs.{FileSystem, Path} @@ -31,7 +31,7 @@ class TextToSpeech(override val uid: String) with HasSetLocation with HasServiceParams with HasErrorCol with HasURL with HasSubscriptionKey with ComplexParamsWritable with SynapseMLLogging with HasSetLinkedServiceUsingLocation { - logClass() + logClass(FeatureNames.AiServices.Speech) setDefault(errorCol -> (uid + "_errors")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/text/TextAnalytics.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/text/TextAnalytics.scala index 9db82606cb..f1f8bbe998 100644 --- 
a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/text/TextAnalytics.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/text/TextAnalytics.scala @@ -7,6 +7,7 @@ import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.cognitive.vision.BasicAsyncReply import com.microsoft.azure.synapse.ml.core.schema.SparkBindings import com.microsoft.azure.synapse.ml.io.http.HasHandler +import com.microsoft.azure.synapse.ml.logging.FeatureNames import com.microsoft.azure.synapse.ml.param.{ServiceParam, StringStringMapParam} import com.microsoft.azure.synapse.ml.stages.{FixedMiniBatchTransformer, FlattenBatch, HasBatchSize, UDFTransformer} import org.apache.http.client.methods.{HttpPost, HttpRequestBase} @@ -280,7 +281,7 @@ object TextSentiment extends ComplexParamsReadable[TextSentiment] " com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class TextSentiment(override val uid: String) extends TextAnalyticsBase(uid) with HasStringIndexType with HasHandler { - logClass() + logClass(FeatureNames.AiServices.Text) type T = TextSentimentScoredDoc @@ -318,7 +319,7 @@ object KeyPhraseExtractor extends ComplexParamsReadable[KeyPhraseExtractor] " com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class KeyPhraseExtractor(override val uid: String) extends TextAnalyticsBase(uid) with HasStringIndexType with HasHandler { - logClass() + logClass(FeatureNames.AiServices.Text) type T = KeyPhraseScoredDoc @@ -341,7 +342,7 @@ object NER extends ComplexParamsReadable[NER] " com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class NER(override val uid: String) extends TextAnalyticsBase(uid) with HasStringIndexType with HasHandler { - logClass() + logClass(FeatureNames.AiServices.Text) type T = NERScoredDoc @@ -364,7 +365,7 @@ object PII extends ComplexParamsReadable[PII] " 
com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class PII(override val uid: String) extends TextAnalyticsBase(uid) with HasStringIndexType with HasHandler { - logClass() + logClass(FeatureNames.AiServices.Text) type T = PIIScoredDoc @@ -397,7 +398,7 @@ object LanguageDetector extends ComplexParamsReadable[LanguageDetector] " com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class LanguageDetector(override val uid: String) extends TextAnalyticsBase(uid) with HasStringIndexType with HasHandler { - logClass() + logClass(FeatureNames.AiServices.Text) type T = LanguageDetectorScoredDoc @@ -420,7 +421,7 @@ object EntityDetector extends ComplexParamsReadable[EntityDetector] " com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class EntityDetector(override val uid: String) extends TextAnalyticsBase(uid) with HasStringIndexType with HasHandler { - logClass() + logClass(FeatureNames.AiServices.Text) type T = EntityDetectorScoredDoc @@ -445,7 +446,7 @@ class AnalyzeHealthText(override val uid: String) extends TextAnalyticsBaseNoBinding(uid) with HasUnpackedBinding with HasStringIndexType with BasicAsyncReply { - logClass() + logClass(FeatureNames.AiServices.Text) type T = AnalyzeHealthTextScoredDoc @@ -493,7 +494,7 @@ object TextAnalyze extends ComplexParamsReadable[TextAnalyze] " com.microsoft.azure.synapse.ml.cognitive.language.AnalyzeText instead", "v0.11.3") class TextAnalyze(override val uid: String) extends TextAnalyticsBaseNoBinding(uid) with BasicAsyncReply { - logClass() + logClass(FeatureNames.AiServices.Text) def this() = this(Identifiable.randomUID("TextAnalyze")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/DocumentTranslator.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/DocumentTranslator.scala index 510f516e40..801ea0b1f1 100644 --- 
a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/DocumentTranslator.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/DocumentTranslator.scala @@ -10,7 +10,7 @@ import com.microsoft.azure.synapse.ml.cognitive.search.HasServiceName import com.microsoft.azure.synapse.ml.cognitive.vision.BasicAsyncReply import com.microsoft.azure.synapse.ml.io.http.HandlingUtils.{convertAndClose, sendWithRetries} import com.microsoft.azure.synapse.ml.io.http.{HTTPResponseData, HeaderValues} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import org.apache.commons.io.IOUtils import org.apache.http.client.methods.HttpGet @@ -57,7 +57,7 @@ class DocumentTranslator(override val uid: String) extends CognitiveServicesBase import TranslatorJsonProtocol._ - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("DocumentTranslator")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/TextTranslator.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/TextTranslator.scala index ed5212e892..b39a7b9e8b 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/TextTranslator.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/translate/TextTranslator.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.cognitive.translate import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions import com.microsoft.azure.synapse.ml.io.http.SimpleHTTPTransformer -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam 
import com.microsoft.azure.synapse.ml.stages.{DropColumns, Lambda} import org.apache.http.client.methods.{HttpPost, HttpRequestBase} @@ -209,7 +209,7 @@ object Translate extends ComplexParamsReadable[Translate] class Translate(override val uid: String) extends TextTranslatorBase(uid) with TextAsOnlyEntity with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("Translate")) @@ -384,7 +384,7 @@ object Transliterate extends ComplexParamsReadable[Transliterate] class Transliterate(override val uid: String) extends TextTranslatorBase(uid) with TextAsOnlyEntity with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("Transliterate")) @@ -419,7 +419,7 @@ object Detect extends ComplexParamsReadable[Detect] class Detect(override val uid: String) extends TextTranslatorBase(uid) with TextAsOnlyEntity with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("Detect")) @@ -432,7 +432,7 @@ object BreakSentence extends ComplexParamsReadable[BreakSentence] class BreakSentence(override val uid: String) extends TextTranslatorBase(uid) with TextAsOnlyEntity with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("BreakSentence")) @@ -461,7 +461,7 @@ object DictionaryLookup extends ComplexParamsReadable[DictionaryLookup] class DictionaryLookup(override val uid: String) extends TextTranslatorBase(uid) with TextAsOnlyEntity with HasFromLanguage with HasToLanguage with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("DictionaryLookup")) @@ -495,7 +495,7 @@ object DictionaryExamples extends ComplexParamsReadable[DictionaryExamples] class DictionaryExamples(override val uid: String) extends TextTranslatorBase(uid) with HasTextAndTranslationInput with 
HasFromLanguage with HasToLanguage with HasCognitiveServiceInput with SynapseMLLogging { - logClass() + logClass(FeatureNames.AiServices.Translate) def this() = this(Identifiable.randomUID("DictionaryExamples")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/vision/ComputerVision.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/vision/ComputerVision.scala index d99aea0b3b..65398566a2 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/vision/ComputerVision.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/vision/ComputerVision.scala @@ -8,7 +8,7 @@ import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.cognitive.text.HasLanguage import com.microsoft.azure.synapse.ml.io.http.HandlingUtils._ import com.microsoft.azure.synapse.ml.io.http._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ServiceParam import com.microsoft.azure.synapse.ml.stages.UDFTransformer import org.apache.commons.io.IOUtils @@ -191,7 +191,7 @@ class OCR(override val uid: String) extends CognitiveServicesBase(uid) with HasLanguage with HasImageInput with HasDetectOrientation with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("OCR")) @@ -370,7 +370,7 @@ class RecognizeText(override val uid: String) with BasicAsyncReply with HasImageInput with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("RecognizeText")) @@ -416,7 +416,7 @@ class ReadImage(override val uid: String) with 
BasicAsyncReply with HasImageInput with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("ReadImage")) @@ -449,7 +449,7 @@ class GenerateThumbnails(override val uid: String) with HasWidth with HasHeight with HasSmartCropping with HasCognitiveServiceInput with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("GenerateThumbnails")) @@ -466,7 +466,7 @@ class AnalyzeImage(override val uid: String) extends CognitiveServicesBase(uid) with HasImageInput with HasInternalJsonOutputParser with HasCognitiveServiceInput with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) val visualFeatures = new ServiceParam[Seq[String]]( this, "visualFeatures", "what visual feature types to return", @@ -565,7 +565,7 @@ class RecognizeDomainSpecificContent(override val uid: String) extends CognitiveServicesBase(uid) with HasImageInput with HasServiceParams with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("RecognizeDomainSpecificContent")) @@ -590,7 +590,7 @@ class TagImage(override val uid: String) extends CognitiveServicesBase(uid) with HasImageInput with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("TagImage")) @@ -618,7 +618,7 @@ class DescribeImage(override val uid: String) extends CognitiveServicesBase(uid) with HasCognitiveServiceInput with HasImageInput with 
HasInternalJsonOutputParser with HasSetLocation with SynapseMLLogging with HasSetLinkedService { - logClass() + logClass(FeatureNames.AiServices.Vision) def this() = this(Identifiable.randomUID("DescribeImage")) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/CheckPointInPolygon.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/CheckPointInPolygon.scala index 9dbf95e449..d17023ad56 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/CheckPointInPolygon.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/CheckPointInPolygon.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.geospatial import com.microsoft.azure.synapse.ml.build.BuildInfo import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.io.http.{CustomInputParser, HTTPInputParser, HeaderValues} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.http.client.methods.{HttpGet, HttpRequestBase} import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.util.Identifiable @@ -21,7 +21,7 @@ class CheckPointInPolygon(override val uid: String) extends CognitiveServicesBase(uid) with HasInternalJsonOutputParser with SynapseMLLogging with HasServiceParams with HasSubscriptionKey with HasSetGeography with HasLatLonPairInput with HasUserDataIdInput { - logClass() + logClass(FeatureNames.Geospatial) protected def inputFunc: Row => Option[HttpRequestBase] = { { row: Row => diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/Geocoders.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/Geocoders.scala index 61c99e75c2..93546310e0 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/Geocoders.scala +++ 
b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/geospatial/Geocoders.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.geospatial import com.microsoft.azure.synapse.ml.build.BuildInfo import com.microsoft.azure.synapse.ml.cognitive._ import com.microsoft.azure.synapse.ml.io.http.{CustomInputParser, HTTPInputParser, HasURL, HeaderValues} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.stages.Lambda import org.apache.http.client.methods.{HttpPost, HttpRequestBase} import org.apache.http.entity.StringEntity @@ -28,7 +28,7 @@ class AddressGeocoder(override val uid: String) extends CognitiveServicesBaseNoHandler(uid) with HasServiceParams with HasSubscriptionKey with HasURL with HasAddressInput with HasInternalJsonOutputParser with MapsAsyncReply with SynapseMLLogging { - logClass() + logClass(FeatureNames.Geospatial) def urlPath: String = "" @@ -79,7 +79,7 @@ class ReverseAddressGeocoder(override val uid: String) extends CognitiveServicesBaseNoHandler(uid) with HasInternalJsonOutputParser with MapsAsyncReply with SynapseMLLogging with HasServiceParams with HasSubscriptionKey with HasURL with HasLatLonPairInput { - logClass() + logClass(FeatureNames.Geospatial) def urlPath: String = "" diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/FindBestModel.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/FindBestModel.scala index cfbde49f94..e43bf427f2 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/FindBestModel.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/FindBestModel.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.automl import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasEvaluationMetric import com.microsoft.azure.synapse.ml.core.metrics.MetricConstants -import 
com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.{DataFrameParam, TransformerArrayParam, TransformerParam} import com.microsoft.azure.synapse.ml.train.ComputeModelStatistics import org.apache.spark.ml._ @@ -52,7 +52,7 @@ trait FindBestModelParams extends Wrappable with ComplexParamsWritable with HasE /** Evaluates and chooses the best model from a list of models. */ class FindBestModel(override val uid: String) extends Estimator[BestModel] with FindBestModelParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.AutoML) def this() = this(Identifiable.randomUID("FindBestModel")) @@ -139,7 +139,7 @@ trait HasBestModel extends Params { /** Model produced by [[FindBestModel]]. */ class BestModel(val uid: String) extends Model[BestModel] with ComplexParamsWritable with Wrappable with HasBestModel with SynapseMLLogging { - logClass() + logClass(FeatureNames.AutoML) def this() = this(Identifiable.randomUID("BestModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/TuneHyperparameters.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/TuneHyperparameters.scala index df5d95fd58..b895a53399 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/TuneHyperparameters.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/automl/TuneHyperparameters.scala @@ -7,7 +7,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasEvaluationMetric import com.microsoft.azure.synapse.ml.core.metrics.MetricConstants -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.{EstimatorArrayParam, ParamSpace, ParamSpaceParam} import 
com.microsoft.azure.synapse.ml.train.{ComputeModelStatistics, TrainedClassifierModel, TrainedRegressorModel} import org.apache.spark.SparkException @@ -37,7 +37,7 @@ import scala.util.control.NonFatal */ class TuneHyperparameters(override val uid: String) extends Estimator[TuneHyperparametersModel] with Wrappable with ComplexParamsWritable with HasEvaluationMetric with SynapseMLLogging { - logClass() + logClass(FeatureNames.AutoML) def this() = this(Identifiable.randomUID("TuneHyperparameters")) @@ -231,7 +231,7 @@ object TuneHyperparameters extends ComplexParamsReadable[TuneHyperparameters] class TuneHyperparametersModel(val uid: String) extends Model[TuneHyperparametersModel] with ComplexParamsWritable with Wrappable with HasBestModel with SynapseMLLogging { - logClass() + logClass(FeatureNames.AutoML) def this() = this(Identifiable.randomUID("TuneHyperparametersModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/DoubleMLEstimator.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/DoubleMLEstimator.scala index a7e5d83215..738b7ffeed 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/DoubleMLEstimator.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/DoubleMLEstimator.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.causal import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.{DatasetExtensions, SchemaConstants} import com.microsoft.azure.synapse.ml.core.utils.StopWatch -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.stages.DropColumns import com.microsoft.azure.synapse.ml.train.{TrainClassifier, TrainRegressor} import org.apache.commons.math3.stat.descriptive.rank.Percentile @@ -64,7 +64,7 @@ class DoubleMLEstimator(override val uid: String) extends Estimator[DoubleMLModel] with 
ComplexParamsWritable with DoubleMLParams with SynapseMLLogging with Wrappable { - logClass() + logClass(FeatureNames.Causal) def this() = this(Identifiable.randomUID("DoubleMLEstimator")) @@ -314,7 +314,7 @@ object DoubleMLEstimator extends ComplexParamsReadable[DoubleMLEstimator] { /** Model produced by [[DoubleMLEstimator]]. */ class DoubleMLModel(val uid: String) extends Model[DoubleMLModel] with DoubleMLParams with ComplexParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Causal) override protected lazy val pyInternalWrapper = true diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestDMLEstimator.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestDMLEstimator.scala index 7db88531a8..46c7e4a959 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestDMLEstimator.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestDMLEstimator.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.causal import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TransformerArrayParam import com.microsoft.azure.synapse.ml.stages.DropColumns import org.apache.commons.math3.stat.descriptive.rank.Percentile @@ -32,7 +32,7 @@ class OrthoForestDMLEstimator(override val uid: String) extends Estimator[OrthoForestDMLModel] with ComplexParamsWritable with OrthoForestDMLParams with Wrappable with SynapseMLLogging with HasOutcomeCol { - logClass() + logClass(FeatureNames.Causal) type EstimatorWithPC = Estimator[_ <: Model[_] with HasPredictionCol] with HasPredictionCol @@ -160,7 +160,7 @@ class OrthoForestDMLModel(val uid: String) extends Model[OrthoForestDMLModel] with OrthoForestDMLParams with ComplexParamsWritable with Wrappable with SynapseMLLogging { 
- logClass() + logClass(FeatureNames.Causal) override protected lazy val pyInternalWrapper = false diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestVariableTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestVariableTransformer.scala index cdf35d0577..7fe63fd1ab 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestVariableTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/OrthoForestVariableTransformer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.causal import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasOutputCol -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.{Param, ParamMap} import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -23,7 +23,7 @@ import org.apache.spark.sql.{DataFrame, Dataset} class OrthoForestVariableTransformer(override val uid: String) extends Transformer with HasOutputCol with DefaultParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Causal) def this() = this(Identifiable.randomUID("OrthoForestVariableTransformer")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/ResidualTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/ResidualTransformer.scala index adda1a67a9..de6dfe9d3f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/ResidualTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/causal/ResidualTransformer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.causal import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasOutputCol -import 
com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.functions.vector_to_array import org.apache.spark.ml.linalg.SQLDataTypes @@ -22,7 +22,7 @@ import org.apache.spark.sql.{DataFrame, Dataset} class ResidualTransformer(override val uid: String) extends Transformer with HasOutputCol with DefaultParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Causal) def this() = this(Identifiable.randomUID("ComputeResidualsTransformer")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ICEExplainer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ICEExplainer.scala index 4d3eeb90a5..f7df32907a 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ICEExplainer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ICEExplainer.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.explainers import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions import com.microsoft.azure.synapse.ml.core.utils.BreezeUtils._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TypedArrayParam import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.linalg.{SQLDataTypes, Vector} @@ -134,7 +134,7 @@ class ICETransformer(override val uid: String) extends Transformer with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Explainers) override protected lazy val pyInternalWrapper = true diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageLIME.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageLIME.scala index 
c915a3f191..e7424f016f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageLIME.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageLIME.scala @@ -7,6 +7,7 @@ import breeze.stats.distributions.RandBasis import com.microsoft.azure.synapse.ml.core.schema.{DatasetExtensions, ImageSchemaUtils} import com.microsoft.azure.synapse.ml.image.{HasCellSize, HasModifier, SuperpixelData} import com.microsoft.azure.synapse.ml.io.image.ImageUtils +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.image.ImageSchema @@ -39,7 +40,7 @@ class ImageLIME(override val uid: String) extends LIMEBase(uid) with ImageLIMEParams with ImageExplainer { - logClass() + logClass(FeatureNames.Explainers) def this() = { this(Identifiable.randomUID("ImageLIME")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageSHAP.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageSHAP.scala index 12251dbbc0..9e4ca14474 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageSHAP.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/ImageSHAP.scala @@ -6,6 +6,7 @@ package com.microsoft.azure.synapse.ml.explainers import com.microsoft.azure.synapse.ml.core.schema.{DatasetExtensions, ImageSchemaUtils} import com.microsoft.azure.synapse.ml.image.{HasCellSize, HasModifier, SuperpixelData} import com.microsoft.azure.synapse.ml.io.image.ImageUtils +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.image.ImageSchema @@ -37,7 +38,7 @@ class ImageSHAP(override val uid: String) with ImageSHAPParams with ImageExplainer { - logClass() + logClass(FeatureNames.Explainers) def this() = { 
this(Identifiable.randomUID("ImageSHAP")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularLIME.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularLIME.scala index ef58a4b7bc..0a0150c7a2 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularLIME.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularLIME.scala @@ -5,6 +5,7 @@ package com.microsoft.azure.synapse.ml.explainers import breeze.stats.distributions.RandBasis import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.param.StringArrayParam import org.apache.spark.ml.param.shared.HasInputCols @@ -19,7 +20,7 @@ class TabularLIME(override val uid: String) with HasInputCols with HasBackgroundData { - logClass() + logClass(FeatureNames.Explainers) def this() = { this(Identifiable.randomUID("TabularLIME")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularSHAP.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularSHAP.scala index dd1c5b5bf9..74d2656eee 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularSHAP.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TabularSHAP.scala @@ -5,6 +5,7 @@ package com.microsoft.azure.synapse.ml.explainers import breeze.stats.distributions.RandBasis import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.param.shared.HasInputCols @@ -18,7 +19,7 @@ class TabularSHAP(override val uid: String) with HasInputCols with HasBackgroundData { - logClass() + logClass(FeatureNames.Explainers) def this() = { 
this(Identifiable.randomUID("TabularSHAP")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextLIME.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextLIME.scala index e499317189..a18c0d1e3a 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextLIME.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextLIME.scala @@ -5,6 +5,7 @@ package com.microsoft.azure.synapse.ml.explainers import breeze.stats.distributions.RandBasis import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.linalg.SQLDataTypes.VectorType @@ -27,7 +28,7 @@ class TextLIME(override val uid: String) with TextLIMEParams with TextExplainer { - logClass() + logClass(FeatureNames.Explainers) def this() = { this(Identifiable.randomUID("TextLIME")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextSHAP.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextSHAP.scala index b54b81b050..d7b6b77c5c 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextSHAP.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/TextSHAP.scala @@ -4,6 +4,7 @@ package com.microsoft.azure.synapse.ml.explainers import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.linalg.SQLDataTypes.VectorType @@ -26,7 +27,7 @@ class TextSHAP(override val uid: String) with TextSHAPParams with TextExplainer { - logClass() + logClass(FeatureNames.Explainers) def this() = { this(Identifiable.randomUID("TextSHAP")) diff --git 
a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorLIME.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorLIME.scala index 32255813d8..b69a373f11 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorLIME.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorLIME.scala @@ -5,6 +5,7 @@ package com.microsoft.azure.synapse.ml.explainers import breeze.stats.distributions.RandBasis import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.linalg.SQLDataTypes.VectorType @@ -19,7 +20,7 @@ import org.apache.spark.sql.{DataFrame, Row} class VectorLIME(override val uid: String) extends LIMEBase(uid) with HasInputCol with HasBackgroundData { - logClass() + logClass(FeatureNames.Explainers) def this() = { this(Identifiable.randomUID("VectorLIME")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorSHAP.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorSHAP.scala index f443b4fdb0..7dc28bfa6f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorSHAP.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/VectorSHAP.scala @@ -5,6 +5,7 @@ package com.microsoft.azure.synapse.ml.explainers import breeze.stats.distributions.RandBasis import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.logging.FeatureNames import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.linalg.SQLDataTypes.VectorType @@ -20,7 +21,7 @@ class VectorSHAP(override val uid: String) with HasInputCol with HasBackgroundData { - logClass() + logClass(FeatureNames.Explainers) def this() = { 
this(Identifiable.randomUID("VectorSHAP")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/AggregateBalanceMeasure.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/AggregateBalanceMeasure.scala index ccf2a8b582..4d982b2c1c 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/AggregateBalanceMeasure.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/AggregateBalanceMeasure.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.exploratory import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.param._ import org.apache.spark.ml.util.Identifiable import org.apache.spark.ml.{ComplexParamsReadable, ComplexParamsWritable, Transformer} @@ -34,7 +34,7 @@ class AggregateBalanceMeasure(override val uid: String) with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("AggregateBalanceMeasure")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/DistributionBalanceMeasure.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/DistributionBalanceMeasure.scala index a2933dd4e0..565ea9d617 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/DistributionBalanceMeasure.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/DistributionBalanceMeasure.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.exploratory import breeze.stats.distributions.ChiSquared import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.ArrayMapParam import org.apache.spark.ml.param._ import org.apache.spark.ml.util.Identifiable @@ -45,7 +45,7 @@ class DistributionBalanceMeasure(override val uid: String) with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("DistributionBalanceMeasure")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/FeatureBalanceMeasure.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/FeatureBalanceMeasure.scala index b847dddbcc..bf4ed80fe0 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/FeatureBalanceMeasure.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/exploratory/FeatureBalanceMeasure.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.exploratory import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.param._ import org.apache.spark.ml.param.shared.HasLabelCol import org.apache.spark.ml.util.Identifiable @@ -43,7 +43,7 @@ class FeatureBalanceMeasure(override val uid: String) with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("FeatureBalanceMeasure")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CleanMissingData.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CleanMissingData.scala index d534be90a1..87c84a1fcf 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CleanMissingData.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CleanMissingData.scala @@ -5,7 +5,7 @@ package 
com.microsoft.azure.synapse.ml.featurize import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCols, HasOutputCols} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.UntypedArrayParam import org.apache.spark.annotation.DeveloperApi import org.apache.spark.ml._ @@ -50,7 +50,7 @@ object CleanMissingData extends DefaultParamsReadable[CleanMissingData] { */ class CleanMissingData(override val uid: String) extends Estimator[CleanMissingDataModel] with HasInputCols with HasOutputCols with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("CleanMissingData")) @@ -146,7 +146,7 @@ class CleanMissingData(override val uid: String) extends Estimator[CleanMissingD class CleanMissingDataModel(val uid: String) extends Model[CleanMissingDataModel] with ComplexParamsWritable with Wrappable with HasInputCols with HasOutputCols with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("CleanMissingDataModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CountSelector.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CountSelector.scala index ac2276674d..44d984d79e 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CountSelector.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/CountSelector.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.featurize import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} 
import org.apache.spark.ml.feature._ import org.apache.spark.ml.linalg.SQLDataTypes.VectorType import org.apache.spark.ml.linalg.Vector @@ -22,7 +22,7 @@ object CountSelector extends DefaultParamsReadable[CountSelector] /** Drops vector indicies with no nonzero data. */ class CountSelector(override val uid: String) extends Estimator[CountSelectorModel] with Wrappable with DefaultParamsWritable with HasInputCol with HasOutputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("CountBasedFeatureSelector")) @@ -55,7 +55,7 @@ object CountSelectorModel extends DefaultParamsReadable[CountSelectorModel] class CountSelectorModel(val uid: String) extends Model[CountSelectorModel] with HasInputCol with HasOutputCol with DefaultParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("CountBasedFeatureSelectorModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/DataConversion.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/DataConversion.scala index 56fd8950a8..6b2b2667bc 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/DataConversion.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/DataConversion.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.featurize import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.{Param, ParamMap, StringArrayParam} import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -21,7 +21,7 @@ import java.sql.Timestamp */ class DataConversion(override val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with 
SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("DataConversion")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/Featurize.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/Featurize.scala index ac1bbf706c..0861fcbdf7 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/Featurize.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/Featurize.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.featurize import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCols, HasOutputCol} import com.microsoft.azure.synapse.ml.featurize.text.TextFeaturizer -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.stages.{DropColumns, Lambda, UDFTransformer} import org.apache.spark.ml.feature.{OneHotEncoder, SQLTransformer, VectorAssembler} import org.apache.spark.ml.linalg.SQLDataTypes.VectorType @@ -34,7 +34,7 @@ object Featurize extends DefaultParamsReadable[Featurize] /** Featurizes a dataset. Converts the specified columns to feature columns. 
*/ class Featurize(override val uid: String) extends Estimator[PipelineModel] with Wrappable with DefaultParamsWritable with HasOutputCol with HasInputCols with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("Featurize")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/IndexToValue.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/IndexToValue.scala index 4e8a0ae847..3a29f66070 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/IndexToValue.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/IndexToValue.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.SchemaConstants._ import com.microsoft.azure.synapse.ml.core.schema.{CategoricalColumnInfo, CategoricalUtilities} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ @@ -28,7 +28,7 @@ object IndexToValue extends DefaultParamsReadable[IndexToValue] class IndexToValue(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("IndexToValue")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/ValueIndexer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/ValueIndexer.scala index be6df541d8..80dd62022a 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/ValueIndexer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/ValueIndexer.scala @@ -6,7 +6,7 @@ package 
com.microsoft.azure.synapse.ml.featurize import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.CategoricalMap -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.UntypedArrayParam import org.apache.spark.annotation.DeveloperApi import org.apache.spark.ml._ @@ -56,7 +56,7 @@ object NullOrdering { */ class ValueIndexer(override val uid: String) extends Estimator[ValueIndexerModel] with ValueIndexerParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("ValueIndexer")) @@ -106,7 +106,7 @@ class ValueIndexer(override val uid: String) extends Estimator[ValueIndexerModel /** Model produced by [[ValueIndexer]]. */ class ValueIndexerModel(val uid: String) extends Model[ValueIndexerModel] with ValueIndexerParams with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("ValueIndexerModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/MultiNGram.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/MultiNGram.scala index 1c2cb7e198..fd958d1216 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/MultiNGram.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/MultiNGram.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.featurize.text import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TypedIntArrayParam import org.apache.spark.ml._ import org.apache.spark.ml.feature._ @@ -25,7 +25,7 @@ object MultiNGram extends DefaultParamsReadable[MultiNGram] class MultiNGram(override val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("MultiNGram")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/PageSplitter.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/PageSplitter.scala index bd4a5d9634..91110dc0c4 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/PageSplitter.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/PageSplitter.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.featurize.text import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.injections.UDFUtils import org.apache.spark.ml._ import org.apache.spark.ml.param._ @@ -23,7 +23,7 @@ object PageSplitter extends DefaultParamsReadable[PageSplitter] class PageSplitter(override val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("PageSplitter")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/TextFeaturizer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/TextFeaturizer.scala index ded629426c..ffa536bdbc 100644 --- 
a/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/TextFeaturizer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/featurize/text/TextFeaturizer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.featurize.text import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.stages.DropColumns import org.apache.spark.ml.attribute.AttributeGroup import org.apache.spark.ml.feature._ @@ -193,7 +193,7 @@ object TextFeaturizer extends DefaultParamsReadable[TextFeaturizer] class TextFeaturizer(override val uid: String) extends Estimator[PipelineModel] with TextFeaturizerParams with HasInputCol with HasOutputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.Featurize) def this() = this(Identifiable.randomUID("TextFeaturizer")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/image/SuperpixelTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/image/SuperpixelTransformer.scala index b7351ef8c5..96d8d81c5a 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/image/SuperpixelTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/image/SuperpixelTransformer.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.image import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.ImageSchemaUtils -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.{DoubleParam, ParamMap, Params} import 
org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -37,7 +37,7 @@ trait HasModifier extends Params { class SuperpixelTransformer(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with HasCellSize with HasModifier with SynapseMLLogging { - logClass() + logClass(FeatureNames.Image) def this() = this(Identifiable.randomUID("SuperpixelTransformer")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/image/UnrollImage.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/image/UnrollImage.scala index ccc67d5f6f..840c915e54 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/image/UnrollImage.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/image/UnrollImage.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.ImageSchemaUtils import com.microsoft.azure.synapse.ml.io.image.ImageUtils -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.Transformer import org.apache.spark.ml.linalg.SQLDataTypes.VectorType @@ -168,7 +168,7 @@ object UnrollImage extends DefaultParamsReadable[UnrollImage] { */ class UnrollImage(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Image) import UnrollImage._ @@ -203,7 +203,7 @@ object UnrollBinaryImage extends DefaultParamsReadable[UnrollBinaryImage] */ class UnrollBinaryImage(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Image) import 
UnrollImage._ def this() = this(Identifiable.randomUID("UnrollImage")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala index 674c9c9f23..40dc67dc54 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.io.http import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.io.http.HandlingUtils.HandlerFunc -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.UDFParam import org.apache.http.impl.client.CloseableHttpClient import org.apache.spark.injections.UDFUtils @@ -96,7 +96,7 @@ class HTTPTransformer(val uid: String) extends Transformer with ConcurrencyParams with HasInputCol with HasOutputCol with HasHandler with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) setDefault(handler -> HandlingUtils.advancedUDF(100, 500, 1000)) //scalastyle:ignore magic.number diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/Parsers.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/Parsers.scala index 684e91034c..0dd3a60d6e 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/Parsers.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/Parsers.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions.{findUnusedColumnName => newCol} import 
com.microsoft.azure.synapse.ml.core.serialize.ComplexParam -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param._ import com.microsoft.azure.synapse.ml.stages.UDFTransformer import org.apache.http.client.methods.HttpRequestBase @@ -35,7 +35,7 @@ object JSONInputParser extends ComplexParamsReadable[JSONInputParser] class JSONInputParser(val uid: String) extends HTTPInputParser with HasURL with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("JSONInputParser")) @@ -92,7 +92,7 @@ class JSONInputParser(val uid: String) extends HTTPInputParser object CustomInputParser extends ComplexParamsReadable[CustomInputParser] with Serializable class CustomInputParser(val uid: String) extends HTTPInputParser with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("CustomInputParser")) @@ -154,7 +154,7 @@ abstract class HTTPOutputParser extends Transformer with HasInputCol with HasOut object JSONOutputParser extends ComplexParamsReadable[JSONOutputParser] class JSONOutputParser(val uid: String) extends HTTPOutputParser with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) override protected lazy val pyInternalWrapper = true @@ -209,7 +209,7 @@ class JSONOutputParser(val uid: String) extends HTTPOutputParser with ComplexPar object StringOutputParser extends ComplexParamsReadable[StringOutputParser] class StringOutputParser(val uid: String) extends HTTPOutputParser with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("StringOutputParser")) @@ -230,7 +230,7 @@ class StringOutputParser(val uid: String) extends HTTPOutputParser with ComplexP object CustomOutputParser extends 
ComplexParamsReadable[CustomOutputParser] with Serializable class CustomOutputParser(val uid: String) extends HTTPOutputParser with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("CustomOutputParser")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/SimpleHTTPTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/SimpleHTTPTransformer.scala index 2ac095c142..2e2d7472eb 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/SimpleHTTPTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/SimpleHTTPTransformer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.io.http import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions.{findUnusedColumnName => newCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TransformerParam import com.microsoft.azure.synapse.ml.stages.{DropColumns, FlattenBatch, HasMiniBatcher, Lambda} import org.apache.commons.io.IOUtils @@ -65,7 +65,7 @@ object ErrorUtils extends Serializable { class SimpleHTTPTransformer(val uid: String) extends Transformer with ConcurrencyParams with HasMiniBatcher with HasHandler with HasInputCol with HasOutputCol with ComplexParamsWritable with HasErrorCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) override protected lazy val pyInternalWrapper = true diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/isolationforest/IsolationForest.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/isolationforest/IsolationForest.scala index 132f18dae9..afe0ae5624 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/isolationforest/IsolationForest.scala +++ 
b/core/src/main/scala/com/microsoft/azure/synapse/ml/isolationforest/IsolationForest.scala @@ -3,10 +3,10 @@ package com.microsoft.azure.synapse.ml.isolationforest -import com.linkedin.relevance.isolationforest.{IsolationForestParams, - IsolationForest => IsolationForestSource, IsolationForestModel => IsolationForestModelSource} +import com.linkedin.relevance.isolationforest.{IsolationForestParams, IsolationForest => IsolationForestSource, + IsolationForestModel => IsolationForestModelSource} import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TransformerParam import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.util._ @@ -19,7 +19,7 @@ object IsolationForest extends DefaultParamsReadable[IsolationForest] class IsolationForest(override val uid: String, val that: IsolationForestSource) extends Estimator[IsolationForestModel] with IsolationForestParams with DefaultParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.IsolationForest) def this(uid: String) = this(uid, new IsolationForestSource(uid)) @@ -43,7 +43,7 @@ class IsolationForest(override val uid: String, val that: IsolationForestSource) class IsolationForestModel(override val uid: String) extends Model[IsolationForestModel] with IsolationForestParams with ComplexParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.IsolationForest) override lazy val pyInternalWrapper = true diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/FeatureNames.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/FeatureNames.scala new file mode 100644 index 0000000000..b54959304d --- /dev/null +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/FeatureNames.scala @@ -0,0 +1,36 @@ +// Copyright (C) Microsoft 
Corporation. All rights reserved. +// Licensed under the MIT License. See LICENSE in project root for information. + +package com.microsoft.azure.synapse.ml.logging + +object FeatureNames { + object AiServices { + val Anomaly = "aiservice-anomalydetection" + val BingImage = "aiservice-bingimage" + val Face = "aiservice-face" + val Form = "aiservice-form" + val Language = "aiservice-language" + val OpenAI = "aiservice-openai" + val Search = "aiservice-search" + val Speech = "aiservice-speech" + val Text = "aiservice-text" + val Translate = "aiservice-translate" + val Vision = "aiservice-vision" + } + + val AutoML = "automl" + val Causal = "causal" + val Explainers = "explainers" + val Featurize = "featurize" + val Geospatial = "geospatial" + val Image = "image" + val IsolationForest = "isolationforest" + val NearestNeighbor = "nearestneighbor" + val Recommendation = "recommendation" + val DeepLearning = "deeplearning" + val OpenCV = "opencv" + val LightGBM = "lightgbm" + val VowpalWabbit = "vowpalwabbit" + + val Core = "core" +} diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/SynapseMLLogging.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/SynapseMLLogging.scala index f5fe72c4db..0102dbafa8 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/SynapseMLLogging.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/logging/SynapseMLLogging.scala @@ -114,21 +114,21 @@ trait SynapseMLLogging extends Logging { protected def logBase(methodName: String, numCols: Option[Int], executionSeconds: Option[Double], - logCertifiedEvent: Boolean = false + featureName: Option[String] ): Unit = { logBase(getPayload( methodName, numCols, executionSeconds, - None), logCertifiedEvent) + None), featureName) } - protected def logBase(info: Map[String, String], logCertifiedEvent: Boolean): Unit = { - if (logCertifiedEvent) { + protected def logBase(info: Map[String, String], featureName: Option[String]): Unit = { + if 
(featureName.isDefined) { Future { logToCertifiedEvents( info("libraryName"), - info("method"), + featureName.get, info -- Seq("libraryName", "method") ) }.failed.map { @@ -145,23 +145,23 @@ trait SynapseMLLogging extends Logging { e) } - def logClass(): Unit = { - logBase("constructor", None, None, true) + def logClass(featureName: String): Unit = { + logBase("constructor", None, None, Some(featureName)) } - def logFit[T](f: => T, columns: Int, logCertifiedEvent: Boolean = true): T = { - logVerb("fit", f, Some(columns), logCertifiedEvent) + def logFit[T](f: => T, columns: Int): T = { + logVerb("fit", f, Some(columns)) } - def logTransform[T](f: => T, columns: Int, logCertifiedEvent: Boolean = true): T = { - logVerb("transform", f, Some(columns), logCertifiedEvent) + def logTransform[T](f: => T, columns: Int): T = { + logVerb("transform", f, Some(columns)) } - def logVerb[T](verb: String, f: => T, columns: Option[Int] = None, logCertifiedEvent: Boolean = false): T = { + def logVerb[T](verb: String, f: => T, columns: Option[Int] = None): T = { val startTime = System.nanoTime() try { val ret = f - logBase(verb, columns, Some((System.nanoTime() - startTime) / 1e9), logCertifiedEvent) + logBase(verb, columns, Some((System.nanoTime() - startTime) / 1e9), None) ret } catch { case e: Exception => diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/ConditionalKNN.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/ConditionalKNN.scala index a0aca204ff..5d93bfd97b 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/ConditionalKNN.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/ConditionalKNN.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.nn import breeze.linalg.{DenseVector => BDV} import com.microsoft.azure.synapse.ml.core.contracts.HasLabelCol -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import 
com.microsoft.azure.synapse.ml.param.ConditionalBallTreeParam import org.apache.spark.broadcast.Broadcast import org.apache.spark.injections.UDFUtils @@ -31,7 +31,7 @@ object ConditionalKNN extends DefaultParamsReadable[ConditionalKNN] class ConditionalKNN(override val uid: String) extends Estimator[ConditionalKNNModel] with ConditionalKNNParams with DefaultParamsWritable with OptimizedCKNNFitting with SynapseMLLogging { - logClass() + logClass(FeatureNames.NearestNeighbor) def this() = this(Identifiable.randomUID("ConditionalKNN")) @@ -72,7 +72,7 @@ private[ml] object KNNFuncHolder { class ConditionalKNNModel(val uid: String) extends Model[ConditionalKNNModel] with ComplexParamsWritable with ConditionalKNNParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.NearestNeighbor) def this() = this(Identifiable.randomUID("ConditionalKNNModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/KNN.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/KNN.scala index e630b0414b..ebf17dc5fc 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/KNN.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/nn/KNN.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.nn import breeze.linalg.{DenseVector => BDV} import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasFeaturesCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.BallTreeParam import org.apache.spark.broadcast.Broadcast import org.apache.spark.injections.UDFUtils @@ -48,7 +48,7 @@ trait KNNParams extends HasFeaturesCol with Wrappable with HasOutputCol { class KNN(override val uid: String) extends Estimator[KNNModel] with KNNParams with DefaultParamsWritable with OptimizedKNNFitting with SynapseMLLogging { - logClass() + 
logClass(FeatureNames.NearestNeighbor) def this() = this(Identifiable.randomUID("KNN")) @@ -78,7 +78,7 @@ class KNN(override val uid: String) extends Estimator[KNNModel] with KNNParams class KNNModel(val uid: String) extends Model[KNNModel] with ComplexParamsWritable with KNNParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.NearestNeighbor) def this() = this(Identifiable.randomUID("KNNModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingAdapter.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingAdapter.scala index 37623faf37..27e76846dc 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingAdapter.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingAdapter.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.recommendation import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasLabelCol -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.{EstimatorParam, TransformerParam} import org.apache.spark.ml._ import org.apache.spark.ml.param._ @@ -70,7 +70,7 @@ trait Mode extends HasRecommenderCols { class RankingAdapter(override val uid: String) extends Estimator[RankingAdapterModel] with ComplexParamsWritable with RankingParams with Mode with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) def this() = this(Identifiable.randomUID("RecommenderAdapter")) @@ -119,7 +119,7 @@ object RankingAdapter extends ComplexParamsReadable[RankingAdapter] class RankingAdapterModel private[ml](val uid: String) extends Model[RankingAdapterModel] with ComplexParamsWritable with Wrappable with RankingParams with Mode with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) def this() = 
this(Identifiable.randomUID("RankingAdapterModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingEvaluator.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingEvaluator.scala index 72668e03a0..93ee686c5b 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingEvaluator.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingEvaluator.scala @@ -3,7 +3,7 @@ package com.microsoft.azure.synapse.ml.recommendation -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.evaluation.Evaluator import org.apache.spark.ml.param._ import org.apache.spark.ml.recommendation.{HasRecommenderCols, RecEvaluatorParams} @@ -98,7 +98,7 @@ class AdvancedRankingMetrics(predictionAndLabels: RDD[(Array[Any], Array[Any])], class RankingEvaluator(override val uid: String) extends Evaluator with RecEvaluatorParams with HasRecommenderCols with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) def this() = this(Identifiable.randomUID("recEval")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingTrainValidationSplit.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingTrainValidationSplit.scala index 17e5141a79..115790c00b 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingTrainValidationSplit.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RankingTrainValidationSplit.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.recommendation import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import 
com.microsoft.azure.synapse.ml.param.{ModelParam, TypedDoubleArrayParam} import org.apache.spark.ml.evaluation.Evaluator import org.apache.spark.ml.param._ @@ -25,7 +25,7 @@ import scala.util.Random class RankingTrainValidationSplit(override val uid: String) extends Estimator[RankingTrainValidationSplitModel] with RankingTrainValidationSplitParams with Wrappable with ComplexParamsWritable with RecommendationParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) override lazy val pyInternalWrapper: Boolean = true @@ -293,7 +293,7 @@ class RankingTrainValidationSplitModel( override val uid: String) extends Model[RankingTrainValidationSplitModel] with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) override protected lazy val pyInternalWrapper = true diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RecommendationIndexer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RecommendationIndexer.scala index 57a230384c..84042a75d3 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RecommendationIndexer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/RecommendationIndexer.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.recommendation import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TransformerParam import org.apache.spark.ml.attribute.NominalAttribute import org.apache.spark.ml.feature.{StringIndexer, StringIndexerModel} @@ -18,7 +18,7 @@ import org.apache.spark.sql.{DataFrame, Dataset} class RecommendationIndexer(override val uid: String) extends Estimator[RecommendationIndexerModel] with RecommendationIndexerBase with Wrappable with SynapseMLLogging { - logClass() + 
logClass(FeatureNames.Recommendation) def this() = this(Identifiable.randomUID("RecommendationIndexer")) @@ -53,7 +53,7 @@ object RecommendationIndexer extends ComplexParamsReadable[RecommendationIndexer class RecommendationIndexerModel(override val uid: String) extends Model[RecommendationIndexerModel] with RecommendationIndexerBase with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) override def copy(extra: ParamMap): RecommendationIndexerModel = defaultCopy(extra) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SAR.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SAR.scala index 0addd6b66e..6e9d0ace45 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SAR.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SAR.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.recommendation import breeze.linalg.{CSCMatrix => BSM} import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Estimator import org.apache.spark.ml.param._ import org.apache.spark.ml.recommendation.{RecommendationParams, Constants => C} @@ -35,7 +35,7 @@ import scala.language.existentials */ class SAR(override val uid: String) extends Estimator[SARModel] with SARParams with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) /** @group getParam */ def getSimilarityFunction: String = $(similarityFunction) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SARModel.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SARModel.scala index b92339cff1..83a5de4022 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SARModel.scala +++ 
b/core/src/main/scala/com/microsoft/azure/synapse/ml/recommendation/SARModel.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.recommendation import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.DataFrameParam import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.recommendation.{BaseRecommendationModel, Constants} @@ -22,7 +22,7 @@ import org.apache.spark.sql.{DataFrame, Dataset, Row} */ class SARModel(override val uid: String) extends Model[SARModel] with BaseRecommendationModel with Wrappable with SARParams with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Recommendation) override protected lazy val pyInternalWrapper = true diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Cacher.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Cacher.scala index 6fabce6739..f8bc39bf12 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Cacher.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Cacher.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.{BooleanParam, ParamMap} import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -12,7 +12,7 @@ import org.apache.spark.sql.types.StructType import org.apache.spark.sql.{DataFrame, Dataset} class Cacher(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) val disable = new BooleanParam(this, 
"disable", "Whether or disable caching (so that you can turn it off during evaluation)") diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/ClassBalancer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/ClassBalancer.scala index 401469c7ab..40aa1051d8 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/ClassBalancer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/ClassBalancer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.DataFrameParam import org.apache.spark.ml.param.{BooleanParam, ParamMap} import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -25,7 +25,7 @@ import org.apache.spark.sql.{DataFrame, Dataset} */ class ClassBalancer(override val uid: String) extends Estimator[ClassBalancerModel] with DefaultParamsWritable with HasInputCol with HasOutputCol with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("ClassBalancer")) @@ -66,7 +66,7 @@ object ClassBalancer extends DefaultParamsReadable[ClassBalancer] class ClassBalancerModel(val uid: String) extends Model[ClassBalancerModel] with ComplexParamsWritable with Wrappable with HasInputCol with HasOutputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("ClassBalancerModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/DropColumns.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/DropColumns.scala index 818d06bc99..1a4c7491d2 100644 --- 
a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/DropColumns.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/DropColumns.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ @@ -19,7 +19,7 @@ object DropColumns extends DefaultParamsReadable[DropColumns] */ class DropColumns(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("DropColumns")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/EnsembleByKey.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/EnsembleByKey.scala index 9129af28c2..72484947c4 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/EnsembleByKey.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/EnsembleByKey.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.StringIntMapParam import org.apache.spark.ml.Transformer import org.apache.spark.ml.linalg.SQLDataTypes._ @@ -21,7 +21,7 @@ object EnsembleByKey extends DefaultParamsReadable[EnsembleByKey] class EnsembleByKey(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("EnsembleByKey")) diff --git 
a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Explode.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Explode.scala index 682bdf6655..5501f3a5fe 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Explode.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Explode.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ @@ -17,7 +17,7 @@ object Explode extends DefaultParamsReadable[Explode] class Explode(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("Explode")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Lambda.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Lambda.scala index 53e9efa5b2..8be7ba1700 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Lambda.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Lambda.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.UDFParam import org.apache.spark.SparkContext import org.apache.spark.injections.UDFUtils @@ -22,7 +22,7 @@ object Lambda extends ComplexParamsReadable[Lambda] with Serializable { } class Lambda(val uid: String) extends Transformer 
with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("Lambda")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala index d32633edc4..0fef316649 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.TransformerParam import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ @@ -54,7 +54,7 @@ object DynamicMiniBatchTransformer extends DefaultParamsReadable[DynamicMiniBatc class DynamicMiniBatchTransformer(val uid: String) extends MiniBatchBase with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) val maxBatchSize: Param[Int] = new IntParam( this, "maxBatchSize", "The max size of the buffer") @@ -78,7 +78,7 @@ object TimeIntervalMiniBatchTransformer extends DefaultParamsReadable[TimeInterv class TimeIntervalMiniBatchTransformer(val uid: String) extends MiniBatchBase with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) val maxBatchSize: Param[Int] = new IntParam( this, "maxBatchSize", "The max size of the buffer") @@ -152,7 +152,7 @@ trait HasBatchSize extends Params { class FixedMiniBatchTransformer(val uid: String) extends MiniBatchBase with HasBatchSize with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) val maxBufferSize: Param[Int] = new IntParam( this, "maxBufferSize", "The max size of the buffer") @@ -188,7 +188,7 @@ object FlattenBatch extends 
DefaultParamsReadable[FlattenBatch] class FlattenBatch(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("FlattenBatch")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MultiColumnAdapter.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MultiColumnAdapter.scala index 14a81859a5..847056dbc0 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MultiColumnAdapter.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MultiColumnAdapter.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.PipelineStageParam import org.apache.spark.ml._ import org.apache.spark.ml.param.{ParamMap, StringArrayParam} @@ -19,7 +19,7 @@ object MultiColumnAdapter extends ComplexParamsReadable[MultiColumnAdapter] */ class MultiColumnAdapter(override val uid: String) extends Estimator[PipelineModel] with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("MultiColumnAdapter")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/PartitionConsolidator.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/PartitionConsolidator.scala index 1b2cf34ca5..bd6e1340bf 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/PartitionConsolidator.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/PartitionConsolidator.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import 
com.microsoft.azure.synapse.ml.io.http.{ConcurrencyParams, SharedSingleton} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.param._ import org.apache.spark.ml.util.{DefaultParamsReadable, Identifiable} import org.apache.spark.ml.{ComplexParamsWritable, Transformer} @@ -23,7 +23,7 @@ class PartitionConsolidator(val uid: String) extends Transformer with ConcurrencyParams with HasInputCol with HasOutputCol with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("PartitionConsolidator")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/RenameColumn.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/RenameColumn.scala index b2f5cb1119..40617fb23f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/RenameColumn.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/RenameColumn.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -20,7 +20,7 @@ object RenameColumn extends DefaultParamsReadable[RenameColumn] */ class RenameColumn(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with HasInputCol with HasOutputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("RenameColumn")) diff --git 
a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Repartition.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Repartition.scala index 17f2931576..94d4a1f9aa 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Repartition.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Repartition.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -18,7 +18,7 @@ object Repartition extends DefaultParamsReadable[Repartition] * @param uid The id of the module */ class Repartition(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("Repartition")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SelectColumns.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SelectColumns.scala index 4274ec0655..665ad9de58 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SelectColumns.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SelectColumns.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ @@ -22,7 +22,7 @@ object SelectColumns extends DefaultParamsReadable[SelectColumns] class SelectColumns(val uid: String) extends 
Transformer with Wrappable with DefaultParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("SelectColumns")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/StratifiedRepartition.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/StratifiedRepartition.scala index 7ae19a5fbc..d8808fd44f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/StratifiedRepartition.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/StratifiedRepartition.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasLabelCol -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.RangePartitioner import org.apache.spark.ml.Transformer import org.apache.spark.ml.param._ @@ -30,7 +30,7 @@ object StratifiedRepartition extends DefaultParamsReadable[DropColumns] */ class StratifiedRepartition(val uid: String) extends Transformer with Wrappable with DefaultParamsWritable with HasLabelCol with HasSeed with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("StratifiedRepartition")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SummarizeData.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SummarizeData.scala index e18d1ec2fb..9e7257f4da 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SummarizeData.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/SummarizeData.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.{BooleanParam, DoubleParam, ParamMap} import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -101,7 +101,7 @@ trait SummarizeDataParams extends Wrappable with DefaultParamsWritable { class SummarizeData(override val uid: String) extends Transformer with SummarizeDataParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("SummarizeData")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/TextPreprocessor.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/TextPreprocessor.scala index c9fc5b45d1..82f52641e7 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/TextPreprocessor.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/TextPreprocessor.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.StringStringMapParam import org.apache.spark.ml.param.{Param, ParamMap} import org.apache.spark.ml.util.Identifiable @@ -97,7 +97,7 @@ object TextPreprocessor extends ComplexParamsReadable[TextPreprocessor] */ class TextPreprocessor(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("TextPreprocessor")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Timer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Timer.scala index 
fe6092c4f7..fde6f1cc93 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Timer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/Timer.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.{PipelineStageParam, TransformerParam} import org.apache.spark.ml._ import org.apache.spark.ml.param.{BooleanParam, ParamMap} @@ -55,7 +55,7 @@ trait TimerParams extends Wrappable { class Timer(val uid: String) extends Estimator[TimerModel] with TimerParams with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("Timer")) @@ -93,7 +93,7 @@ object TimerModel extends ComplexParamsReadable[TimerModel] class TimerModel(val uid: String) extends Model[TimerModel] with TimerParams with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("TimerModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UDFTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UDFTransformer.scala index 7a964d4b7a..70389b88fb 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UDFTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UDFTransformer.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasInputCols, HasOutputCol} import com.microsoft.azure.synapse.ml.core.serialize.ComplexParam -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} 
import com.microsoft.azure.synapse.ml.param.{UDFParam, UDPyFParam} import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.param.ParamMap @@ -26,7 +26,7 @@ object UDFTransformer extends ComplexParamsReadable[UDFTransformer] with Seriali */ class UDFTransformer(val uid: String) extends Transformer with Wrappable with ComplexParamsWritable with HasInputCol with HasInputCols with HasOutputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("UDFTransformer")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UnicodeNormalize.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UnicodeNormalize.scala index 546691540f..5ef14b3764 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UnicodeNormalize.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/UnicodeNormalize.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.stages import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.param.{BooleanParam, Param, ParamMap} import org.apache.spark.ml.util.Identifiable import org.apache.spark.ml.{ComplexParamsReadable, ComplexParamsWritable, Transformer} @@ -21,7 +21,7 @@ object UnicodeNormalize extends ComplexParamsReadable[UnicodeNormalize] */ class UnicodeNormalize(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("UnicodeNormalize")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputeModelStatistics.scala 
b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputeModelStatistics.scala index 767ec41423..277dbf0f97 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputeModelStatistics.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputeModelStatistics.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts._ import com.microsoft.azure.synapse.ml.core.metrics.{MetricConstants, MetricUtils} import com.microsoft.azure.synapse.ml.core.schema.{CategoricalUtilities, SchemaConstants, SparkSchema} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.log4j.Logger import org.apache.spark.ml.Transformer import org.apache.spark.ml.linalg.{SQLDataTypes, Vector} @@ -57,7 +57,7 @@ trait ComputeModelStatisticsParams extends Wrappable with DefaultParamsWritable /** Evaluates the given scored dataset. 
*/ class ComputeModelStatistics(override val uid: String) extends Transformer with ComputeModelStatisticsParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("ComputeModelStatistics")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputePerInstanceStatistics.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputePerInstanceStatistics.scala index 795b87135a..cceecf77ea 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputePerInstanceStatistics.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/ComputePerInstanceStatistics.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts._ import com.microsoft.azure.synapse.ml.core.metrics.{MetricConstants, MetricUtils} import com.microsoft.azure.synapse.ml.core.schema.{CategoricalUtilities, SchemaConstants, SparkSchema} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.Transformer import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable} @@ -44,7 +44,7 @@ trait CPISParams extends Wrappable with DefaultParamsWritable */ class ComputePerInstanceStatistics(override val uid: String) extends Transformer with CPISParams with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("ComputePerInstanceStatistics")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainClassifier.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainClassifier.scala index 7847bc50ff..1212acc05f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainClassifier.scala +++ 
b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainClassifier.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.{CategoricalUtilities, SchemaConstants, SparkSchema} import com.microsoft.azure.synapse.ml.core.utils.CastUtilities._ import com.microsoft.azure.synapse.ml.featurize.{Featurize, FeaturizeUtilities, ValueIndexer, ValueIndexerModel} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.UntypedArrayParam import org.apache.spark.annotation.DeveloperApi import org.apache.spark.ml._ @@ -50,7 +50,7 @@ import scala.collection.JavaConverters._ * In addition to any generic learner that inherits from Predictor. */ class TrainClassifier(override val uid: String) extends AutoTrainer[TrainedClassifierModel] with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("TrainClassifier")) @@ -305,7 +305,7 @@ object TrainClassifier extends ComplexParamsReadable[TrainClassifier] { /** Model produced by [[TrainClassifier]]. 
*/ class TrainedClassifierModel(val uid: String) extends AutoTrainedModel[TrainedClassifierModel] with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("TrainClassifierModel")) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainRegressor.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainRegressor.scala index 06b81f0270..7ec6c87121 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainRegressor.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/train/TrainRegressor.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.train import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.{SchemaConstants, SparkSchema} import com.microsoft.azure.synapse.ml.featurize.{Featurize, FeaturizeUtilities} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.annotation.DeveloperApi import org.apache.spark.ml._ import org.apache.spark.ml.param._ @@ -20,7 +20,7 @@ import java.util.UUID /** Trains a regression model. 
*/ class TrainRegressor(override val uid: String) extends AutoTrainer[TrainedRegressorModel] with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("TrainRegressor")) @@ -149,7 +149,7 @@ object TrainRegressor extends ComplexParamsReadable[TrainRegressor] { class TrainedRegressorModel(val uid: String) extends AutoTrainedModel[TrainedRegressorModel] with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.Core) def this() = this(Identifiable.randomUID("TrainedRegressorModel")) diff --git a/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ImageFeaturizer.scala b/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ImageFeaturizer.scala index 44223d3cfd..fce900d7b1 100644 --- a/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ImageFeaturizer.scala +++ b/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ImageFeaturizer.scala @@ -7,7 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions.findUnusedColumnName -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.opencv.ImageTransformer import com.microsoft.azure.synapse.ml.param.TransformerParam import org.apache.spark.ml.linalg.DenseVector @@ -33,7 +33,7 @@ object ImageFeaturizer extends ComplexParamsReadable[ImageFeaturizer] */ class ImageFeaturizer(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.DeepLearning) def this() = this(Identifiable.randomUID("ImageFeaturizer")) diff --git 
a/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ONNXModel.scala b/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ONNXModel.scala index 4792ba5668..3378cd9721 100644 --- a/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ONNXModel.scala +++ b/deep-learning/src/main/scala/com/microsoft/azure/synapse/ml/onnx/ONNXModel.scala @@ -11,7 +11,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.env.StreamUtilities.using import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions import com.microsoft.azure.synapse.ml.core.utils.BreezeUtils._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.onnx.ONNXRuntime._ import com.microsoft.azure.synapse.ml.onnx.ONNXUtils._ import com.microsoft.azure.synapse.ml.param.{ByteArrayParam, StringStringMapParam} @@ -151,7 +151,7 @@ class ONNXModel(override val uid: String) override protected lazy val pyInternalWrapper = true - logClass() + logClass(FeatureNames.DeepLearning) def this() = this(Identifiable.randomUID("ONNXModel")) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala index 5916d085f6..cd498500c3 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala @@ -4,9 +4,9 @@ package com.microsoft.azure.synapse.ml.lightgbm import com.microsoft.azure.synapse.ml.lightgbm.booster.LightGBMBooster -import com.microsoft.azure.synapse.ml.lightgbm.params.{ - BaseTrainParams, ClassifierTrainParams, LightGBMModelParams, LightGBMPredictionParams} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import 
com.microsoft.azure.synapse.ml.lightgbm.params.{BaseTrainParams, ClassifierTrainParams, + LightGBMModelParams, LightGBMPredictionParams} +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.classification.{ProbabilisticClassificationModel, ProbabilisticClassifier} import org.apache.spark.ml.linalg.{Vector, Vectors} import org.apache.spark.ml.param._ @@ -27,7 +27,7 @@ object LightGBMClassifier extends DefaultParamsReadable[LightGBMClassifier] class LightGBMClassifier(override val uid: String) extends ProbabilisticClassifier[Vector, LightGBMClassifier, LightGBMClassificationModel] with LightGBMBase[LightGBMClassificationModel] with SynapseMLLogging { - logClass() + logClass(FeatureNames.LightGBM) def this() = this(Identifiable.randomUID("LightGBMClassifier")) @@ -102,7 +102,7 @@ class LightGBMClassificationModel(override val uid: String) extends ProbabilisticClassificationModel[Vector, LightGBMClassificationModel] with LightGBMModelParams with LightGBMModelMethods with LightGBMPredictionParams with HasActualNumClasses with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.LightGBM) def this() = this(Identifiable.randomUID("LightGBMClassificationModel")) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala index 9dc62831c6..ceaa39c462 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala @@ -4,9 +4,9 @@ package com.microsoft.azure.synapse.ml.lightgbm import com.microsoft.azure.synapse.ml.lightgbm.booster.LightGBMBooster -import com.microsoft.azure.synapse.ml.lightgbm.params.{ - BaseTrainParams, LightGBMModelParams, LightGBMPredictionParams, RankerTrainParams} -import 
com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.lightgbm.params.{BaseTrainParams, LightGBMModelParams, + LightGBMPredictionParams, RankerTrainParams} +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.linalg.Vector import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ @@ -26,7 +26,7 @@ object LightGBMRanker extends DefaultParamsReadable[LightGBMRanker] class LightGBMRanker(override val uid: String) extends Ranker[Vector, LightGBMRanker, LightGBMRankerModel] with LightGBMBase[LightGBMRankerModel] with SynapseMLLogging { - logClass() + logClass(FeatureNames.LightGBM) def this() = this(Identifiable.randomUID("LightGBMRanker")) @@ -127,7 +127,7 @@ class LightGBMRankerModel(override val uid: String) with LightGBMModelMethods with LightGBMPredictionParams with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.LightGBM) def this() = this(Identifiable.randomUID("LightGBMRankerModel")) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala index 20c4871d26..9e2c119bca 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala @@ -4,9 +4,9 @@ package com.microsoft.azure.synapse.ml.lightgbm import com.microsoft.azure.synapse.ml.lightgbm.booster.LightGBMBooster -import com.microsoft.azure.synapse.ml.lightgbm.params.{ - BaseTrainParams, LightGBMModelParams, LightGBMPredictionParams, RegressorTrainParams} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.lightgbm.params.{BaseTrainParams, LightGBMModelParams, + LightGBMPredictionParams, RegressorTrainParams} +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.linalg.Vector import org.apache.spark.ml.param._ import org.apache.spark.ml.regression.RegressionModel @@ -39,7 +39,7 @@ object LightGBMRegressor extends DefaultParamsReadable[LightGBMRegressor] class LightGBMRegressor(override val uid: String) extends BaseRegressor[Vector, LightGBMRegressor, LightGBMRegressionModel] with LightGBMBase[LightGBMRegressionModel] with SynapseMLLogging { - logClass() + logClass(FeatureNames.LightGBM) def this() = this(Identifiable.randomUID("LightGBMRegressor")) @@ -98,7 +98,7 @@ class LightGBMRegressionModel(override val uid: String) with LightGBMModelMethods with LightGBMPredictionParams with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.LightGBM) def this() = this(Identifiable.randomUID("LightGBMRegressionModel")) diff --git a/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageSetAugmenter.scala b/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageSetAugmenter.scala index 5e023543e1..97d76f4fbc 100644 --- a/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageSetAugmenter.scala +++ b/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageSetAugmenter.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.opencv import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml._ import org.apache.spark.ml.image.ImageSchema import org.apache.spark.ml.param._ @@ -17,7 +17,7 @@ object ImageSetAugmenter extends DefaultParamsReadable[ImageSetAugmenter] class ImageSetAugmenter(val uid: String) extends Transformer with HasInputCol with HasOutputCol with DefaultParamsWritable with Wrappable with SynapseMLLogging { - 
logClass() + logClass(FeatureNames.OpenCV) def this() = this(Identifiable.randomUID("ImageSetAugmenter")) diff --git a/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageTransformer.scala b/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageTransformer.scala index e2c856933d..c3f3fc9a92 100644 --- a/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageTransformer.scala +++ b/opencv/src/main/scala/com/microsoft/azure/synapse/ml/opencv/ImageTransformer.scala @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.opencv import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.core.schema.{BinaryFileSchema, ImageSchemaUtils} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.{ArrayMapParam, DataTypeParam} import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.image.ImageSchema @@ -428,7 +428,7 @@ object ImageTransformer extends DefaultParamsReadable[ImageTransformer] { */ class ImageTransformer(val uid: String) extends Transformer with HasInputCol with HasOutputCol with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.OpenCV) import ImageTransformer._ import ImageTransformerStage._ diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieRead.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieRead.scala index 9b82112d9b..a4cfdbfc19 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieRead.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieRead.scala @@ -3,7 +3,7 @@ package com.microsoft.azure.synapse.ml.policyeval -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import 
com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.policyeval import com.microsoft.azure.synapse.ml.vw.KahanSum import org.apache.spark.ml.util.Identifiable @@ -21,7 +21,7 @@ class CressieRead with SynapseMLLogging { override val uid: String = Identifiable.randomUID("BanditEstimatorCressieRead") - logClass() + logClass(FeatureNames.VowpalWabbit) def zero: CressieReadBuffer = policyeval.CressieReadBuffer() diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieReadInterval.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieReadInterval.scala index f046e00e5b..0a4785a5d0 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieReadInterval.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/CressieReadInterval.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.policyeval import breeze.stats.distributions.FDistribution -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.policyeval import com.microsoft.azure.synapse.ml.vw.KahanSum import org.apache.spark.ml.util.Identifiable @@ -24,7 +24,7 @@ class CressieReadInterval(empiricalBounds: Boolean) with SynapseMLLogging { override val uid: String = Identifiable.randomUID("BanditEstimatorCressieReadInterval") - logClass() + logClass(FeatureNames.VowpalWabbit) val alpha = 0.05 val atol = 1e-9 diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Ips.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Ips.scala index c8a193bfda..f32d51d59c 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Ips.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Ips.scala @@ -3,7 +3,7 @@ package com.microsoft.azure.synapse.ml.policyeval -import 
com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.util.Identifiable import org.apache.spark.sql.expressions.Aggregator import org.apache.spark.sql.{Encoder, Encoders} @@ -19,7 +19,7 @@ class Ips with SynapseMLLogging { override val uid: String = Identifiable.randomUID("BanditEstimatorIps") - logClass() + logClass(FeatureNames.VowpalWabbit) def zero: IpsBuffer = IpsBuffer(0, 0) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Snips.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Snips.scala index 0a07b4f922..63490565dd 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Snips.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/policyeval/Snips.scala @@ -3,7 +3,7 @@ package com.microsoft.azure.synapse.ml.policyeval -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.util.Identifiable import org.apache.spark.sql.expressions.Aggregator import org.apache.spark.sql.{Encoder, Encoders} @@ -19,7 +19,7 @@ class Snips with SynapseMLLogging { override val uid: String = Identifiable.randomUID("BanditEstimatorSnips") - logClass() + logClass(FeatureNames.VowpalWabbit) def zero: SnipsBuffer = SnipsBuffer(0, 0) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/KahanSum.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/KahanSum.scala index 6254c9800c..4936044b73 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/KahanSum.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/KahanSum.scala @@ -3,7 +3,7 @@ package com.microsoft.azure.synapse.ml.vw -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.util.Identifiable 
import org.apache.spark.sql.{Encoder, Encoders} import org.apache.spark.sql.expressions.Aggregator @@ -19,7 +19,7 @@ class KahanSumAggregator with SynapseMLLogging { override val uid: String = Identifiable.randomUID("BanditEstimatorIps") - logClass() + logClass(FeatureNames.VowpalWabbit) def zero: KahanSum = KahanSum() diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VectorZipper.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VectorZipper.scala index e473b9bb51..ae195dcf35 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VectorZipper.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VectorZipper.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCols, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ import org.apache.spark.ml.{ComplexParamsReadable, ComplexParamsWritable, Transformer} @@ -20,7 +20,7 @@ object VectorZipper extends ComplexParamsReadable[VectorZipper] */ class VectorZipper(override val uid: String) extends Transformer with HasInputCols with HasOutputCol with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VectorZipper")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitCSETransformer.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitCSETransformer.scala index 6171f97075..f08f95f046 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitCSETransformer.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitCSETransformer.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.vw import 
com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.policyeval.PolicyEvalUDAFUtil import org.apache.spark.ml.param.{DoubleParam, FloatParam, ParamMap, StringArrayParam} import org.apache.spark.ml.util.Identifiable @@ -24,7 +24,7 @@ class VowpalWabbitCSETransformer(override val uid: String) import VowpalWabbitDSJsonTransformer._ import VowpalWabbitCSETransformer._ - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitCSETransformer")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitClassifier.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitClassifier.scala index 8e3a0d14a2..7837b69915 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitClassifier.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitClassifier.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions._ -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.classification.{ProbabilisticClassificationModel, ProbabilisticClassifier} import org.apache.spark.ml.linalg.{Vector, Vectors} import org.apache.spark.ml.param._ @@ -27,7 +27,7 @@ class VowpalWabbitClassifier(override val uid: String) with VowpalWabbitBaseSpark with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) override protected lazy val pyInternalWrapper = true @@ -93,7 +93,7 @@ class VowpalWabbitClassificationModel(override val uid: String) extends ProbabilisticClassificationModel[Row, VowpalWabbitClassificationModel] with 
VowpalWabbitBaseModelSpark with ComplexParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitClassificationModel")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitContextualBandit.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitContextualBandit.scala index b9e6c488fa..9772c2dbc7 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitContextualBandit.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitContextualBandit.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.core.utils.ParamsStringBuilder import com.microsoft.azure.synapse.ml.io.http.SharedVariable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.ParamInjections.HasParallelismInjected import org.apache.spark.ml.linalg.SQLDataTypes.VectorType import org.apache.spark.ml.param._ @@ -107,7 +107,7 @@ class VowpalWabbitContextualBandit(override val uid: String) with VowpalWabbitContextualBanditBase with HasParallelismInjected with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) override protected lazy val pyInternalWrapper = true @@ -313,7 +313,7 @@ class VowpalWabbitContextualBanditModel(override val uid: String) with VowpalWabbitBaseModelSpark with VowpalWabbitContextualBanditBase with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitContextualBanditModel")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitDSJsonTransformer.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitDSJsonTransformer.scala index 7de7a2d0bd..ca5e5e9a5e 100644 --- 
a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitDSJsonTransformer.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitDSJsonTransformer.scala @@ -4,13 +4,14 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.param.StringStringMapParam import org.apache.spark.ml.param.{Param, ParamMap} import org.apache.spark.ml.{ComplexParamsReadable, ComplexParamsWritable, Transformer} import org.apache.spark.ml.util.Identifiable import org.apache.spark.sql.types.StructType import org.apache.spark.sql.{DataFrame, Dataset, functions => F, types => T} + import scala.jdk.CollectionConverters.mapAsScalaMapConverter class VowpalWabbitDSJsonTransformer(override val uid: String) @@ -20,7 +21,7 @@ class VowpalWabbitDSJsonTransformer(override val uid: String) with ComplexParamsWritable { import VowpalWabbitDSJsonTransformer._ - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitDSJsonTransformer")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitFeaturizer.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitFeaturizer.scala index 72fb64da03..eff5f58f19 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitFeaturizer.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitFeaturizer.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCols, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import com.microsoft.azure.synapse.ml.vw.featurizer._ 
import org.apache.spark.ml.linalg.SQLDataTypes.VectorType import org.apache.spark.ml.linalg.Vectors @@ -26,7 +26,7 @@ class VowpalWabbitFeaturizer(override val uid: String) extends Transformer with HasInputCols with HasOutputCol with HasNumBits with HasSumCollisions with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitFeaturizer")) setDefault(inputCols -> Array()) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGeneric.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGeneric.scala index d50cb3343f..c1b3f8e0bc 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGeneric.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGeneric.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.HasInputCol -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.{ComplexParamsReadable, ComplexParamsWritable, Estimator, Model} import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.util.Identifiable @@ -21,7 +21,7 @@ class VowpalWabbitGeneric(override val uid: String) with VowpalWabbitBaseLearner with HasInputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) override protected lazy val pyInternalWrapper = true @@ -87,7 +87,7 @@ class VowpalWabbitGenericModel(override val uid: String) with VowpalWabbitBaseModel with HasInputCol with ComplexParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) override protected lazy val pyInternalWrapper = true diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGenericProgressive.scala 
b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGenericProgressive.scala index 3ac6db931d..e7dd121ce3 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGenericProgressive.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitGenericProgressive.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.core.contracts.HasInputCol -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.ComplexParamsReadable import org.apache.spark.ml.param.ParamMap import org.apache.spark.ml.util.Identifiable @@ -19,7 +19,7 @@ class VowpalWabbitGenericProgressive(override val uid: String) extends VowpalWabbitBaseProgressive with HasInputCol with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitGenericProgressive")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitInteractions.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitInteractions.scala index 109506d457..0aaf7d8eac 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitInteractions.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitInteractions.scala @@ -5,7 +5,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCols, HasOutputCol} -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.linalg.SQLDataTypes.VectorType import org.apache.spark.ml.linalg.{Vector, Vectors} import org.apache.spark.ml.param.ParamMap @@ -25,7 +25,7 @@ class VowpalWabbitInteractions(override val uid: String) extends Transformer with 
HasInputCols with HasOutputCol with HasNumBits with HasSumCollisions with Wrappable with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitInteractions")) diff --git a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitRegressor.scala b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitRegressor.scala index eca0dd76de..b73d7a253b 100644 --- a/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitRegressor.scala +++ b/vw/src/main/scala/com/microsoft/azure/synapse/ml/vw/VowpalWabbitRegressor.scala @@ -4,7 +4,7 @@ package com.microsoft.azure.synapse.ml.vw import com.microsoft.azure.synapse.ml.codegen.Wrappable -import com.microsoft.azure.synapse.ml.logging.SynapseMLLogging +import com.microsoft.azure.synapse.ml.logging.{FeatureNames, SynapseMLLogging} import org.apache.spark.ml.param._ import org.apache.spark.ml.regression.RegressionModel import org.apache.spark.ml.util._ @@ -20,7 +20,7 @@ class VowpalWabbitRegressor(override val uid: String) extends BaseRegressor[Row, VowpalWabbitRegressor, VowpalWabbitRegressionModel] with VowpalWabbitBaseSpark with ComplexParamsWritable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) override protected lazy val pyInternalWrapper = true @@ -46,7 +46,7 @@ class VowpalWabbitRegressionModel(override val uid: String) extends RegressionModel[Row, VowpalWabbitRegressionModel] with VowpalWabbitBaseModelSpark with ComplexParamsWritable with Wrappable with SynapseMLLogging { - logClass() + logClass(FeatureNames.VowpalWabbit) def this() = this(Identifiable.randomUID("VowpalWabbitRegressionModel"))