Merge pull request #855 from msvinaykumar/getDataToGenRecommendation
Get data to generate recommendation
dinogun authored Aug 14, 2023
2 parents 2ae36cd + 780f615 commit 18ca637
Showing 13 changed files with 200 additions and 235 deletions.
@@ -15,17 +15,17 @@
*******************************************************************************/
package com.autotune.analyzer.experiment;

import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import com.autotune.analyzer.performanceProfiles.PerformanceProfileInterface.PerfProfileInterface;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.data.result.ExperimentResultData;
import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletResponse;
import java.lang.reflect.InvocationTargetException;
import java.sql.Timestamp;
import java.util.List;
import java.util.Map;

@@ -99,34 +99,18 @@ public ValidationOutputData validateAndUpdateResults(
}

// Generate recommendations and add it to the kruize object
public boolean generateAndAddRecommendations(Map<String, KruizeObject> experimentsMap, List<ExperimentResultData> experimentResultDataList) {
if (null == experimentResultDataList)
return false;
if (experimentResultDataList.size() == 0)
return false;
for (ExperimentResultData experimentResultData: experimentResultDataList) {
// TODO: Log the list of invalid experiments and return the error instead of bailing out completely
if (!experimentsMap.containsKey(experimentResultData.getExperiment_name())) {
LOGGER.error("Trying to locate Recommendation for non existent experiment: " +
experimentResultData.getExperiment_name());
continue;
}
KruizeObject kruizeObject = experimentsMap.get(experimentResultData.getExperiment_name());
if (AnalyzerConstants.PerformanceProfileConstants.perfProfileInstances.containsKey(kruizeObject.getPerformanceProfile())) {
try {
PerfProfileInterface perfProfileInstance =
(PerfProfileInterface) AnalyzerConstants.PerformanceProfileConstants
.perfProfileInstances.get(kruizeObject.getPerformanceProfile())
.getDeclaredConstructor().newInstance();
perfProfileInstance.generateRecommendation(kruizeObject, experimentResultData);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e ) {
e.printStackTrace();
}
} else {
LOGGER.error("No Recommendation Engine mapping found for performance profile: " +
kruizeObject.getPerformanceProfile() + ". Cannot process recommendations for the experiment");
}
public void generateAndAddRecommendations(KruizeObject kruizeObject, List<ExperimentResultData> experimentResultDataList, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception {
if (AnalyzerConstants.PerformanceProfileConstants.perfProfileInstances.containsKey(kruizeObject.getPerformanceProfile())) {
PerfProfileInterface perfProfileInstance =
(PerfProfileInterface) AnalyzerConstants.PerformanceProfileConstants
.perfProfileInstances.get(kruizeObject.getPerformanceProfile())
.getDeclaredConstructor().newInstance();
perfProfileInstance.generateRecommendation(kruizeObject, experimentResultDataList, interval_start_time, interval_end_time);
} else {
throw new Exception("No Recommendation Engine mapping found for performance profile: " +
kruizeObject.getPerformanceProfile() + ". Cannot process recommendations for the experiment");
}
return true;
}


}
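
The refactored generateAndAddRecommendations drops the per-result loop and the boolean return: the caller is now expected to resolve the KruizeObject itself, pass the full list of results together with the interval window, and handle an exception when no recommendation engine is mapped to the performance profile. A minimal caller sketch under those assumptions (the class name ExperimentInitiator and all variable names are illustrative, not taken from this PR):

import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.common.data.result.ExperimentResultData;
import java.sql.Timestamp;
import java.util.List;
import java.util.Map;

// Hypothetical caller sketch; names are assumptions for illustration only.
public class RecommendationTrigger {
    public static void trigger(Map<String, KruizeObject> experimentsMap,
                               String experimentName,
                               List<ExperimentResultData> results,
                               Timestamp intervalStart,
                               Timestamp intervalEnd) {
        KruizeObject kruizeObject = experimentsMap.get(experimentName);
        if (null == kruizeObject) {
            // The "non existent experiment" check moves to the caller in this design.
            System.err.println("No experiment found with name: " + experimentName);
            return;
        }
        try {
            // Delegates to the PerfProfileInterface implementation registered for the
            // experiment's performance profile, over the given monitoring interval.
            new ExperimentInitiator().generateAndAddRecommendations(kruizeObject, results, intervalStart, intervalEnd);
        } catch (Exception e) {
            // Raised when no recommendation engine is mapped to the performance profile.
            System.err.println("Recommendation generation failed: " + e.getMessage());
        }
    }
}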
@@ -15,16 +15,14 @@
*******************************************************************************/
package com.autotune.analyzer.experiment;

import com.autotune.analyzer.utils.AnalyzerErrorConstants;
import com.autotune.analyzer.performanceProfiles.utils.PerformanceProfileUtil;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.data.result.ContainerData;
import com.autotune.common.data.result.ExperimentResultData;
import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import com.autotune.analyzer.performanceProfiles.utils.PerformanceProfileUtil;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.analyzer.utils.AnalyzerErrorConstants;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.data.result.ExperimentResultData;
import com.autotune.common.data.result.IntervalResults;
import com.autotune.common.k8sObjects.K8sObject;
import com.autotune.database.service.ExperimentDBService;
import com.autotune.utils.KruizeConstants;
import org.slf4j.Logger;
@@ -44,7 +42,7 @@ public class ExperimentResultValidation {
private Map<String, KruizeObject> mainKruizeExperimentMAP;
private Map<String, PerformanceProfile> performanceProfileMap;

public ExperimentResultValidation(Map<String, KruizeObject> mainKruizeExperimentMAP,Map<String, PerformanceProfile> performanceProfileMap) {
public ExperimentResultValidation(Map<String, KruizeObject> mainKruizeExperimentMAP, Map<String, PerformanceProfile> performanceProfileMap) {
this.mainKruizeExperimentMAP = mainKruizeExperimentMAP;
this.performanceProfileMap = performanceProfileMap;
}
@@ -57,7 +55,7 @@ public void validate(List<ExperimentResultData> experimentResultDataList, Map<St
String expName = resultData.getExperiment_name();
if (null != expName && !expName.isEmpty() && null != resultData.getIntervalEndTime() && null != resultData.getIntervalStartTime()) {
try {
new ExperimentDBService().loadExperimentAndResultsFromDBByName(mainKruizeExperimentMAP, expName);
new ExperimentDBService().loadExperimentFromDBByName(mainKruizeExperimentMAP, expName);
} catch (Exception e) {
LOGGER.error("Loading saved experiment {} failed: {} ", expName, e.getMessage());
}
@@ -76,12 +74,12 @@ public void validate(List<ExperimentResultData> experimentResultDataList, Map<St
Double durationInSeconds = intervalResults.getDuration_in_seconds();
String measurementDurationInMins = kruizeObject.getTrial_settings().getMeasurement_durationMinutes();
LOGGER.debug("Duration in seconds = {}", intervalResults.getDuration_in_seconds());
if ( durationInSeconds < 0) {
if (durationInSeconds < 0) {
errorMsg = errorMsg.concat(AnalyzerErrorConstants.AutotuneObjectErrors.WRONG_TIMESTAMP);
resultData.setValidationOutputData(new ValidationOutputData(false, errorMsg, HttpServletResponse.SC_BAD_REQUEST));
break;
} else {
Double parsedMeasurementDuration = Double.parseDouble(measurementDurationInMins.substring(0, measurementDurationInMins.length()-3));
Double parsedMeasurementDuration = Double.parseDouble(measurementDurationInMins.substring(0, measurementDurationInMins.length() - 3));
// Calculate the lower and upper bounds for the acceptable range i.e. +-5 seconds
double lowerRange = Math.abs((parsedMeasurementDuration * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE) - (KruizeConstants.TimeConv.MEASUREMENT_DURATION_THRESHOLD_SECONDS));
double upperRange = (parsedMeasurementDuration * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE) + (KruizeConstants.TimeConv.MEASUREMENT_DURATION_THRESHOLD_SECONDS);
@@ -141,24 +139,6 @@ public void validate(List<ExperimentResultData> experimentResultDataList, Map<St
resultData.setValidationOutputData(new ValidationOutputData(false, errorMsg, HttpServletResponse.SC_BAD_REQUEST));
break;
}

// check if resultData is present
boolean isExist = false;
for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) {
for (ContainerData containerData : k8sObject.getContainerDataMap().values()) {
if (null != containerData.getResults()) {
if (null != containerData.getResults().get(resultData.getIntervalEndTime())) {
isExist = true;
break;
}
}
}
}
if (isExist) {
errorMsg = errorMsg.concat(String.format("Experiment name : %s already contains result for timestamp : %s", resultData.getExperiment_name(), resultData.getIntervalEndTime()));
resultData.setValidationOutputData(new ValidationOutputData(false, errorMsg, HttpServletResponse.SC_CONFLICT));
break;
}
/*
Fetch the performance profile from the Map corresponding to the name in the kruize object,
and then validate the Performance Profile data
@@ -181,8 +161,8 @@ public void validate(List<ExperimentResultData> experimentResultDataList, Map<St
resultData.setValidationOutputData(new ValidationOutputData(false, errorMsg, HttpServletResponse.SC_BAD_REQUEST));
break;
}
} catch (Exception e) {
LOGGER.error("Caught Exception: {}",e);
} catch (Exception e) {
LOGGER.error("Caught Exception: {}", e);
errorMsg = "Validation failed: " + e.getMessage();
proceed = false;
resultData.setValidationOutputData(new ValidationOutputData(false, errorMsg, HttpServletResponse.SC_INTERNAL_SERVER_ERROR));
@@ -211,6 +191,7 @@ public void validate(List<ExperimentResultData> experimentResultDataList, Map<St
setErrorMessage("Validation failed: " + e.getMessage());
}
}

public void markFailed(String message) {
setSuccess(false);
setErrorMessage(message);
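A side note on the duration check earlier in this file: validation strips the "min" suffix from the trial's measurement duration and accepts reported durations within a ±5 second window (MEASUREMENT_DURATION_THRESHOLD_SECONDS). A standalone sketch of that arithmetic, assuming a hypothetical "15min" trial setting:

// Illustrative recomputation of the acceptable duration window; the "15min" value is an assumption.
public class DurationWindowExample {
    public static void main(String[] args) {
        String measurementDurationInMins = "15min"; // hypothetical trial setting
        double minutes = Double.parseDouble(
                measurementDurationInMins.substring(0, measurementDurationInMins.length() - 3)); // strips "min" -> 15.0
        double lowerRange = Math.abs(minutes * 60 - 5); // 895.0 seconds
        double upperRange = minutes * 60 + 5;           // 905.0 seconds
        // A duration_in_seconds reported outside this range fails the validation step above.
        System.out.println("Accepted duration_in_seconds range: [" + lowerRange + ", " + upperRange + "]");
    }
}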
@@ -16,19 +16,16 @@
package com.autotune.analyzer.performanceProfiles.PerformanceProfileInterface;

import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import com.autotune.analyzer.performanceProfiles.utils.PerformanceProfileUtil;
import com.autotune.analyzer.recommendations.engine.KruizeRecommendationEngine;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.common.data.metrics.Metric;
import com.autotune.common.data.metrics.MetricResults;
import com.autotune.common.data.result.*;
import com.autotune.common.data.result.ContainerData;
import com.autotune.analyzer.recommendations.engine.KruizeRecommendationEngine;
import com.autotune.common.data.result.ExperimentResultData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.InvocationTargetException;
import java.util.*;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;

/**
* Util class to validate the performance profile metrics with the experiment results metrics.
@@ -43,10 +40,6 @@ private String validateValues(HashMap<String, MetricResults> funcVar, List<Strin
return "";
}

@Override
public void generateRecommendation(KruizeObject kruizeObject, ExperimentResultData experimentResultData) {

}

// TODO: Update these based on requirements, currently leaving Invalid as Default impl doesn't need engine
@Override
@@ -59,4 +52,9 @@ public AnalyzerConstants.RegisterRecommendationEngineStatus registerEngine(Kruiz
public List<KruizeRecommendationEngine> getEngines() {
return null;
}

@Override
public void generateRecommendation(KruizeObject kruizeObject, List<ExperimentResultData> experimentResultDataList, Timestamp interval_start_time, Timestamp interval_end_time) {

}
}
@@ -1,12 +1,9 @@
/*******************************************************************************
* Copyright (c) 2022 Red Hat, IBM Corporation and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,12 +17,17 @@
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import com.autotune.common.data.result.ExperimentResultData;

import java.sql.Timestamp;
import java.util.List;

/**
* Abstraction layer containing methods for the validation of the Performance Profiles with respect to the updated experiment.
* and to parse the objective function data.
*/
public interface PerfProfileInterface {
// name, validateResults, validateProfile, recommend
// name, validateResults, validateProfile, recommend
String getName(PerformanceProfile profile);
void generateRecommendation(KruizeObject kruizeObject, ExperimentResultData experimentResultData);

void generateRecommendation(KruizeObject kruizeObject, List<ExperimentResultData> experimentResultDataList, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception;

}
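
The interface change mirrors the new call path: implementations now receive the resolved KruizeObject, the full result list, and the interval window, and may throw rather than silently no-op. A partial skeleton under that assumption (the class name and bodies are illustrative, and only the two methods visible in this diff are shown; any further interface methods such as registerEngine/getEngines, suggested by the default implementation above, are omitted):

import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import com.autotune.common.data.result.ExperimentResultData;
import java.sql.Timestamp;
import java.util.List;

// Hypothetical, partial skeleton; not the PR's actual recommendation engine logic.
public class ExamplePerfProfileImpl implements PerfProfileInterface {

    @Override
    public String getName(PerformanceProfile performanceProfile) {
        // Assumes PerformanceProfile exposes a getName() accessor.
        return performanceProfile.getName();
    }

    @Override
    public void generateRecommendation(KruizeObject kruizeObject,
                                       List<ExperimentResultData> experimentResultDataList,
                                       Timestamp interval_start_time,
                                       Timestamp interval_end_time) throws Exception {
        if (null == experimentResultDataList || experimentResultDataList.isEmpty()) {
            throw new Exception("No results available for the given interval");
        }
        // Placeholder: compute recommendations from the results that fall inside
        // [interval_start_time, interval_end_time] and attach them to kruizeObject.
    }
}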