Merge pull request #95 from vishnuchalla/telco-splunk
Integrating Splunk for Telco KPIs
Showing 16 changed files with 983 additions and 504 deletions.
New file: app/api/v1/commons/hasher.py (path inferred from the import in the Telco commons module below)
@@ -0,0 +1,41 @@
import json
import zlib
import hashlib
from cryptography.fernet import Fernet

# Symmetric Fernet key; note a hardcoded key provides obfuscation, not secrecy
symmetric_encryptor = b'k3tGwuK6O59c0SEMmnIeJUEpTN5kuxibPy8Q8VfYC6A='


def hash_encrypt_json(json_data):
    # Serialize the JSON data to a string (json.dumps rather than str() so the
    # round trip can use json.loads instead of eval)
    json_str = json.dumps(json_data)

    # Generate an MD5 hash of the JSON string
    hash_digest = hashlib.md5(json_str.encode()).hexdigest()

    # Compress the JSON string
    compressed_data = zlib.compress(json_str.encode())

    cipher = Fernet(symmetric_encryptor)

    # Encrypt the compressed JSON string
    encrypted_data = cipher.encrypt(compressed_data)

    return hash_digest, encrypted_data


def decrypt_unhash_json(hash_digest, encrypted_data):
    cipher = Fernet(symmetric_encryptor)

    # Decrypt the encrypted JSON data (still zlib-compressed at this point)
    decrypted_data = cipher.decrypt(encrypted_data)

    # Decompress the decrypted data
    decompressed_json_str = zlib.decompress(decrypted_data).decode()

    # Verify the hash digest before trusting the payload
    calculated_hash = hashlib.md5(decompressed_json_str.encode()).hexdigest()
    if calculated_hash != hash_digest:
        raise ValueError("Hash digest does not match")

    # Deserialize the JSON string back to JSON data; json.loads replaces the
    # original eval(), which would execute arbitrary code in the payload
    json_data = json.loads(decompressed_json_str)

    return json_data
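A quick round-trip of the two helpers, as a minimal sketch (the sample payload is invented for illustration):

# Hypothetical usage: encrypt a sample record, then recover and verify it
sample = {"test_type": "oslat", "timestamp": 1700000000}
digest, token = hash_encrypt_json(sample)
assert decrypt_unhash_json(digest, token) == sample  # raises ValueError on digest mismatch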
New file: app/api/v1/commons/telco.py (path inferred from the imports that reference it)
@@ -0,0 +1,61 @@
from datetime import date, datetime, timezone
import pandas as pd
from app import config
from app.services.splunk import SplunkService
import app.api.v1.commons.hasher as hasher


async def getData(start_datetime: date, end_datetime: date, configpath: str):
    test_types = ["oslat", "cyclictest", "cpu_util", "deployment", "ptp", "reboot", "rfc-2544"]
    cfg = config.get_config()
    try:
        jenkins_url = cfg.get('telco.config.job_url')
    except Exception as e:
        print(f"Error reading telco configuration: {e}")
        raise  # jenkins_url is required below; continuing would hit a NameError

    # Approximate runtime of each test type in seconds, used to back-compute a
    # start timestamp from the end timestamp that Splunk reports
    test_type_execution_times = {
        "oslat": 3720,
        "cyclictest": 3720,
        "cpu_util": 6600,
        "deployment": 3720,
        "ptp": 4200,
        "reboot": 1980,
        "rfc-2544": 5580,
    }
    query = {
        "earliest_time": "{}T00:00:00".format(start_datetime.strftime('%Y-%m-%d')),
        "latest_time": "{}T23:59:59".format(end_datetime.strftime('%Y-%m-%d')),
        "output_mode": "json"
    }
    searchList = ' OR '.join(['test_type="{}"'.format(test_type) for test_type in test_types])
    splunk = SplunkService(configpath=configpath)
    response = await splunk.query(query=query, searchList=searchList)
    mapped_list = []

    for each_response in response:
        end_timestamp = int(each_response['timestamp'])
        test_data = each_response['data']
        hash_digest, encrypted_data = hasher.hash_encrypt_json(each_response)
        execution_time_seconds = test_type_execution_times.get(test_data['test_type'], 0)
        start_timestamp = end_timestamp - execution_time_seconds
        start_time_utc = datetime.fromtimestamp(start_timestamp, tz=timezone.utc)
        end_time_utc = datetime.fromtimestamp(end_timestamp, tz=timezone.utc)

        mapped_list.append({
            "uuid": hash_digest,
            "encryptedData": encrypted_data.decode('utf-8'),
            "ciSystem": "Jenkins",
            "testName": test_data['test_type'],
            "version": test_data['ocp_version'],
            "releaseStream": test_data['ocp_build'],
            "startDate": str(start_time_utc),
            "endDate": str(end_time_utc),
            "buildUrl": jenkins_url + "/" + str(test_data['cluster_artifacts']['ref']['jenkins_build']),
            "jobStatus": "success"
        })

    # json_normalize already yields an empty DataFrame for an empty list,
    # so no special-casing is needed
    return pd.json_normalize(mapped_list)
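Since getData is a coroutine, callers must await it; a minimal driver sketch (the date range here is illustrative):

import asyncio
from datetime import date

async def main():
    # Fetch one week of Telco jobs as a normalized DataFrame
    df = await getData(date(2024, 1, 1), date(2024, 1, 7), 'telco.splunk')
    print(df[["uuid", "testName", "startDate", "endDate"]].head())

asyncio.run(main())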
Modified: the OCP CPT mapper (the configpath argument is now fixed internally, and the local getReleaseStream helper moves to the shared commons.utils module)
@@ -1,34 +1,15 @@
 from ....commons.ocp import getData
+from ....commons.utils import getReleaseStream
 from datetime import date


 ################################################################
 # This will return a DataFrame from OCP required by the CPT endpoint
 ################################################################
-async def ocpMapper(start_datetime: date, end_datetime: date, configpath: str):
-    df = await getData(start_datetime, end_datetime, configpath)
+async def ocpMapper(start_datetime: date, end_datetime: date):
+    df = await getData(start_datetime, end_datetime, f'ocp.elasticsearch')
     df.insert(len(df.columns), "product", "ocp")
     df["releaseStream"] = df.apply(getReleaseStream, axis=1)
     df["version"] = df["shortVersion"]
     df["testName"] = df["benchmark"]
     return df
-
-
-def getReleaseStream(row):
-    if row["releaseStream"].__contains__("fast"):
-        return "Fast"
-    elif row["releaseStream"].__contains__("stable"):
-        return "Stable"
-    elif row["releaseStream"].__contains__("eus"):
-        return "EUS"
-    elif row["releaseStream"].__contains__("candidate"):
-        return "Release Candidate"
-    elif row["releaseStream"].__contains__("rc"):
-        return "Release Candidate"
-    elif row["releaseStream"].__contains__("nightly"):
-        return "Nightly"
-    elif row["releaseStream"].__contains__("ci"):
-        return "ci"
-    elif row["releaseStream"].__contains__("ec"):
-        return "Engineering Candidate"
-    return "Stable"
New file: the Telco CPT mapper, the counterpart to the OCP mapper above
@@ -0,0 +1,13 @@
from ....commons.telco import getData
from ....commons.utils import getReleaseStream
from datetime import date


#####################################################################
# This will return a DataFrame from Telco required by the CPT endpoint
#####################################################################
async def telcoMapper(start_datetime: date, end_datetime: date):
    df = await getData(start_datetime, end_datetime, 'telco.splunk')
    df.insert(len(df.columns), "product", "telco")
    df["releaseStream"] = df.apply(getReleaseStream, axis=1)
    return df
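A hedged sketch of how the CPT layer presumably combines such per-product mappers (the registry and aggregation function here are hypothetical, not part of this PR):

import pandas as pd

# Hypothetical registry: each product contributes a mapper that returns a
# DataFrame normalized to the same column set
product_mappers = {
    "ocp": ocpMapper,      # from the OCP mapper module above
    "telco": telcoMapper,  # from this module
}

async def gatherProducts(start_datetime, end_datetime):
    # Run every mapper and concatenate the results into one DataFrame
    frames = [await mapper(start_datetime, end_datetime)
              for mapper in product_mappers.values()]
    return pd.concat(frames, ignore_index=True)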
New file: the Telco jobs endpoint
@@ -0,0 +1,56 @@
import json
from datetime import datetime, timedelta, date
from fastapi import APIRouter, Response
from fastapi.param_functions import Query
from ...commons.telco import getData
from ...commons.example_responses import telco_200_response, response_422

router = APIRouter()


@router.get('/api/v1/telco/jobs',
            summary="Returns a job list",
            description="Returns a list of jobs in the specified dates. \
                        If no dates are provided the API will default the values. \
                        `startDate`: will be set to the day of the request minus 7 days.\
                        `endDate`: will be set to the day of the request.",
            responses={
                200: telco_200_response(),
                422: response_422(),
            },)
async def jobs(start_date: date = Query(None, description="Start date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-10"]),
               end_date: date = Query(None, description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"]),
               pretty: bool = Query(False, description="Output content in pretty format.")):
    if start_date is None:
        start_date = datetime.utcnow().date() - timedelta(days=7)

    if end_date is None:
        end_date = datetime.utcnow().date()

    if start_date > end_date:
        return Response(content=json.dumps({'error': "invalid date range, start_date must not be later than end_date"}), status_code=422)

    results = await getData(start_date, end_date, 'telco.splunk')

    response = {
        'startDate': str(start_date),
        'endDate': str(end_date),
        'results': results.to_dict('records') if len(results) > 0 else []
    }

    if pretty:
        json_str = json.dumps(response, indent=4)
        return Response(content=json_str, media_type='application/json')

    # Return an explicit Response: handing FastAPI a pre-serialized string
    # would make it JSON-encode the payload a second time
    return Response(content=json.dumps(response), media_type='application/json')
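As a usage sketch, the endpoint can be exercised with any HTTP client once the app is running (the base URL and the requests dependency are assumptions, not part of this PR):

import requests

# Hypothetical local instance; adjust the base URL to your deployment
resp = requests.get(
    "http://localhost:8000/api/v1/telco/jobs",
    params={"start_date": "2020-11-10", "end_date": "2020-11-15", "pretty": "true"},
)
print(resp.status_code)        # 200 on success
print(resp.json()["results"])  # list of mapped Telco job records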