Merge branch 'main' into revamp
chentex committed Jul 16, 2024
2 parents 7189136 + 5c69e85 commit b84fa11
Showing 8 changed files with 148 additions and 10 deletions.
4 changes: 4 additions & 0 deletions backend/app/api/api.py
@@ -10,6 +10,7 @@
from app.api.v1.endpoints.quay import quayGraphs
from app.api.v1.endpoints.telco import telcoJobs
from app.api.v1.endpoints.telco import telcoGraphs
from app.api.v1.endpoints.ocm import ocmJobs


router = APIRouter()
@@ -35,3 +36,6 @@

# Horreum endpoint
router.include_router(horreum.router, tags=['horreum'])

# OCM endpoint
router.include_router(ocmJobs.router, tags=['ocm'])
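For orientation, a minimal sketch (not part of this commit) of how the aggregate router above would typically be mounted on the FastAPI app so the new OCM routes become reachable; the actual application setup lives elsewhere in the repository and may differ.

    from fastapi import FastAPI
    from app.api.api import router

    app = FastAPI()
    # Mounting the aggregate router exposes /api/v1/ocm/jobs alongside the
    # existing endpoints; tags=['ocm'] groups the OCM routes in the OpenAPI docs.
    app.include_router(router)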
52 changes: 52 additions & 0 deletions backend/app/api/v1/commons/ocm.py
@@ -0,0 +1,52 @@
from datetime import date, datetime
import pandas as pd
from app.services.search import ElasticService


async def getData(start_datetime: date, end_datetime: date, configpath: str):
    query = {
        "query": {
            "bool": {
                "filter": {
                    "range": {
                        "metrics.earliest": {
                            "format": "yyyy-MM-dd"
                        }
                    }
                }
            }
        }
    }

    es = ElasticService(configpath=configpath)
    response = await es.post(query=query, start_date=start_datetime, end_date=end_datetime, timestamp_field='metrics.earliest')
    await es.close()
    tasks = [item['_source'] for item in response]
    jobs = pd.json_normalize(tasks)
    if len(jobs) == 0:
        return jobs

    if 'buildUrl' not in jobs.columns:
        jobs.insert(len(jobs.columns), "buildUrl", "")
    if 'ciSystem' not in jobs.columns:
        jobs.insert(len(jobs.columns), "ciSystem", "")
    jobs.fillna('', inplace=True)
    jobs['jobStatus'] = jobs.apply(convertJobStatus, axis=1)
    return jobs


def fillCiSystem(row):
    currDate = datetime.strptime(row["metrics.earliest"][:26], '%Y-%m-%dT%H:%M:%S.%f')
    if currDate > datetime(2024, 6, 24):
        return "Jenkins"
    else:
        return "Airflow"


def convertJobStatus(row):
    if row["metrics.success"] >= 0.80:
        return "success"
    elif row["metrics.success"] < 0.40:
        return "failure"
    else:
        return "unstable"
5 changes: 5 additions & 0 deletions backend/app/api/v1/endpoints/cpt/cptJobs.py
@@ -10,6 +10,7 @@
from .maps.quay import quayMapper
from .maps.hce import hceMapper
from .maps.telco import telcoMapper
from .maps.ocm import ocmMapper
from ...commons.example_responses import cpt_200_response, response_422
from fastapi.param_functions import Query

@@ -20,8 +21,10 @@
"quay": quayMapper,
"hce": hceMapper,
"telco": telcoMapper,
"ocm": ocmMapper,
}


@router.get('/api/v1/cpt/jobs',
            summary="Returns a job list from all the products.",
            description="Returns a list of jobs in the specified dates. \
@@ -69,6 +72,7 @@ async def jobs(start_date: date = Query(None, description="Start date for search
    jsonstring = json.dumps(response)
    return jsonstring


async def fetch_product_async(product, start_date, end_date):
    try:
        df = await products[product](start_date, end_date)
@@ -79,5 +83,6 @@ async def fetch_product_async(product, start_date, end_date):
        print(f"Error in mapper for product {product}: {e}")
        return pd.DataFrame()


def fetch_product(product, start_date, end_date):
    return asyncio.run(fetch_product_async(product, start_date, end_date))
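As a sketch only (the real aggregation lives in the /api/v1/cpt/jobs handler, shown only partially above), the products mapping lets each mapper, including the new ocmMapper, be awaited independently, with a failing product degrading to an empty DataFrame instead of failing the whole request. fetch_all is a hypothetical helper name used purely for illustration.

    import asyncio
    import pandas as pd

    async def fetch_all(start_date, end_date):
        # Fan out one fetch per product; fetch_product_async already swallows
        # mapper errors and returns an empty DataFrame for that product.
        frames = await asyncio.gather(
            *(fetch_product_async(product, start_date, end_date) for product in products)
        )
        return pd.concat(frames, ignore_index=True)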
18 changes: 18 additions & 0 deletions backend/app/api/v1/endpoints/cpt/maps/ocm.py
@@ -0,0 +1,18 @@
from ....commons.ocm import getData
from datetime import date


################################################################
# This will return a DataFrame from OCM required by the CPT endpoint
################################################################
async def ocmMapper(start_datetime: date, end_datetime: date):
    df = await getData(start_datetime, end_datetime, f'ocm.elasticsearch')
    if len(df) == 0:
        return df
    df.insert(len(df.columns), "product", "ocm")
    df.insert(len(df.columns), "releaseStream", "Nightly")
    df["testName"] = df["attack"]
    df["startDate"] = df["metrics.earliest"]
    df["endDate"] = df["metrics.end"]

    return df
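A hypothetical usage sketch of the mapper above; it needs a reachable Elasticsearch behind the ocm.elasticsearch config, and the source fields 'attack', 'metrics.earliest' and 'metrics.end' are assumed to exist on the OCM documents, as the mapper implies. preview is an illustrative name, not repository code.

    import asyncio
    from datetime import date, timedelta

    async def preview():
        # Pull the last five days and show the columns the CPT endpoint relies on.
        df = await ocmMapper(date.today() - timedelta(days=5), date.today())
        if len(df) > 0:
            print(df[["product", "releaseStream", "testName", "startDate", "endDate"]].head())

    asyncio.run(preview())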
55 changes: 55 additions & 0 deletions backend/app/api/v1/endpoints/ocm/ocmJobs.py
@@ -0,0 +1,55 @@
import json
from fastapi import Response
from datetime import datetime, timedelta, date
from fastapi import APIRouter
from ...commons.ocm import getData
from ...commons.example_responses import ocp_200_response, response_422
from fastapi.param_functions import Query

router = APIRouter()


@router.get('/api/v1/ocm/jobs',
            summary="Returns a job list",
            description="Returns a list of jobs in the specified dates. \
            If no dates are provided the API will default the values. \
            `startDate`: will be set to the day of the request minus 5 days.\
            `endDate`: will be set to the day of the request.",
            responses={
                200: ocp_200_response(),
                422: response_422(),
            },)
async def jobs(start_date: date = Query(None, description="Start date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-10"]),
               end_date: date = Query(None, description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"]),
               pretty: bool = Query(False, description="Output content in pretty format.")):
    if start_date is None:
        start_date = datetime.utcnow().date()
        start_date = start_date - timedelta(days=5)

    if end_date is None:
        end_date = datetime.utcnow().date()

    if start_date > end_date:
        return Response(content=json.dumps({'error': "invalid date format, start_date must be less than end_date"}), status_code=422)

    results = await getData(start_date, end_date, 'ocm.elasticsearch')

    if len(results) >= 1:
        response = {
            'startDate': start_date.__str__(),
            'endDate': end_date.__str__(),
            'results': results.to_dict('records')
        }
    else:
        response = {
            'startDate': start_date.__str__(),
            'endDate': end_date.__str__(),
            'results': []
        }

    if pretty:
        json_str = json.dumps(response, indent=4)
        return Response(content=json_str, media_type='application/json')

    jsonstring = json.dumps(response)
    return jsonstring
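An example client call against the new endpoint (the base URL is a placeholder for wherever the backend is running); omitting start_date and end_date exercises the defaults described above, i.e. the last five days.

    import requests

    resp = requests.get(
        "http://localhost:8000/api/v1/ocm/jobs",   # placeholder base URL
        params={"pretty": "true"},                 # or pass start_date / end_date as YYYY-MM-DD
    )
    print(resp.status_code)
    print(resp.text)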
7 changes: 4 additions & 3 deletions frontend/src/helpers/Utils.js
@@ -25,7 +25,7 @@ const getCPTUpdatedData = (data, testName, product, ciSystem, jobStatus, release
}

const getOCPUpdatedData = (data, platform, benchmark, version, workerCount, networkType, ciSystem, jobType, isRehearse,
ipsec, fips, encrypted, encryptionType, publish, computeArch, controlPlaneArch) => {
ipsec, fips, encrypted, encryptionType, publish, computeArch, controlPlaneArch, jobStatus) => {
const filterValues = {
"platform": platform, "benchmark": benchmark,
"shortVersion": version, "workerNodesCount": workerCount,
@@ -34,6 +34,7 @@ const getOCPUpdatedData = (data, platform, benchmark, version, workerCount, netw
"ipsec": ipsec, "fips": fips, "encrypted": encrypted,
"encryptionType": encryptionType, "publish": publish,
"computeArch": computeArch, "controlPlaneArch": controlPlaneArch,
"jobStatus": jobStatus,
}
let filteredData = data
for (let [keyName, value] of Object.entries(filterValues))
Expand All @@ -58,8 +59,8 @@ const getQuayUpdatedData = (data, platform, benchmark, releaseStream, workerCoun

const getTelcoUpdatedData = (data, benchmark, version, releaseStream, ciSystem, formal, nodeName, cpu) => {
const filterValues = {
"cpu": cpu, "benchmark": benchmark, "shortVersion": version,
"releaseStream": releaseStream, "formal": formal, "ciSystem": ciSystem,
"cpu": cpu, "benchmark": benchmark, "shortVersion": version,
"releaseStream": releaseStream, "formal": formal, "ciSystem": ciSystem,
"nodeName": nodeName,
}
let filteredData = data
15 changes: 8 additions & 7 deletions frontend/src/store/Actions/ActionCreator.js
@@ -116,11 +116,12 @@ export const fetchOCPJobsData = (startDate = '', endDate='') => async dispatch =
const allPublish = GetPublish(results)
const computeArchs = GetComputeArchs(results)
const controlPlaneArchs = GetControlPlaneArchs(results)
const jobStatuses = GetStatuses(results)
const updatedTime = new Date().toLocaleString().replace(', ', ' ').toString();
await dispatch(getOCPJobsData({
data: results, benchmarks, versions, waitForUpdate: false, platforms, workers, networkTypes,
updatedTime, ciSystems, jobTypes, rehearses, allIpsec, allFips, allEncrypted, encryptionTypes,
allPublish, computeArchs, controlPlaneArchs, startDate: api_data.startDate, endDate: api_data.endDate
allPublish, computeArchs, controlPlaneArchs, jobStatuses, startDate: api_data.startDate, endDate: api_data.endDate
}))
await dispatch(updateOCPMetaData({data: results}))
}
@@ -163,7 +164,7 @@ export const fetchQuayJobsData = (startDate = '', endDate='') => async dispatch
const updatedTime = new Date().toLocaleString().replace(', ', ' ').toString();
await dispatch(getQuayJobsData({
data: results, benchmarks, releaseStreams, waitForUpdate: false, platforms, workers,
hitSizes, concurrencies, imagePushPulls, updatedTime, ciSystems, startDate: api_data.startDate,
hitSizes, concurrencies, imagePushPulls, updatedTime, ciSystems, startDate: api_data.startDate,
endDate: api_data.endDate
}))
await dispatch(updateQuayMetaData({data: results}))
@@ -273,7 +274,7 @@ const GetCiSystems = (api_data) => {
}

export const GetVersions = (api_data) => {
return Array.from(new Set(api_data.map(item => item.shortVersion).filter(shortVersion => shortVersion !== null && shortVersion !== "").map(shortVersion => shortVersion.toUpperCase().trim()))).sort();
return Array.from(new Set(api_data.map(item => item.shortVersion).filter(shortVersion => shortVersion !== null && shortVersion !== "").map(shortVersion => shortVersion.toUpperCase().trim()))).sort();
}

export const GetBenchmarks = (api_data) => {
@@ -316,19 +317,19 @@ const GetStatuses = (api_data) => {
}

const GetReleaseStreams = (api_data) => {
return Array.from(new Set(api_data.map(item => item.releaseStream).filter(releaseStream => releaseStream !== null && releaseStream !== "").map(releaseStream => releaseStream.toUpperCase().trim()))).sort();
return Array.from(new Set(api_data.map(item => item.releaseStream).filter(releaseStream => releaseStream !== null && releaseStream !== "").map(releaseStream => releaseStream.toUpperCase().trim()))).sort();
}

const GetFormals = (api_data) => {
return Array.from(new Set(api_data.map(item => item.formal).filter(formal => formal !== null && formal !== "").map(formal => formal.toUpperCase().trim()))).sort();
return Array.from(new Set(api_data.map(item => item.formal).filter(formal => formal !== null && formal !== "").map(formal => formal.toUpperCase().trim()))).sort();
}

const GetNodeNames = (api_data) => {
return Array.from(new Set(api_data.map(item => item.nodeName).filter(nodeName => nodeName !== null && nodeName !== "").map(nodeName => nodeName.toUpperCase().trim()))).sort();
return Array.from(new Set(api_data.map(item => item.nodeName).filter(nodeName => nodeName !== null && nodeName !== "").map(nodeName => nodeName.toUpperCase().trim()))).sort();
}

const GetCpus = (api_data) => {
return Array.from(new Set(api_data.map(item => item.cpu).filter(cpu => cpu !== null && cpu !== "").map(cpu => cpu.toUpperCase().trim()))).sort();
return Array.from(new Set(api_data.map(item => item.cpu).filter(cpu => cpu !== null && cpu !== "").map(cpu => cpu.toUpperCase().trim()))).sort();
}

const GetTestNames = (api_data) => {
2 changes: 2 additions & 0 deletions frontend/src/store/reducers/InitialData.js
@@ -20,6 +20,7 @@ export const OCP_INITIAL_DATA = {
allPublish: ["All"],
computeArchs: ["All"],
controlPlaneArchs: ["All"],
jobStatuses: ["All"],
selectedBenchmark: "All",
selectedVersion: "All",
selectedPlatform: "All",
@@ -35,6 +36,7 @@
selectedPublish: "All",
selectedComputeArch: "All",
selectedControlPlaneArch: "All",
selectedJobStatus: "All",
waitForUpdate: false,
platforms: ["All"],
copyData: [],
