Commit e0fbbd2

pagination params

MVarshini committed Dec 12, 2024
1 parent fafeed0 commit e0fbbd2
Showing 24 changed files with 449 additions and 255 deletions.
14 changes: 9 additions & 5 deletions backend/app/api/v1/commons/hce.py
@@ -3,23 +3,27 @@
 from app.services.search import ElasticService


-async def getData(start_datetime: date, end_datetime: date, configpath: str):
+async def getData(
+    start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str
+):
     query = {
-        "query": {"bool": {"filter": {"range": {"date": {"format": "yyyy-MM-dd"}}}}}
+        "size": size,
+        "from": offset,
+        "query": {"bool": {"filter": {"range": {"date": {"format": "yyyy-MM-dd"}}}}},
     }

     es = ElasticService(configpath=configpath)
     response = await es.post(
         query=query,
+        size=size,
         start_date=start_datetime,
         end_date=end_datetime,
         timestamp_field="date",
     )
     await es.close()
-    tasks = [item["_source"] for item in response]
+    tasks = [item["_source"] for item in response["data"]]
     jobs = pd.json_normalize(tasks)
     jobs[["group"]] = jobs[["group"]].fillna(0)
     jobs.fillna("", inplace=True)
     if len(jobs) == 0:
         return jobs
-    return jobs
+    return {"data": jobs, "total": response["total"]}
13 changes: 9 additions & 4 deletions backend/app/api/v1/commons/ocm.py
@@ -3,24 +3,29 @@
 from app.services.search import ElasticService


-async def getData(start_datetime: date, end_datetime: date, configpath: str):
+async def getData(
+    start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str
+):
     query = {
+        "size": size,
+        "from": offset,
         "query": {
             "bool": {
                 "filter": {"range": {"metrics.earliest": {"format": "yyyy-MM-dd"}}}
             }
-        }
+        },
     }

     es = ElasticService(configpath=configpath)
     response = await es.post(
         query=query,
+        size=size,
         start_date=start_datetime,
         end_date=end_datetime,
         timestamp_field="metrics.earliest",
     )
     await es.close()
-    tasks = [item["_source"] for item in response]
+    tasks = [item["_source"] for item in response["data"]]
     jobs = pd.json_normalize(tasks)
     if len(jobs) == 0:
         return jobs
@@ -31,7 +36,7 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str):
     jobs.insert(len(jobs.columns), "ciSystem", "")
     jobs.fillna("", inplace=True)
     jobs["jobStatus"] = jobs.apply(convertJobStatus, axis=1)
-    return jobs
+    return {"data": jobs, "total": response["total"]}


 def fillCiSystem(row):
19 changes: 13 additions & 6 deletions backend/app/api/v1/commons/ocp.py
@@ -4,23 +4,30 @@
 from app.services.search import ElasticService


-async def getData(start_datetime: date, end_datetime: date, size:int, offset:int, configpath: str):
+async def getData(
+    start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str
+):
     query = {
         "size": size,
         "from": offset,
         "query": {
             "bool": {"filter": {"range": {"timestamp": {"format": "yyyy-MM-dd"}}}}
         },
     }

     es = ElasticService(configpath=configpath)
-    response = await es.post(query=query, size=size, start_date=start_datetime, end_date=end_datetime, timestamp_field='timestamp')
+    response = await es.post(
+        query=query,
+        size=size,
+        start_date=start_datetime,
+        end_date=end_datetime,
+        timestamp_field="timestamp",
+    )
     await es.close()
-    tasks = [item['_source'] for item in response["data"]]
+    tasks = [item["_source"] for item in response["data"]]
     jobs = pd.json_normalize(tasks)
     if len(jobs) == 0:
         return jobs

     jobs[
         ["masterNodesCount", "workerNodesCount", "infraNodesCount", "totalNodesCount"]
     ] = jobs[
@@ -49,7 +56,7 @@ async def getData(start_datetime: date, end_datetime: date, size:int, offset:int, configpath: str):
     jbs = cleanJobs
     jbs["shortVersion"] = jbs["ocpVersion"].str.slice(0, 4)

-    return ({'data':jbs, 'total': response['total']})
+    return {"data": jbs, "total": response["total"]}


 def fillEncryptionType(row):
1 change: 0 additions & 1 deletion backend/app/api/v1/commons/quay.py
@@ -43,5 +43,4 @@ async def getData(start_datetime: date, end_datetime: date, size, offset, configpath: str):
     cleanJobs = jobs[jobs['platform'] != ""]

     jbs = cleanJobs
-
     return ({'data':jbs, 'total': response['total']})
22 changes: 17 additions & 5 deletions backend/app/api/v1/commons/telco.py
@@ -8,8 +8,18 @@
 import app.api.v1.endpoints.telco.telcoGraphs as telcoGraphs


-async def getData(start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str):
-    test_types = ["oslat", "cyclictest", "cpu_util", "deployment", "ptp", "reboot", "rfc-2544"]
+async def getData(
+    start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str
+):
+    test_types = [
+        "oslat",
+        "cyclictest",
+        "cpu_util",
+        "deployment",
+        "ptp",
+        "reboot",
+        "rfc-2544",
+    ]
     cfg = config.get_config()
     try:
         jenkins_url = cfg.get("telco.config.job_url")
@@ -33,10 +43,12 @@ async def getData(start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str):
         ['test_type="{}"'.format(test_type) for test_type in test_types]
     )
     splunk = SplunkService(configpath=configpath)
-    response = await splunk.query(query=query, size=size, offset=offset, searchList=searchList)
+    response = await splunk.query(
+        query=query, size=size, offset=offset, searchList=searchList
+    )
     mapped_list = []

-    for each_response in response:
+    for each_response in response["data"]:
         end_timestamp = int(each_response["timestamp"])
         test_data = each_response["data"]
         threshold = await telcoGraphs.process_json(test_data, True)
@@ -78,4 +90,4 @@ async def getData(start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str):
     if len(jobs) == 0:
         return jobs

-    return jobs
+    return {"data": jobs, "total": response["total"]}
2 changes: 1 addition & 1 deletion backend/app/api/v1/commons/utils.py
@@ -7,7 +7,7 @@ async def getMetadata(uuid: str, configpath: str):
     es = ElasticService(configpath=configpath)
     response = await es.post(query=query)
     await es.close()
-    meta = [item["_source"] for item in response]
+    meta = [item["_source"] for item in response["data"]]
     return meta[0]
136 changes: 99 additions & 37 deletions backend/app/api/v1/endpoints/cpt/cptJobs.py
@@ -25,21 +25,34 @@
 }


-@router.get('/api/v1/cpt/jobs',
-            summary="Returns a job list from all the products.",
-            description="Returns a list of jobs in the specified dates of requested size \
+@router.get(
+    "/api/v1/cpt/jobs",
+    summary="Returns a job list from all the products.",
+    description="Returns a list of jobs in the specified dates of requested size \
         If no dates are provided the API will default the values. \
         `startDate`: will be set to the day of the request minus 5 days.\
         `endDate`: will be set to the day of the request.",
-            responses={
-                200: cpt_200_response(),
-                422: response_422(),
-            },)
-async def jobs(start_date: date = Query(None, description="Start date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-10"]),
-               end_date: date = Query(None, description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"]),
-               pretty: bool = Query(False, description="Output content in pretty format."),
-               size: int = Query(None, description="Number of jobs to fetch"),
-               offset: int = Query(None, description="Offset number to fetch jobs from"),):
+    responses={
+        200: cpt_200_response(),
+        422: response_422(),
+    },
+)
+async def jobs(
+    start_date: date = Query(
+        None,
+        description="Start date for searching jobs, format: 'YYYY-MM-DD'",
+        examples=["2020-11-10"],
+    ),
+    end_date: date = Query(
+        None,
+        description="End date for searching jobs, format: 'YYYY-MM-DD'",
+        examples=["2020-11-15"],
+    ),
+    pretty: bool = Query(False, description="Output content in pretty format."),
+    size: int = Query(None, description="Number of jobs to fetch"),
+    offset: int = Query(None, description="Offset number to fetch jobs from"),
+    totalJobs: int = Query(None, description="Total jobs count carried over from a previous response"),
+):
     if start_date is None:
         start_date = datetime.utcnow().date()
         start_date = start_date - timedelta(days=5)
@@ -56,23 +69,36 @@ async def jobs(start_date: date = Query(None, description="Start date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-10"]),
     )

     results_df = pd.DataFrame()
+    total_dict = {}
+    total = 0
     with ProcessPoolExecutor(max_workers=cpu_count()) as executor:
         futures = {
-            executor.submit(fetch_product, product, start_date, end_date): product
+            executor.submit(
+                fetch_product, product, start_date, end_date, size, offset
+            ): product
             for product in products
         }
         for future in as_completed(futures):
             product = futures[future]
             try:
                 result = future.result()
-                results_df = pd.concat([results_df, result])
+                total_dict[product] = result["total"]
+                results_df = pd.concat([results_df, result["data"]])
             except Exception as e:
                 print(f"Error fetching data for product {product}: {e}")

+    num = 0 if totalJobs is None else int(totalJobs)
+    if totalJobs == 0:
+        for product in total_dict:
+            total += int(total_dict[product])
+
+    totalJobs = max(total, num)
     response = {
         "startDate": start_date.__str__(),
         "endDate": end_date.__str__(),
         "results": results_df.to_dict("records"),
+        "total": totalJobs,
+        "offset": offset + size,
     }

     if pretty:
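The totalJobs query parameter lets the client echo back the total from a previous response, so only the first page (totalJobs=0) pays for the per-product counting. A self-contained sketch of that carry-over, assuming (as reconstructed above, where indentation was lost in extraction) that the max() assignment runs on every request; resolve_total is an illustrative name, not a function in this commit:

def resolve_total(totalJobs, total_dict):
    num = 0 if totalJobs is None else int(totalJobs)
    total = 0
    if totalJobs == 0:
        # First page: sum the per-product totals gathered from the futures.
        total = sum(int(v) for v in total_dict.values())
    # Later pages: total stays 0, so the echoed value wins.
    return max(total, num)


assert resolve_total(0, {"ocp": 10, "quay": 5}) == 15   # first page counts
assert resolve_total(15, {"ocp": 10, "quay": 5}) == 15  # later pages reuse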
@@ -83,34 +109,70 @@ async def jobs(start_date: date = Query(None, description="Start date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-10"]),
     return jsonstring


-async def fetch_product_async(product, start_date, end_date):
+async def fetch_product_async(product, start_date, end_date, size, offset):
     try:
-        df = await products[product](start_date, end_date)
-        return (
-            df.loc[
-                :,
-                [
-                    "ciSystem",
-                    "uuid",
-                    "releaseStream",
-                    "jobStatus",
-                    "buildUrl",
-                    "startDate",
-                    "endDate",
-                    "product",
-                    "version",
-                    "testName",
-                ],
-            ]
-            if len(df) != 0
-            else df
-        )
+        response = await products[product](start_date, end_date, size, offset)
+        if response:
+            df = response["data"]
+            return {
+                "data": (
+                    df.loc[
+                        :,
+                        [
+                            "ciSystem",
+                            "uuid",
+                            "releaseStream",
+                            "jobStatus",
+                            "buildUrl",
+                            "startDate",
+                            "endDate",
+                            "product",
+                            "version",
+                            "testName",
+                        ],
+                    ]
+                    if len(df) != 0
+                    else df
+                ),
+                "total": response["total"],
+            }
     except ConnectionError:
         print("Connection Error in mapper for product " + product)
     except Exception as e:
         print(f"Error in mapper for product {product}: {e}")
     return pd.DataFrame()


-def fetch_product(product, start_date, end_date):
-    return asyncio.run(fetch_product_async(product, start_date, end_date))
+def fetch_product(product, start_date, end_date, size, offset):
+    return asyncio.run(fetch_product_async(product, start_date, end_date, size, offset))
+
+
+def is_requested_size_available(total_count, offset, requested_size):
+    """
+    Check if the requested size of data is available starting from a given offset.
+    Args:
+        total_count (int): Total number of available records.
+        offset (int): The starting position in the dataset.
+        requested_size (int): The number of records requested.
+    Returns:
+        bool: True if the requested size is available, False otherwise.
+    """
+    return (offset + requested_size) <= total_count
+
+
+def calculate_remaining_data(total_count, offset, requested_size):
+    """
+    Calculate the remaining number of data items that can be fetched based on the requested size.
+    Args:
+        total_count (int): Total number of available records.
+        offset (int): The starting position in the dataset.
+        requested_size (int): The number of records requested.
+    Returns:
+        int: The number of records that can be fetched, which may be less than or equal to requested_size.
+    """
+    available_data = total_count - offset  # Data available from the offset
+    return min(available_data, requested_size)
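No call site for these two helpers appears in the portion of the diff shown above; their intent is clamping the final page of a paginated fetch. An illustrative use with made-up numbers:

total_count, offset, requested_size = 449, 400, 100

if is_requested_size_available(total_count, offset, requested_size):
    size_to_fetch = requested_size
else:
    # Only 49 records remain past offset 400, so clamp the final page.
    size_to_fetch = calculate_remaining_data(total_count, offset, requested_size)

assert size_to_fetch == 49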