Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Logic to use internal ES instance #86

Merged
merged 1 commit into from
Feb 21, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,18 @@ indice=
username=
password=
```
If you also have an archived internal instance that keeps track of older data, it can be specified with a `.internal` suffix. Here is an example configuration for our `OCP` internal archived instance.
```toml
[ocp.elasticsearch.internal]
url=
indice=
# [optional] common prefix for all indexes
prefix=
username=
password=
```

Internally the API when serving the `/ocp` endpoints will use this connection.
Internally, the API will use this connection when serving the `/ocp` endpoints. It is also suggested to create indexes with the same names in the archived instance to avoid further complications.

The `jira` table requires a `url` key and a `personal_access_token` key. The `url` is a string value that points to the URL address of your Jira resource. The [Personal Access Token](https://confluence.atlassian.com/enterprise/using-personal-access-tokens-1026032365.html) is a string value that is the credential issued to authenticate and authorize this application with your Jira resource.

Expand Down
6 changes: 2 additions & 4 deletions backend/app/api/v1/commons/hce.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,11 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str):
}
}
}
query['query']['bool']['filter']['range']['date']['lte'] = str(end_datetime)
query['query']['bool']['filter']['range']['date']['gte'] = str(start_datetime)

es = ElasticService(configpath=configpath)
response = await es.post(query)
response = await es.post(query=query, start_date=start_datetime, end_date=end_datetime, timestamp_field='date')
await es.close()
tasks = [item['_source'] for item in response["hits"]["hits"]]
tasks = [item['_source'] for item in response]
jobs = pd.json_normalize(tasks)
jobs[['group']] = jobs[['group']].fillna(0)
jobs.fillna('', inplace=True)
Expand Down
6 changes: 2 additions & 4 deletions backend/app/api/v1/commons/ocp.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,11 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str):
}
}
}
query['query']['bool']['filter']['range']['timestamp']['lte'] = str(end_datetime)
query['query']['bool']['filter']['range']['timestamp']['gte'] = str(start_datetime)

es = ElasticService(configpath=configpath)
response = await es.post(query)
response = await es.post(query=query, start_date=start_datetime, end_date=end_datetime, timestamp_field='timestamp')
await es.close()
tasks = [item['_source'] for item in response["hits"]["hits"]]
tasks = [item['_source'] for item in response]
jobs = pd.json_normalize(tasks)
if len(jobs) == 0:
return jobs
Expand Down
6 changes: 3 additions & 3 deletions backend/app/api/v1/commons/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ async def getMetadata(uuid: str, configpath: str) :
}
print(query)
es = ElasticService(configpath=configpath)
response = await es.post(query)
response = await es.post(query=query)
await es.close()
meta = [item['_source'] for item in response["hits"]["hits"]]
return meta[0]
meta = [item['_source'] for item in response]
return meta[0]
16 changes: 8 additions & 8 deletions backend/app/api/v1/endpoints/ocp/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,9 +218,9 @@ async def jobSummary(uuids: list):
}
print(query)
es = ElasticService(configpath="ocp.elasticsearch",index=index)
response = await es.post(query)
response = await es.post(query=query)
await es.close()
runs = [item['_source'] for item in response["hits"]["hits"]]
runs = [item['_source'] for item in response]
return runs

async def processBurner(data: dict) :
Expand Down Expand Up @@ -344,9 +344,9 @@ async def getBurnerResults(uuid: str, uuids: list, index: str ):
}
print(query)
es = ElasticService(configpath="ocp.elasticsearch",index=index)
response = await es.post(query)
response = await es.post(query=query)
await es.close()
runs = [item['_source'] for item in response["hits"]["hits"]]
runs = [item['_source'] for item in response]
return runs

async def getResults(uuid: str, uuids: list, index: str ):
Expand All @@ -364,9 +364,9 @@ async def getResults(uuid: str, uuids: list, index: str ):
}
print(query)
es = ElasticService(configpath="ocp.elasticsearch",index=index)
response = await es.post(query)
response = await es.post(query=query)
await es.close()
runs = [item['_source'] for item in response["hits"]["hits"]]
runs = [item['_source'] for item in response]
return runs

async def getMatchRuns(meta: dict, workerCount: False):
Expand Down Expand Up @@ -414,9 +414,9 @@ async def getMatchRuns(meta: dict, workerCount: False):

print(query)
es = ElasticService(configpath="ocp.elasticsearch")
response = await es.post(query)
response = await es.post(query=query)
await es.close()
runs = [item['_source'] for item in response["hits"]["hits"]]
runs = [item['_source'] for item in response]
uuids = []
for run in runs :
uuids.append(run["uuid"])
Expand Down
4 changes: 2 additions & 2 deletions backend/app/api/v1/endpoints/ocp/results.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ async def results_for_job(
}

es = ElasticService(configpath="ocp.elasticsearch")
response = await es.post(query)
response = await es.post(query=query)
await es.close()
tasks = [item['_source'] for item in response["hits"]["hits"]]
tasks = [item['_source'] for item in response]
return tasks
4 changes: 2 additions & 2 deletions backend/app/api/v1/endpoints/quay/quayGraphs.py
Original file line number Diff line number Diff line change
Expand Up @@ -308,9 +308,9 @@ async def getMatchRuns(meta: dict):

print(query)
es = ElasticService(configpath="quay.elasticsearch")
response = await es.post(query)
response = await es.post(query=query)
await es.close()
runs = [item['_source'] for item in response["hits"]["hits"]]
runs = [item['_source'] for item in response]
uuids = []
for run in runs :
uuids.append(run["uuid"])
Expand Down
Loading
Loading