# main.py: server entrypoint for the ChatGPT Retrieval Plugin (forked from openai/chatgpt-retrieval-plugin)

import os
from typing import Optional
import uvicorn
from fastapi import FastAPI, File, Form, HTTPException, Depends, Body, UploadFile
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi.staticfiles import StaticFiles
from loguru import logger
from models.api import (
    DeleteRequest,
    DeleteResponse,
    QueryRequest,
    QueryResponse,
    UpsertRequest,
    UpsertResponse,
)
from datastore.factory import get_datastore
from services.file import get_document_from_file
from models.models import DocumentMetadata, Source

bearer_scheme = HTTPBearer()
BEARER_TOKEN = os.environ.get("BEARER_TOKEN")
assert BEARER_TOKEN is not None


def validate_token(credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme)):
    if credentials.scheme != "Bearer" or credentials.credentials != BEARER_TOKEN:
        raise HTTPException(status_code=401, detail="Invalid or missing token")
    return credentials


app = FastAPI(dependencies=[Depends(validate_token)])
app.mount("/.well-known", StaticFiles(directory=".well-known"), name="static")
# Create a sub-application, in order to access just the query endpoint in an OpenAPI schema, found at http://0.0.0.0:8000/sub/openapi.json when the app is running locally
sub_app = FastAPI(
    title="Retrieval Plugin API",
    description="A retrieval API for querying and filtering documents based on natural language queries and metadata",
    version="1.0.0",
    servers=[{"url": "https://your-app-url.com"}],
    dependencies=[Depends(validate_token)],
)
app.mount("/sub", sub_app)


@app.post(
    "/upsert-file",
    response_model=UpsertResponse,
)
async def upsert_file(
    file: UploadFile = File(...),
    metadata: Optional[str] = Form(None),
):
    try:
        metadata_obj = (
            DocumentMetadata.parse_raw(metadata)
            if metadata
            else DocumentMetadata(source=Source.file)
        )
    except Exception:
        # If the metadata form field is missing or not valid JSON, fall back to
        # a plain file source.
        metadata_obj = DocumentMetadata(source=Source.file)

    document = await get_document_from_file(file, metadata_obj)

    try:
        ids = await datastore.upsert([document])
        return UpsertResponse(ids=ids)
    except Exception as e:
        logger.error(e)
        raise HTTPException(status_code=500, detail=str(e))


@app.post(
    "/upsert",
    response_model=UpsertResponse,
)
async def upsert(
    request: UpsertRequest = Body(...),
):
    try:
        ids = await datastore.upsert(request.documents)
        return UpsertResponse(ids=ids)
    except Exception as e:
        logger.error(e)
        raise HTTPException(status_code=500, detail="Internal Server Error")


@app.post(
    "/query",
    response_model=QueryResponse,
)
async def query_main(
    request: QueryRequest = Body(...),
):
    try:
        results = await datastore.query(
            request.queries,
        )
        return QueryResponse(results=results)
    except Exception as e:
        logger.error(e)
        raise HTTPException(status_code=500, detail="Internal Server Error")


@sub_app.post(
    "/query",
    response_model=QueryResponse,
    # NOTE: We are describing the shape of the API endpoint input due to a current limitation in parsing arrays of objects from OpenAPI schemas. This will not be necessary in the future.
    description="Accepts search query objects array each with query and optional filter. Break down complex questions into sub-questions. Refine results by criteria, e.g. time / source, don't do this often. Split queries if ResponseTooLargeError occurs.",
)
async def query(
    request: QueryRequest = Body(...),
):
    try:
        results = await datastore.query(
            request.queries,
        )
        return QueryResponse(results=results)
    except Exception as e:
        logger.error(e)
        raise HTTPException(status_code=500, detail="Internal Server Error")


@app.delete(
    "/delete",
    response_model=DeleteResponse,
)
async def delete(
    request: DeleteRequest = Body(...),
):
    if not (request.ids or request.filter or request.delete_all):
        raise HTTPException(
            status_code=400,
            detail="One of ids, filter, or delete_all is required",
        )
    try:
        success = await datastore.delete(
            ids=request.ids,
            filter=request.filter,
            delete_all=request.delete_all,
        )
        return DeleteResponse(success=success)
    except Exception as e:
        logger.error(e)
        raise HTTPException(status_code=500, detail="Internal Server Error")


@app.on_event("startup")
async def startup():
    global datastore
    datastore = await get_datastore()


def start():
    uvicorn.run("server.main:app", host="0.0.0.0", port=8000, reload=True)
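

# Running this module directly is not the documented entrypoint (the repository
# exposes `start` as a script, e.g. via `poetry run start`), but a guard like
# this is a convenient local-run sketch:
if __name__ == "__main__":
    start()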