extracted_sample.yaml
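# Extracted AutoRAG pipeline config: a persistent Chroma vector store ("default") backs a
# retrieval node line, followed by a post-retrieval node line with a prompt_maker (fstring)
# node and a generator node (gpt-3.5-turbo-16k via llama_index_llm).
# Each node's strategy.metrics lists the evaluation metrics AutoRAG scores that node with.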
vectordb:
  - name: default
    db_type: chroma
    client_type: persistent
    embedding_model: openai
    collection_name: openai
    path: ${PROJECT_DIR}/data/chroma
node_lines:
  - node_line_name: retrieve_node_line
    nodes:
      - node_type: retrieval
        modules:
          - module_type: vectordb
            vectordb: default
            top_k: 3
        strategy:
          metrics:
            - retrieval_f1
            - retrieval_recall
            - retrieval_precision
  - node_line_name: post_retrieve_node_line
    nodes:
      - node_type: prompt_maker
        modules:
          - module_type: fstring
            prompt: "Read the passages and answer the given question. \n Question: {query} \n Passage: {retrieved_contents} \n Answer : "
        strategy:
          generator_modules:
            - batch: 2
              llm: openai
              module_type: llama_index_llm
          metrics:
            - bleu
            - meteor
            - rouge
      - node_type: generator
        modules:
          - batch: 2
            llm: openai
            model: gpt-3.5-turbo-16k
            module_type: llama_index_llm
        strategy:
          metrics:
            - metric_name: bleu
            - metric_name: meteor
            - embedding_model: openai
              metric_name: sem_score
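# A minimal sketch of running an evaluation trial with this config through AutoRAG's
# Python API; the parquet paths below are assumed placeholders for your own dataset files:
#
#   from autorag.evaluator import Evaluator
#
#   evaluator = Evaluator(
#       qa_data_path="data/qa.parquet",          # assumed path to the QA dataset
#       corpus_data_path="data/corpus.parquet",  # assumed path to the corpus
#   )
#   evaluator.start_trial("extracted_sample.yaml")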