forked from elastic/kibana
Replace with conversational retrieval chain
1 parent db50883 · commit ae400c5
Showing 3 changed files with 117 additions and 55 deletions.
97 changes: 97 additions & 0 deletions
...ck/plugins/elastic_assistant/server/lib/langchain/conversational_retrieval_chain/index.ts
@@ -0,0 +1,97 @@
import { ChatPromptTemplate, MessagesPlaceholder } from 'langchain/prompts';
import { RunnableBranch, RunnableSequence } from 'langchain/runnables';
import type { VectorStoreRetriever } from 'langchain/vectorstores/base';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { BaseMessage } from 'langchain/schema';
import { Document } from 'langchain/document';
import { StringOutputParser } from 'langchain/schema/output_parser';

const CONDENSE_QUESTION_SYSTEM_TEMPLATE = `You are an experienced researcher, expert at interpreting and answering questions based on provided sources.
Your job is to remove references to chat history from incoming questions, rephrasing them as standalone questions.`;

const CONDENSE_QUESTION_HUMAN_TEMPLATE = `Using only the previous conversation as context, rephrase the following question to be a standalone question.
Do not respond with anything other than a rephrased standalone question. Be concise, but complete, and resolve all references to the chat history.
<question>
{question}
</question>`;

const condenseQuestionPrompt = ChatPromptTemplate.fromMessages([
  ['system', CONDENSE_QUESTION_SYSTEM_TEMPLATE],
  new MessagesPlaceholder('chat_history'),
  ['human', CONDENSE_QUESTION_HUMAN_TEMPLATE],
]);

const ANSWER_SYSTEM_TEMPLATE = `You are an experienced researcher, expert at interpreting and answering questions based on provided sources.
Using the provided context, answer the user's question to the best of your ability using only the resources provided.
You must only use information from the provided search results.
If there is no information in the context relevant to the question at hand, just say "Hmm, I'm not sure."
Anything between the following \`context\` html blocks is retrieved from a knowledge bank, not part of the conversation with the user.
<context>
{context}
</context>`;

const ANSWER_HUMAN_TEMPLATE = `Answer the following question to the best of your ability:
{standalone_question}`;

const answerPrompt = ChatPromptTemplate.fromMessages([
  ['system', ANSWER_SYSTEM_TEMPLATE],
  new MessagesPlaceholder('chat_history'),
  ['human', ANSWER_HUMAN_TEMPLATE],
]);

// Renders retrieved documents as a single string for interpolation into the answer prompt
const formatDocuments = (docs: Document[]) => {
  return docs.map((doc) => `<doc>\n${doc.pageContent}\n</doc>`).join('\n');
};

export function createConversationalRetrievalChain({
  model,
  retriever,
}: {
  model: BaseLanguageModel;
  retriever: VectorStoreRetriever;
}) {
  // Retrieves documents relevant to the standalone question and formats them as context
  const retrievalChain = RunnableSequence.from([
    (input) => input.standalone_question,
    retriever,
    formatDocuments,
  ]).withConfig({ runName: 'RetrievalChain' });

  // Condenses a follow-up question plus chat history into a standalone question
  const standaloneQuestionChain = RunnableSequence.from([
    condenseQuestionPrompt,
    model,
    new StringOutputParser(),
  ]).withConfig({ runName: 'RephraseQuestionChain' });

  // Answers the standalone question using the retrieved context and chat history
  const answerChain = RunnableSequence.from([
    {
      standalone_question: (input) => input.standalone_question,
      chat_history: (input) => input.chat_history,
      context: retrievalChain,
    },
    answerPrompt,
    model,
  ]).withConfig({ runName: 'AnswerGenerationChain' });

  const conversationalRetrievalChain = RunnableSequence.from<{
    question: string;
    chat_history: BaseMessage[];
  }>([
    {
      // Small optimization - only rephrase if the question is a followup
      standalone_question: RunnableBranch.from([
        [(input) => input.chat_history.length > 0, standaloneQuestionChain],
        (input) => input.question,
      ]),
      chat_history: (input) => input.chat_history,
    },
    answerChain,
  ]);

  return conversationalRetrievalChain;
}
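
For reference, here is a minimal usage sketch of the exported factory. The `ChatOpenAI` model, `MemoryVectorStore` retriever, sample documents, and relative import path are illustrative assumptions, not part of this commit; the plugin is expected to wire in its own model wrapper and retriever.

import { ChatOpenAI } from 'langchain/chat_models/openai';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { MemoryVectorStore } from 'langchain/vectorstores/memory';
import { AIMessage, HumanMessage } from 'langchain/schema';
import { createConversationalRetrievalChain } from './index'; // hypothetical import path

async function example() {
  // Stand-in model and retriever for illustration only.
  const model = new ChatOpenAI({ temperature: 0 });
  const vectorStore = await MemoryVectorStore.fromTexts(
    [
      'Kibana is a browser-based analytics and visualization UI for Elasticsearch.',
      'The Elastic Assistant helps users build queries and investigate alerts.',
    ],
    [{ id: 1 }, { id: 2 }],
    new OpenAIEmbeddings()
  );

  const chain = createConversationalRetrievalChain({
    model,
    retriever: vectorStore.asRetriever(),
  });

  // First turn: empty chat history, so the question bypasses the rephrase branch.
  const firstAnswer = await chain.invoke({
    question: 'What is Kibana?',
    chat_history: [],
  });

  // Follow-up turn: the non-empty history routes through the rephrase chain,
  // so the model is asked to resolve "it" into a standalone question before retrieval.
  const followUpAnswer = await chain.invoke({
    question: 'What does it have to do with the Elastic Assistant?',
    chat_history: [
      new HumanMessage('What is Kibana?'),
      new AIMessage('Kibana is a browser-based analytics and visualization UI for Elasticsearch.'),
    ],
  });

  console.log(firstAnswer, followUpAnswer);
}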