diff --git a/src/graphql/mutations/CreateAIReply.js b/src/graphql/mutations/CreateAIReply.js
index effe0c59..f72d6e8e 100644
--- a/src/graphql/mutations/CreateAIReply.js
+++ b/src/graphql/mutations/CreateAIReply.js
@@ -89,7 +89,7 @@ export async function createNewAIReply({
     ...completionOptions,
   };
 
-  const { update: updateAIResponse } = createAIResponse({
+  const { update: updateAIResponse, getAIResponseId } = createAIResponse({
     user,
     docId: article.id,
     type: 'AI_REPLY',
@@ -97,7 +97,10 @@ export async function createNewAIReply({
   });
 
   // Resolves to completed or errored AI response.
-  const apiResult = await getOpenAI()
+  const apiResult = await getOpenAI({
+    traceId: await getAIResponseId(),
+    traceName: `AI Reply for article ${article.id}`,
+  })
     .chat.completions.create(completionRequest)
     .catch((error) => {
       console.error(error);
diff --git a/src/graphql/util.js b/src/graphql/util.js
index fe359ba5..fdbfde3c 100644
--- a/src/graphql/util.js
+++ b/src/graphql/util.js
@@ -781,7 +781,7 @@ function extractTextFromFullTextAnnotation(fullTextAnnotation) {
 export async function createTranscript(queryInfo, fileUrl, user) {
   if (!user) throw new Error('[createTranscript] user is required');
-  const { update } = createAIResponse({
+  const { update, getAIResponseId } = createAIResponse({
     user,
     type: 'TRANSCRIPT',
     docId: queryInfo.id,
   });
@@ -823,7 +823,10 @@ export async function createTranscript(queryInfo, fileUrl, user) {
   // Ref: https://github.com/openai/openai-node/issues/77#issuecomment-1500899486
   const audio = ffmpeg(fileResp.body).noVideo().format('mp3').pipe();
 
-  const data = await getOpenAI().audio.transcriptions.create({
+  const data = await getOpenAI({
+    traceId: await getAIResponseId(),
+    traceName: `Whisper transcript for media ${queryInfo.id}`,
+  }).audio.transcriptions.create({
     // Ref: https://github.com/openai/openai-node/issues/77#issuecomment-2265072410
     file: await toFile(audio, 'file.mp3', { type: 'audio/mp3' }),
     model: 'whisper-1',