Skip to content

Commit

Permalink
Front-End: Fixing indexing issue when chaining messages
Browse files Browse the repository at this point in the history
  • Loading branch information
king112ola committed Aug 10, 2024
1 parent cf91c89 commit 9a175ce
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 52 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -123,27 +123,6 @@ const NotificationSection = () => {
break;
}

// if (messages[lastMessageId - 1].contentType == "image")
// {

// messages[lastMessageId - 1].messageBody.forEach(element => {

// switch (Object.keys(element)[0]) {
// case 'imageUrlOnIpfs':
// setIpfsHash(element.imageUrlOnIpfs)
// break;
// case 'promptOnIpfs':
// // element.prompt should be the one stored on the ipfs, but i think we should use the redux record for simplicity
// setPromptForIpfsHash(messages[lastMessageId - 1].prompt)
// default:
// break;
// }
// //.split('ipfs/')[1]
// })}
// setIpfsHash
// if (messages[lastMessageId - 1].contentType == "image")
//

}, [messages])
//

Expand Down
8 changes: 1 addition & 7 deletions Front-End/src/views/chatgpt/dropdownMenu/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,6 @@ export const DropdownMenu = ({ id, itemid, handleMessageInput, scrollRef }) => {
// Load message/ Init message from Redux store
const messages = useSelector((state) => state.messagesFromUserAndServer.messages, shallowEqual)

// Find the last Id from the message list
const lastMessageId = useSelector((state) => state.messagesFromUserAndServer.lastMessageId, shallowEqual)

// Close all of the dropdown menus, with the closing fade-out + slide-down effect
const clearDropdownMenu = (handleMessageInput) => {

Expand Down Expand Up @@ -61,10 +58,7 @@ export const DropdownMenu = ({ id, itemid, handleMessageInput, scrollRef }) => {
break;
}

// Inject a user side message to notify the user that the message is being redirected
handleMessageInput("Sending to " +aiEngineNameOutputToScreen[desireAiEngine] +"...", desireAiEngine,null, null, null, true)

clearDropdownMenu(() => handleMessageInput(inputFromUser, desireAiEngine, hiddenFromUser))
clearDropdownMenu(() => handleMessageInput(inputFromUser,desireAiEngine,null, null, null, true))

}

Expand Down
51 changes: 27 additions & 24 deletions Front-End/src/views/chatgpt/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -256,32 +256,44 @@ const ChatGptIndex = () => {

// Create method that handles the input and dispatches it to the reducer
// desireAiEngine is only used when data chaining happens; if input is from the input field, desireAiEngine will be undefined
const handleMessageInput = async (inputFromUser, desireAiEngine, hiddenFromUser, inputType, extraConfigPdfTransLanguage, internalMessage) => {
const handleMessageInput = async (inputFromUser, desireAiEngine, hiddenFromUser, inputType, extraConfigPdfTransLanguage, messageChaining) => {

setLoadingStage('loading');

// Save the message the user is sending into Redux, switching to the desired AI engine when data chaining happens

let responseData

let messageToSave
// Message chaining: first dispatch a user-side "Sending to ..." notice, then the chained message itself
if (messageChaining) {
dispatch(SET_AddMessage({
id: lastMessageId + 1,
messageBody: "Sending to " + aiEngineNameOutputToScreen[desireAiEngine] + "...",
sender: 'User',
contentType: inputType ?? 'text',
prompt: inputFromUser,
currentAiEngine: desireAiEngine ?? currentAiEngine,
hiddenFromUser: hiddenFromUser ?? false,
messageChaining: messageChaining ?? false
}))

dispatch(SET_AddMessage({
id: Number(customAlphabet('1234567890', 64)()),
messageBody: inputFromUser,
sender: 'User',
contentType: inputType ?? 'text',
currentAiEngine: desireAiEngine ?? currentAiEngine,
hiddenFromUser: hiddenFromUser ?? false,
internalMessage: internalMessage?? false
}))
}
else {
dispatch(SET_AddMessage({
id: lastMessageId + 1,
messageBody: inputFromUser,
sender: 'User',
contentType: inputType ?? 'text',
currentAiEngine: desireAiEngine ?? currentAiEngine,
hiddenFromUser: hiddenFromUser ?? false,
messageChaining: messageChaining ?? false
}))
}

// Stop sending a request to the AI engines if it's an internal message
if(internalMessage) return
let messageToSave

try {

responseData = await aiEngineApiCall(inputFromUser, desireAiEngine ?? currentAiEngine, inputType == 'pdf' ? extraConfigPdfTransLanguage : null)
let responseData = await aiEngineApiCall(inputFromUser, desireAiEngine ?? currentAiEngine, inputType == 'pdf' ? extraConfigPdfTransLanguage : null)

// Set loading to false when the request is complete
setTimeout(() => {
Expand All @@ -294,7 +306,6 @@ const ChatGptIndex = () => {
let dynamicSelectedAiEngine

messageToSave = {
// TODO: make this id to be nano id as well, but got content type error, need fix
id: lastMessageId + 2,
sender: desireAiEngine ?? currentAiEngine,
prompt: inputFromUser,
Expand Down Expand Up @@ -395,14 +406,6 @@ const ChatGptIndex = () => {
} catch (error) {

console.error("Error occurs while handleMessageInput:", error)
// messageToSave = {
// id: lastMessageId + 2,
// sender: desireAiEngine ?? currentAiEngine,
// contentType: "text",
// prompt: inputFromUser,
// currentAiEngine: desireAiEngine ?? currentAiEngine,
// messageBody: responseData,
// }

} finally {

Expand Down

0 comments on commit 9a175ce

Please sign in to comment.