LLM sends a message before state transition completes #67

Open
Atharv24 opened this issue Dec 19, 2024 · 1 comment
Atharv24 commented Dec 19, 2024

Current Setup
Interview bot with dynamic flow handling:
initial node (greet user) -> call start_interview function (only a print statement here) -> call transition function (picks the question to be asked from the flow manager state) -> create and set node to question_node (specifies the question to be asked in the LLM context)
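
For context, the code further down assumes the flow manager state has been seeded before the initial node is set. A rough reconstruction of that setup (key names are taken from the handlers below; the pipeline/transport wiring is omitted and the values are placeholders):

# Hypothetical initialization, not part of the snippet below; runs inside the
# async setup coroutine after the FlowManager has been constructed.
flow_manager.state.update({
    "question_bank": question_bank,        # list of {"type": ..., "question": ...} dicts
    "current_question_index": 0,
    "ending_prompt": ending_prompt,
    "websocket": websocket,                # used to notify the client when a question is asked
})
await flow_manager.initialize()
await flow_manager.set_node("initial", create_initial_node(bot_prompt))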

Expected Functionality

  • LLM greets the user
  • Calls the start_interview tool
  • Tool runs successfully
  • Transition to the new question node with the question to be asked in context
  • LLM asks the question to the user

Actual Behaviour

  • LLM greets the user
  • Calls the start_interview tool
  • Tool runs successfully
  • Transition to the new question node with the question to be asked in context
  • LLM asks a question on its own. Checking the LLM context at this point, I can see that the new node's context is not there. When I say one more line, the next context that is printed shows the expected context messages.

As per my understanding, the LLM sends a message before the new node context is updated. Let me know if any code is needed from my end. I use the same format as in the dynamic flow example.

UPDATE: I added a 1-second sleep after setting the node, just before the transition function returns, and it works fine now.
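
For clarity, a minimal sketch of that workaround, assuming the sleep sits right after set_node in the ask_next_question helper (FlowManager, create_question_node, create_end_node, and json come from the code in the next comment; the exact placement of the sleep is an assumption):

import asyncio

async def ask_next_question(flow_manager: FlowManager):
    """Ask the next question or end the interview (workaround version)."""
    current_index = flow_manager.state["current_question_index"]
    question_bank = flow_manager.state["question_bank"]

    if current_index >= len(question_bank):
        await flow_manager.set_node("end", create_end_node(flow_manager.state["ending_prompt"]))
        return

    next_question = question_bank[current_index]
    await flow_manager.set_node(
        f"question_{current_index + 1}",
        create_question_node(next_question),
    )
    # Workaround: wait for the new node's context to be applied before the
    # transition callback returns and the LLM generates its next response.
    await asyncio.sleep(1)
    await flow_manager.state["websocket"].send(json.dumps({
        "type": "question_asked",
        "question": next_question
    }))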

Atharv24 (Author) commented:

import json
from enum import StrEnum
from typing import Dict

from loguru import logger
# Import paths below are assumed to match the pipecat-flows dynamic flow example.
from pipecat_flows import FlowArgs, FlowManager, FlowResult, NodeConfig

class InterviewStartResult(FlowResult):
    status: str

class AnswerCollectionResult(FlowResult):
    answer: str

class QuestionType(StrEnum):
    SUBJECTIVE = "SUBJECTIVE"
    CODING = "CODING"
    SUBJECTIVE_PROBING = "SUBJECTIVE_PROBING"

def create_initial_node(bot_prompt: str) -> NodeConfig:
    """Create the initial greeting node."""
    return {
        "messages": [
            {
                "role": "system",
                "content": bot_prompt,
            }
        ],
        "functions": [
            {
                "type": "function",
                "function": {
                    "name": "start_interview",
                    "handler": handle_start_interview,
                    "description": "Start the interview after candidate confirms",
                    "parameters": {"type": "object", "properties": {}},
                },
            }
        ],
    }

def create_question_node(
    question: dict, 
) -> NodeConfig:
    """Create node for asking questions with appropriate prompts."""

    question_content = (
        "Ask the candidate the following question:\n"
        f"QUESTION: {question['question']}\n\n"
        "Instructions:\n"
        "If the candidate's answer is incomplete or unclear, probe further to clarify their response. "
        "If the candidate's answer is complete and accurate, call the move_to_next_question function. "
        "If the candidate wants to skip the question or is unable to answer, call the move_to_next_question function with the answer '<<skipped>>'."
    )

    logger.info(
        "\n=== Creating New Question Node ===\n"
        f"Question Type: {question['type']}\n"
        f"Question: {question['question']}\n"
        "================================="
    )

    return {
        "messages": [
            {
                "role": "system",
                "content": question_content,
            }
        ],
        "functions": [
            {
                "type": "function",
                "function": {
                    "name": "move_to_next_question",
                    "handler": move_to_next_question,
                    "description": "Move to next question",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "answer": {"type": "string"}
                        },
                        "required": ["answer"],
                    },
                },
            }
        ],
    }

def create_end_node(ending_prompt: str) -> NodeConfig:
    """Create the final node."""
    return {
        "messages": [
            {
                "role": "system",
                "content": ending_prompt,
            }
        ],
        "functions": [],
        "post_actions": [{"type": "end_conversation"}],
    }

async def handle_start_interview(args: FlowArgs) -> InterviewStartResult:
    """Start the interview."""
    logger.debug("Processing start_interview transition")
    return {"status": "started"}

async def move_to_next_question(args: FlowArgs) -> AnswerCollectionResult:
    """Process answer collection."""
    answer = args["answer"]
    logger.debug(f"move_to_next_question handler executing with answer: {answer}")
    return {"answer": answer} 
    
# Transition callback for dynamic flows: runs after a function handler
# completes and is responsible for picking and setting the next node.
async def handle_interview_transition(function_name: str, args: Dict, flow_manager: FlowManager):
    """Handle transitions between interview flow states."""
    logger.debug(f"Transition callback executing for function: {function_name} with args: {args}")

    if function_name == "start_interview":
        await handle_start_interview_transition(flow_manager)
    elif function_name == "move_to_next_question":
        await handle_move_to_next_question(args, flow_manager)

async def handle_start_interview_transition(flow_manager: FlowManager):
    """Start the interview with the first question (renamed so it no longer shadows the handle_start_interview handler above)."""
    question_bank = flow_manager.state["question_bank"]
    if not question_bank:
        await flow_manager.set_node("end", create_end_node(flow_manager.state["ending_prompt"]))
        return
    flow_manager.state["total_questions"] = len(question_bank)
    
    await ask_next_question(flow_manager)

async def handle_move_to_next_question(args: Dict, flow_manager: FlowManager):
    """Process answer collection."""
    answer = args["answer"]
    current_question = flow_manager.state["question_bank"][flow_manager.state["current_question_index"]]

    logger.info(
        "\n=== Processing Answer ===\n"
        f"Question Type: {current_question['type']}\n"
        f"Answer: {answer}\n"
        f"Current Question Index: {flow_manager.state['current_question_index']}\n"
        "======================="
    )

    flow_manager.state["current_question_index"] += 1
    await ask_next_question(flow_manager)

async def ask_next_question(flow_manager: FlowManager):
    """Helper function to ask the next question or end interview."""
    current_index = flow_manager.state["current_question_index"]
    question_bank = flow_manager.state["question_bank"]
    
    if current_index >= len(question_bank):
        await flow_manager.set_node("end", create_end_node(flow_manager.state["ending_prompt"]))
        return
    
    next_question = question_bank[current_index]
    
    await flow_manager.set_node(
        f"question_{current_index + 1}",
        create_question_node(
            next_question,
        )
    )
    await flow_manager.state["websocket"].send(json.dumps({
        "type": "question_asked",
        "question": next_question
    }))
