Skip to content

Commit

Permalink
refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
doodledood committed Nov 15, 2023
1 parent f7f6162 commit 85af0f2
Show file tree
Hide file tree
Showing 16 changed files with 30 additions and 36 deletions.
23 changes: 9 additions & 14 deletions chatflock/base.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, Dict, List, Optional, Sequence, TypeVar
from typing import Any, Callable, List, Optional, Sequence, TypeVar

import abc
import dataclasses
Expand Down Expand Up @@ -36,9 +36,6 @@ def on_participant_joined_chat(self, chat: "Chat", participant: "ChatParticipant
def on_participant_left_chat(self, chat: "Chat", participant: "ChatParticipant") -> None:
pass

def initialize(self) -> None:
pass

def __str__(self) -> str:
return self.name

Expand All @@ -49,7 +46,7 @@ def detailed_str(self, level: int = 0) -> str:

class ActiveChatParticipant(ChatParticipant):
symbol: str
messages_hidden: bool
messages_hidden: bool = False

def __init__(self, name: str, symbol: str = "👤", messages_hidden: bool = False):
super().__init__(name=name)
Expand Down Expand Up @@ -77,6 +74,9 @@ class ChatMessage(BaseModel):


class ChatConductor(abc.ABC):
def __init__(self, composition_generator: "ChatCompositionGenerator"):
self.composition_generator = composition_generator

@abc.abstractmethod
def select_next_speaker(self, chat: "Chat") -> Optional[ActiveChatParticipant]:
raise NotImplementedError()
Expand All @@ -90,22 +90,17 @@ def get_chat_result(self, chat: "Chat") -> str:

return last_message.content

def initialize_chat(self, chat: "Chat", **kwargs: Any) -> None:
# Make sure all participants are initialized.
for active_participant in chat.get_active_participants():
active_participant.initialize()

for non_active_participant in chat.get_non_active_participants():
non_active_participant.initialize()
def prepare_chat(self, chat: "Chat", **kwargs: Any) -> None:
pass

def initiate_chat_with_result(
def initiate_dialog(
self,
chat: "Chat",
initial_message: Optional[str] = None,
from_participant: Optional[ChatParticipant] = None,
**kwargs: Any,
) -> str:
self.initialize_chat(chat=chat, **kwargs)
self.prepare_chat(chat=chat, **kwargs)

active_participants = chat.get_active_participants()
if len(active_participants) <= 1:
Expand Down
4 changes: 2 additions & 2 deletions chatflock/conductors/langchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def create_next_speaker_first_human_prompt(self, chat: "Chat") -> str:

return str(prompt)

def initialize_chat(self, chat: "Chat", **kwargs: Any) -> None:
def prepare_chat(self, chat: "Chat", **kwargs: Any) -> None:
# If a composition generator is provided, generate a new composition for the chat before starting.
if self.composition_generator is not None and not self.composition_initialized:
composition_suggestion = kwargs.get("composition_suggestion", None)
Expand Down Expand Up @@ -154,7 +154,7 @@ def initialize_chat(self, chat: "Chat", **kwargs: Any) -> None:

self.composition_initialized = True

super().initialize_chat(chat=chat, **kwargs)
super().prepare_chat(chat=chat, **kwargs)

def select_next_speaker(self, chat: Chat) -> Optional[ActiveChatParticipant]:
participants = chat.get_active_participants()
Expand Down
2 changes: 1 addition & 1 deletion chatflock/parsing_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def chat_messages_to_pydantic(
)
conductor = RoundRobinChatConductor()

_ = conductor.initiate_chat_with_result(chat=parser_chat)
_ = conductor.initiate_dialog(chat=parser_chat)

if json_parser.output is None:
raise MessageCouldNotBeParsedError("An output could not be parsed from the chat messages.")
Expand Down
5 changes: 2 additions & 3 deletions chatflock/participants/internal_group.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,9 @@ def __init__(

super().__init__(name=group_name, **kwargs)

def initialize(self):
# Make sure the chat & conductor are initialized, as it may be a dynamic chat with
# no participants yet.
self.inner_chat_conductor.initialize_chat(chat=self.inner_chat)
self.inner_chat_conductor.prepare_chat(chat=self.inner_chat)

def respond_to_chat(self, chat: "Chat") -> str:
if self.clear_inner_chat_before_responding:
Expand All @@ -64,7 +63,7 @@ def respond_to_chat(self, chat: "Chat") -> str:
answerer=leader,
)

group_response = self.inner_chat_conductor.initiate_chat_with_result(
group_response = self.inner_chat_conductor.initiate_dialog(
chat=self.inner_chat, initial_message=request_for_group
)

Expand Down
4 changes: 2 additions & 2 deletions chatflock/use_cases/bshr.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,11 +165,11 @@ def generate_queries(
if spinner is not None:
spinner.stop()

_ = chat_conductor.initiate_chat_with_result(
_ = chat_conductor.initiate_dialog(
chat=chat, initial_message=f"What is your information need or query?", from_participant=query_generator
)
else:
_ = chat_conductor.initiate_chat_with_result(
_ = chat_conductor.initiate_dialog(
chat=chat,
initial_message=str(
StructuredString(
Expand Down
2 changes: 1 addition & 1 deletion chatflock/use_cases/request_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,6 @@ def get_response(
)

chat_conductor = RoundRobinChatConductor()
answer = chat_conductor.initiate_chat_with_result(chat=chat, initial_message=query, from_participant=user)
answer = chat_conductor.initiate_dialog(chat=chat, initial_message=query, from_participant=user)

return answer, chat
2 changes: 1 addition & 1 deletion chatflock/web_research/web_research.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def get_answer(
max_total_messages=2,
)
chat_conductor = RoundRobinChatConductor()
final_answer = chat_conductor.initiate_chat_with_result(
final_answer = chat_conductor.initiate_dialog(
chat=chat,
initial_message=str(
StructuredString(
Expand Down
4 changes: 2 additions & 2 deletions examples/automatic_chat_simple_composition.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,11 @@ def automatic_simple_chat_composition(model: str = "gpt-4-1106-preview", tempera

# Not necessary in practice since initiation is done automatically when calling `initiate_dialog`.
# However, this is needed to eagerly generate the composition. Default is lazy.
chat_conductor.initialize_chat(chat=chat)
chat_conductor.prepare_chat(chat=chat)

print(f"Generated Composition:\n=================\n{chat.active_participants_str}\n=================\n\n")

result = chat_conductor.initiate_chat_with_result(chat=chat)
result = chat_conductor.initiate_dialog(chat=chat)
print(result)


Expand Down
4 changes: 2 additions & 2 deletions examples/automatic_hierarchical_chat_composition.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def create_chat(**kwargs: Any) -> Chat:
# It's not necessary in practice to manually call `prepare_chat` since initiation is done automatically
# when calling `initiate_dialog`. However, this is needed to eagerly generate the composition.
# Default is lazy and will happen when the chat is initiated.
chat_conductor.initialize_chat(
chat_conductor.prepare_chat(
chat=chat,
# Only relevant when passing in a composition generator
composition_suggestion="DevCompany: Includes a CEO, Product Team, Marketing Team, and a Development "
Expand All @@ -64,7 +64,7 @@ def create_chat(**kwargs: Any) -> Chat:
print(f"\nGenerated composition:\n=================\n{chat.active_participants_str}\n=================\n\n")

# You can also pass in a composition suggestion here.
result = chat_conductor.initiate_chat_with_result(chat=chat)
result = chat_conductor.initiate_dialog(chat=chat)
print(result)


Expand Down
4 changes: 2 additions & 2 deletions examples/automatic_internal_group_participant.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,11 @@ def automatic_internal_group_participant(model: str = "gpt-4-1106-preview", temp

# Not necessary in practice since initiation is done automatically when calling `initiate_dialog`.
# However, this is needed to eagerly generate the composition. Default is lazy.
chat_conductor.initialize_chat(chat=chat)
chat_conductor.prepare_chat(chat=chat)
print(f"\nGenerated composition:\n=================\n{chat.active_participants_str}\n=================\n\n")

# You can also pass in a composition suggestion here.
result = chat_conductor.initiate_chat_with_result(chat=chat)
result = chat_conductor.initiate_dialog(chat=chat)
print(result)


Expand Down
2 changes: 1 addition & 1 deletion examples/chatgpt_clone.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def chatgpt_clone(model: str = "gpt-4-1106-preview", temperature: float = 0.0) -
)

chat_conductor = RoundRobinChatConductor()
chat_conductor.initiate_chat_with_result(chat=chat)
chat_conductor.initiate_dialog(chat=chat)


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion examples/chatgpt_clone_with_additional_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def chatgpt_clone_with_additional_tools(
)

chat_conductor = RoundRobinChatConductor()
chat_conductor.initiate_chat_with_result(chat=chat)
chat_conductor.initiate_dialog(chat=chat)

page_retriever.close()

Expand Down
2 changes: 1 addition & 1 deletion examples/chatgpt_clone_with_langchain_memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def chatgpt_clone_with_langchain_memory(model: str = "gpt-4-1106-preview", tempe
chat = Chat(backing_store=backing_store, renderer=TerminalChatRenderer(), initial_participants=participants)

chat_conductor = RoundRobinChatConductor()
chat_conductor.initiate_chat_with_result(chat=chat)
chat_conductor.initiate_dialog(chat=chat)


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion examples/chatgpt_clone_with_langchain_retrieval.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def chatgpt_clone_with_langchain_retrieval(model: str = "gpt-4-1106-preview", te
)

chat_conductor = RoundRobinChatConductor()
chat_conductor.initiate_chat_with_result(chat=chat)
chat_conductor.initiate_dialog(chat=chat)


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion examples/manual_internal_group_participant.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def manual_internal_group_participant(model: str = "gpt-4-1106-preview", tempera

chat_conductor = RoundRobinChatConductor()

chat_conductor.initiate_chat_with_result(chat=chat)
chat_conductor.initiate_dialog(chat=chat)


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion examples/three_way_ai_conductor.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def three_way_ai_conductor(model: str = "gpt-4-1106-preview", temperature: float
termination_condition="When the user finds the food satisfactory.",
)

chat_conductor.initiate_chat_with_result(chat=chat)
chat_conductor.initiate_dialog(chat=chat)


if __name__ == "__main__":
Expand Down

0 comments on commit 85af0f2

Please sign in to comment.