Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bugfix: Fix several issues relating to GPT4 #49

Merged
merged 9 commits into from
Feb 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 25 additions & 13 deletions app/config.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,27 @@
import os

import types
import copy
from guidance.llms import OpenAI
import guidance.llms._openai
from pyaml_env import parse_config
from pydantic import BaseModel, validator, Field, typing
from pydantic import BaseModel, validator

old_add_text_to_chat_mode = guidance.llms._openai.add_text_to_chat_mode


def new_add_text_to_chat_mode(chat_mode):
    """Patched replacement for guidance's ``add_text_to_chat_mode``.

    Streaming responses (sync or async generators) are delegated to the
    library's generator handler. A plain completion dict is mutated in
    place: each choice gets a ``"text"`` field copied from its chat
    message content, falling back to a single space when no content is
    present, and the same dict is returned.
    """
    # Streaming case: hand the generator straight to the library helper.
    if isinstance(chat_mode, (types.GeneratorType, types.AsyncGeneratorType)):
        return guidance.llms._openai.add_text_to_chat_mode_generator(chat_mode)

    # Non-streaming case: annotate every choice in place.
    for choice in chat_mode["choices"]:
        message = choice.get("message", {})
        choice["text"] = message["content"] if "content" in message else " "
    return chat_mode


guidance.llms._openai.add_text_to_chat_mode = new_add_text_to_chat_mode


class LLMModelSpecs(BaseModel):
Expand All @@ -21,7 +40,6 @@ def get_instance(cls):
class OpenAIConfig(LLMModelConfig):
spec: LLMModelSpecs
llm_credentials: dict
instance: typing.Any = Field(repr=False)

@validator("type")
def check_type(cls, v):
Expand All @@ -30,15 +48,13 @@ def check_type(cls, v):
return v

def get_instance(cls):
    """Build a fresh OpenAI client from the configured credentials.

    A deep copy of the credential dict is handed to the client so the
    guidance OpenAI wrapper cannot mutate this config's own mapping.
    """
    # NOTE(review): a previous revision printed the raw credential dict
    # here for debugging; that leaked secrets to stdout and is removed.
    llm_credentials = copy.deepcopy(cls.llm_credentials)
    return OpenAI(**llm_credentials)


class StrategyLLMConfig(LLMModelConfig):
llms: list[str]
instance: typing.Any = Field(repr=False)

@validator("type")
def check_type(cls, v):
Expand All @@ -47,13 +63,9 @@ def check_type(cls, v):
return v

def get_instance(cls):
    """Build a new StrategyLLM over the configured model names."""
    # Local import needed to avoid circular dependency
    from app.llms.strategy_llm import StrategyLLM

    return StrategyLLM(cls.llms)


class APIKeyConfig(BaseModel):
Expand Down
40 changes: 25 additions & 15 deletions app/services/circuit_breaker.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import logging
from enum import Enum
from app.services.cache import cache_store
from threading import Lock

health_locks = {}

log = logging.getLogger(__name__)

def get_status(cls, checkhealth_func, cache_key: str):
    """Return the circuit status for ``cache_key``.

    Returns: Status of the cache_key.
    Circuit is CLOSED if cache_key is up and running, and OPEN otherwise.
    """
    # dict.setdefault is a single atomic operation under the GIL, unlike
    # the check-then-insert it replaces, so two threads can never create
    # two different Lock objects for the same key.
    lock = health_locks.setdefault(cache_key, Lock())

    # Serialize health checks per key so only one thread probes at a time;
    # the others wait and then hit the freshly cached status below.
    with lock:
        status_key = f"{cache_key}:status"
        status = cache_store.get(status_key)

        # A cached status is still within its TTL; trust it.
        if status:
            return cls.Status(status)

        is_up = False
        try:
            is_up = checkhealth_func()
        except Exception as e:
            # A failing probe counts as "down"; log and fall through.
            log.error(e)

        if is_up:
            cache_store.set(
                status_key, cls.Status.CLOSED, ex=cls.CLOSED_TTL_SECONDS
            )
            return cls.Status.CLOSED

        cache_store.set(status_key, cls.Status.OPEN, ex=cls.OPEN_TTL_SECONDS)
        return cls.Status.OPEN
3 changes: 3 additions & 0 deletions docker/pyris-production.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,11 @@ services:
condition: service_started
pull_policy: always
restart: always
environment:
OPENAI_LOG: "debug"
volumes:
- ${PYRIS_APPLICATION_YML_FILE:-../application.example.yml}:/app/application.yml:ro
command: "poetry run uvicorn app.main:app --host '0.0.0.0'"
networks:
- pyris

Expand Down