Skip to content

Commit

Permalink
revoke serialization (#14456)
Browse files Browse the repository at this point in the history
  • Loading branch information
hwchase17 authored Dec 8, 2023
1 parent ff0d551 commit 02ee007
Show file tree
Hide file tree
Showing 26 changed files with 106 additions and 6 deletions.
8 changes: 8 additions & 0 deletions libs/langchain/langchain/agents/openai_assistant/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,10 @@ class OpenAIAssistantFinish(AgentFinish):
run_id: str
thread_id: str

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False


class OpenAIAssistantAction(AgentAction):
"""AgentAction with info needed to submit custom tool output to existing run."""
Expand All @@ -36,6 +40,10 @@ class OpenAIAssistantAction(AgentAction):
run_id: str
thread_id: str

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False


def _get_openai_client() -> openai.OpenAI:
try:
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/agents/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@
class AgentScratchPadChatPromptTemplate(ChatPromptTemplate):
"""Chat prompt template for the agent scratchpad."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

def _construct_agent_scratchpad(
self, intermediate_steps: List[Tuple[AgentAction, str]]
) -> str:
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/chains/api/openapi/requests_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ def _type(self) -> str:
class APIRequesterChain(LLMChain):
"""Get the request parser."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@classmethod
def from_llm_and_typescript(
cls,
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/chains/api/openapi/response_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ def _type(self) -> str:
class APIResponderChain(LLMChain):
"""Get the response parser."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@classmethod
def from_llm(
cls, llm: BaseLanguageModel, verbose: bool = True, **kwargs: Any
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/chains/conversation/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,10 @@ class Config:
extra = Extra.forbid
arbitrary_types_allowed = True

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def input_keys(self) -> List[str]:
"""Use this since so some prompt vars come from history."""
Expand Down
8 changes: 8 additions & 0 deletions libs/langchain/langchain/chains/flare/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,10 @@ class _ResponseChain(LLMChain):

prompt: BasePromptTemplate = PROMPT

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def input_keys(self) -> List[str]:
    """Input keys are exactly the prompt template's input variables."""
    return self.prompt.input_variables
Expand Down Expand Up @@ -77,6 +81,10 @@ class QuestionGeneratorChain(LLMChain):
prompt: BasePromptTemplate = QUESTION_GENERATOR_PROMPT
"""Prompt template for the chain."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def input_keys(self) -> List[str]:
"""Input keys for the chain."""
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/chat_models/anyscale.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,10 @@ def _llm_type(self) -> str:
def lc_secrets(self) -> Dict[str, str]:
return {"anyscale_api_key": "ANYSCALE_API_KEY"}

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

anyscale_api_key: SecretStr
"""AnyScale Endpoints API keys."""
model_name: str = Field(default=DEFAULT_MODEL, alias="model")
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/chat_models/everlyai.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ def _llm_type(self) -> str:
def lc_secrets(self) -> Dict[str, str]:
return {"everlyai_api_key": "EVERLYAI_API_KEY"}

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

everlyai_api_key: Optional[str] = None
"""EverlyAI Endpoints API keys."""
model_name: str = Field(default=DEFAULT_MODEL, alias="model")
Expand Down
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/jinachat.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ def lc_secrets(self) -> Dict[str, str]:
@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this model can be serialized by Langchain."""
    # Serialization is deliberately disabled for this model; the stale
    # duplicate `return True` (an unreachable leftover) is removed.
    return False

client: Any #: :meta private:
temperature: float = 0.7
Expand Down
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/konko.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def lc_secrets(self) -> Dict[str, str]:
@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this model can be serialized by Langchain."""
    # Serialization is deliberately disabled for this model; the stale
    # duplicate `return True` (an unreachable leftover) is removed.
    return False

client: Any = None #: :meta private:
model: str = Field(default=DEFAULT_MODEL, alias="model")
Expand Down
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def _llm_type(self) -> str:
@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this model can be serialized by Langchain."""
    # Serialization is deliberately disabled for this model; the stale
    # duplicate `return True` (an unreachable leftover) is removed.
    return False

def _format_message_as_text(self, message: BaseMessage) -> str:
if isinstance(message, ChatMessage):
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/chat_models/promptlayer_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,10 @@ class PromptLayerChatOpenAI(ChatOpenAI):
pl_tags: Optional[List[str]]
return_pl_id: Optional[bool] = False

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

def _generate(
self,
messages: List[BaseMessage],
Expand Down
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/volcengine_maas.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def _llm_type(self) -> str:
@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this model can be serialized by Langchain."""
    # Serialization is deliberately disabled for this model; the stale
    # duplicate `return True` (an unreachable leftover) is removed.
    return False

@property
def _identifying_params(self) -> Dict[str, Any]:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@ class _DocumentWithState(Document):
state: dict = Field(default_factory=dict)
"""State associated with the document."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

def to_document(self) -> Document:
    """Return a plain ``Document`` copy, dropping the attached state."""
    plain_fields = {
        "page_content": self.page_content,
        "metadata": self.metadata,
    }
    return Document(**plain_fields)
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/evaluation/comparison/eval_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,10 @@ class PairwiseStringEvalChain(PairwiseStringEvaluator, LLMEvalChain, LLMChain):
default_factory=PairwiseStringResultOutputParser
)

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

class Config:
"""Configuration for the PairwiseStringEvalChain."""

Expand Down
8 changes: 8 additions & 0 deletions libs/langchain/langchain/evaluation/criteria/eval_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,6 +232,10 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain):
"""The name of the criterion being evaluated."""
output_key: str = "results" #: :meta private:

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

class Config:
"""Configuration for the QAEvalChain."""

Expand Down Expand Up @@ -508,6 +512,10 @@ async def _aevaluate_strings(
class LabeledCriteriaEvalChain(CriteriaEvalChain):
"""Criteria evaluation chain that requires references."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def requires_reference(self) -> bool:
"""Whether the evaluation requires a reference text."""
Expand Down
12 changes: 12 additions & 0 deletions libs/langchain/langchain/evaluation/qa/eval_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,10 @@ class Config:

extra = Extra.ignore

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def evaluation_name(self) -> str:
    """Name under which this evaluator's results are reported."""
    return "correctness"
Expand Down Expand Up @@ -204,6 +208,10 @@ async def _aevaluate_strings(
class ContextQAEvalChain(LLMChain, StringEvaluator, LLMEvalChain):
"""LLM Chain for evaluating QA w/o GT based on context"""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def requires_reference(self) -> bool:
"""Whether the chain requires a reference string."""
Expand Down Expand Up @@ -328,6 +336,10 @@ async def _aevaluate_strings(
class CotQAEvalChain(ContextQAEvalChain):
"""LLM Chain for evaluating QA using chain of thought reasoning."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def evaluation_name(self) -> str:
    """Name under which this evaluator's results are reported."""
    return "COT Contextual Accuracy"
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/evaluation/qa/generate_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@ class QAGenerateChain(LLMChain):
output_parser: BaseLLMOutputParser = Field(default=_QA_OUTPUT_PARSER)
output_key: str = "qa_pairs"

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@classmethod
def from_llm(cls, llm: BaseLanguageModel, **kwargs: Any) -> QAGenerateChain:
"""Load QA Generate Chain from LLM."""
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/evaluation/scoring/eval_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,6 +185,10 @@ class Config:

extra = Extra.ignore

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def requires_reference(self) -> bool:
"""Return whether the chain requires a reference.
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/indexes/_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,10 @@ class _HashedDocument(Document):
metadata_hash: str
"""The hash of the document metadata."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@root_validator(pre=True)
def calculate_hashes(cls, values: Dict[str, Any]) -> Dict[str, Any]:
"""Root validator to calculate content and metadata hash."""
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/llms/anyscale.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,10 @@ def send_query(llm, text):

prefix_messages: List = Field(default_factory=list)

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/llms/openlm.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@
class OpenLM(BaseOpenAI):
"""OpenLM models."""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def _invocation_params(self) -> Dict[str, Any]:
    """Invocation parameters: the parent's params plus this model's name.

    Keys from the parent take precedence over ``"model"`` if both exist,
    matching the original dict-unpacking merge order.
    """
    params: Dict[str, Any] = {"model": self.model_name}
    params.update(super()._invocation_params)
    return params
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/llms/promptlayer_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,10 @@ class PromptLayerOpenAI(OpenAI):
pl_tags: Optional[List[str]]
return_pl_id: Optional[bool] = False

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

def _generate(
self,
prompts: List[str],
Expand Down
2 changes: 1 addition & 1 deletion libs/langchain/langchain/llms/tongyi.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ def lc_secrets(self) -> Dict[str, str]:

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this model can be serialized by Langchain."""
    # Serialization is deliberately disabled for this model; the stale
    # duplicate `return True` (an unreachable leftover) is removed.
    return False

client: Any #: :meta private:
model_name: str = "qwen-plus-v1"
Expand Down
4 changes: 4 additions & 0 deletions libs/langchain/langchain/llms/vllm.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,10 @@ def _llm_type(self) -> str:
class VLLMOpenAI(BaseOpenAI):
"""vLLM OpenAI-compatible API client"""

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this class can be serialized by LangChain."""
    # Serialization is deliberately disabled for this class.
    return False

@property
def _invocation_params(self) -> Dict[str, Any]:
"""Get the parameters used to invoke the model."""
Expand Down
2 changes: 1 addition & 1 deletion libs/langchain/langchain/llms/watsonxllm.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ class Config:

@classmethod
def is_lc_serializable(cls) -> bool:
    """Return whether this model can be serialized by Langchain."""
    # Serialization is deliberately disabled for this model; the stale
    # duplicate `return True` (an unreachable leftover) is removed.
    return False

@property
def lc_secrets(self) -> Dict[str, str]:
Expand Down

0 comments on commit 02ee007

Please sign in to comment.