Skip to content

Commit

Permalink
Merge branch 'di_mgx' into 'mgx_ops'
Browse files Browse the repository at this point in the history
DI features: register tools from path

See merge request pub/MetaGPT!8
  • Loading branch information
garylin2099 committed Mar 30, 2024
2 parents 6e3c16a + 3e10d34 commit 7fc5cc3
Show file tree
Hide file tree
Showing 7 changed files with 231 additions and 30 deletions.
8 changes: 4 additions & 4 deletions metagpt/roles/di/data_interpreter.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import annotations

import json
from typing import Literal, Union
from typing import Literal

from pydantic import Field, model_validator

Expand Down Expand Up @@ -39,7 +39,7 @@ class DataInterpreter(Role):
use_plan: bool = True
use_reflection: bool = False
execute_code: ExecuteNbCode = Field(default_factory=ExecuteNbCode, exclude=True)
tools: Union[str, list[str]] = [] # Use special symbol ["<all>"] to indicate use of all registered tools
tools: list[str] = [] # Use special symbol ["<all>"] to indicate use of all registered tools
tool_recommender: ToolRecommender = None
react_mode: Literal["plan_and_act", "react"] = "plan_and_act"
max_react_loop: int = 10 # used for react mode
Expand All @@ -50,7 +50,7 @@ def set_plan_and_tool(self) -> "Interpreter":
self.use_plan = (
self.react_mode == "plan_and_act"
) # create a flag for convenience, overwrite any passed-in value
if self.tools:
if self.tools and not self.tool_recommender:
self.tool_recommender = BM25ToolRecommender(tools=self.tools)
self.set_actions([WriteAnalysisCode])
self._set_state(0)
Expand Down Expand Up @@ -104,7 +104,7 @@ async def _write_and_exec_code(self, max_retry: int = 3):
plan_status = self.planner.get_plan_status() if self.use_plan else ""

# tool info
if self.tools:
if self.tool_recommender:
context = (
self.working_memory.get()[-1].content if self.working_memory.get() else ""
) # thoughts from _think stage in 'react' mode
Expand Down
80 changes: 78 additions & 2 deletions metagpt/tools/tool_convert.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
import ast
import inspect

from metagpt.utils.parse_docstring import GoogleDocstringParser, remove_spaces

PARSER = GoogleDocstringParser


def convert_code_to_tool_schema(obj, include: list[str] = None):
def convert_code_to_tool_schema(obj, include: list[str] = None) -> dict:
"""Converts an object (function or class) to a tool schema by inspecting the object"""
docstring = inspect.getdoc(obj)
assert docstring, "no docstring found for the objects, skip registering"
# assert docstring, "no docstring found for the objects, skip registering"

if inspect.isclass(obj):
schema = {"type": "class", "description": remove_spaces(docstring), "methods": {}}
Expand All @@ -27,6 +29,16 @@ def convert_code_to_tool_schema(obj, include: list[str] = None):
return schema


def convert_code_to_tool_schema_ast(code: str) -> dict[str, dict]:
    """Convert a code string to tool schemas by parsing it with AST.

    Walks every top-level public function and class in ``code`` and builds a
    schema for each, without importing or executing the module.

    Args:
        code: The full source text of a Python file.

    Returns:
        A mapping of tool name -> tool schema. (CodeVisitor accumulates schemas
        in a dict keyed by the definition's name; callers iterate the result
        with ``.items()``, so the previous ``list[dict]`` annotation was wrong.)
    """
    visitor = CodeVisitor(code)
    parsed_code = ast.parse(code)
    visitor.visit(parsed_code)

    return visitor.get_tool_schemas()


def function_docstring_to_schema(fn_obj, docstring) -> dict:
"""
Converts a function's docstring into a schema dictionary.
Expand Down Expand Up @@ -62,3 +74,67 @@ def get_class_method_docstring(cls, method_name):
if method.__doc__:
return method.__doc__
return None # No docstring found in the class hierarchy


class CodeVisitor(ast.NodeVisitor):
    """Walk a module's AST and build tool schemas for its public definitions."""

    def __init__(self, source_code: str):
        # Schemas keyed by definition name: {tool_name: tool_schema}
        self.tool_schemas = {}
        self.source_code = source_code

    def visit_ClassDef(self, node):
        # Collect schemas for the class's public methods, plus __init__.
        methods = {}
        for item in node.body:
            if not isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
                continue
            if item.name.startswith("_") and item.name != "__init__":
                continue
            methods[item.name] = self._get_function_schemas(item)
        self.tool_schemas[node.name] = {
            "type": "class",
            "description": remove_spaces(ast.get_docstring(node)),
            "methods": methods,
            "code": ast.get_source_segment(self.source_code, node),
        }

    def visit_FunctionDef(self, node):
        self._visit_function(node)

    def visit_AsyncFunctionDef(self, node):
        self._visit_function(node)

    def _visit_function(self, node):
        # Private helpers are not exposed as tools.
        if node.name.startswith("_"):
            return
        schemas = self._get_function_schemas(node)
        schemas["code"] = ast.get_source_segment(self.source_code, node)
        self.tool_schemas[node.name] = schemas

    def _get_function_schemas(self, node):
        docstring = remove_spaces(ast.get_docstring(node))
        overall_desc, param_desc = PARSER.parse(docstring)
        is_async = isinstance(node, ast.AsyncFunctionDef)
        return {
            "type": "async_function" if is_async else "function",
            "description": overall_desc,
            "signature": self._get_function_signature(node),
            "parameters": param_desc,
        }

    def _get_function_signature(self, node):
        # Defaults align with the tail of the positional args; map each
        # defaulted arg name to its default AST node first.
        positional = node.args.args
        default_nodes = node.args.defaults
        default_map = {}
        if default_nodes:
            for arg_node, default_node in zip(positional[-len(default_nodes):], default_nodes):
                default_map[arg_node.arg] = default_node

        rendered = []
        for arg_node in positional:
            piece = arg_node.arg
            if arg_node.annotation is not None:
                piece = f"{piece}: {ast.unparse(arg_node.annotation)}"
            if arg_node.arg in default_map:
                piece = f"{piece} = {ast.unparse(default_map[arg_node.arg])}"
            rendered.append(piece)

        suffix = f" -> {ast.unparse(node.returns)}" if node.returns else ""
        return f"({', '.join(rendered)}){suffix}"

    def get_tool_schemas(self):
        """Return the accumulated {tool_name: tool_schema} mapping."""
        return self.tool_schemas
5 changes: 2 additions & 3 deletions metagpt/tools/tool_recommend.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import json
from typing import Any

import jieba
import numpy as np
from pydantic import BaseModel, field_validator
from rank_bm25 import BM25Okapi
Expand Down Expand Up @@ -182,7 +181,7 @@ def _init_corpus(self):
self.bm25 = BM25Okapi(tokenized_corpus)

def _tokenize(self, text):
    """Split query/corpus text into BM25 tokens via naive whitespace split.

    The page scrape left both the pre-commit (jieba) and post-commit lines in
    place, making the second return unreachable; keep only the post-commit
    whitespace tokenizer (jieba was dropped from requirements in this commit).
    """
    return text.split()  # FIXME: needs more sophisticated tokenization

async def recall_tools(self, context: str = "", plan: Plan = None, topk: int = 20) -> list[Tool]:
query = plan.current_task.instruction if plan else context
Expand All @@ -193,7 +192,7 @@ async def recall_tools(self, context: str = "", plan: Plan = None, topk: int = 2
recalled_tools = [list(self.tools.values())[index] for index in top_indexes]

logger.info(
f"Recalled tools: \n{[tool.name for tool in recalled_tools]}; Scores: {[doc_scores[index] for index in top_indexes]}"
f"Recalled tools: \n{[tool.name for tool in recalled_tools]}; Scores: {[np.round(doc_scores[index], 4) for index in top_indexes]}"
)

return recalled_tools
Expand Down
70 changes: 53 additions & 17 deletions metagpt/tools/tool_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,17 @@
import inspect
import os
from collections import defaultdict
from typing import Union
from pathlib import Path

import yaml
from pydantic import BaseModel

from metagpt.const import TOOL_SCHEMA_PATH
from metagpt.logs import logger
from metagpt.tools.tool_convert import convert_code_to_tool_schema
from metagpt.tools.tool_convert import (
convert_code_to_tool_schema,
convert_code_to_tool_schema_ast,
)
from metagpt.tools.tool_data_type import Tool, ToolSchema


Expand All @@ -27,21 +30,23 @@ class ToolRegistry(BaseModel):

def register_tool(
self,
tool_name,
tool_path,
schema_path="",
tool_code="",
tags=None,
tool_source_object=None,
include_functions=None,
verbose=False,
tool_name: str,
tool_path: str,
schemas: dict = None,
schema_path: str = "",
tool_code: str = "",
tags: list[str] = None,
tool_source_object=None, # can be any classes or functions
include_functions: list[str] = None,
verbose: bool = False,
):
if self.has_tool(tool_name):
return

schema_path = schema_path or TOOL_SCHEMA_PATH / f"{tool_name}.yml"

schemas = make_schema(tool_source_object, include_functions, schema_path)
if not schemas:
schemas = make_schema(tool_source_object, include_functions, schema_path)

if not schemas:
return
Expand Down Expand Up @@ -117,25 +122,56 @@ def make_schema(tool_source_object, include, path):
schema = convert_code_to_tool_schema(tool_source_object, include=include)
with open(path, "w", encoding="utf-8") as f:
yaml.dump(schema, f, sort_keys=False)
# import json
# with open(str(path).replace("yml", "json"), "w", encoding="utf-8") as f:
# json.dump(schema, f, ensure_ascii=False, indent=4)
except Exception as e:
schema = {}
logger.error(f"Fail to make schema: {e}")

return schema


def validate_tool_names(tools: Union[list[str], str]) -> str:
def validate_tool_names(tools: list[str]) -> dict[str, Tool]:
    """Resolve a list of tool identifiers into registered Tool objects.

    Each entry may be a registered tool name, a tool tag, or a filesystem
    path (file or directory); paths are registered on the fly. The union of
    everything resolved is returned, and unknown entries are skipped with a
    warning.
    """
    assert isinstance(tools, list), "tools must be a list of str"
    resolved = {}
    for entry in tools:
        # one can define either tool names OR tool tags OR tool path, take union to get the whole set
        # if tool paths are provided, they will be registered on the fly
        if os.path.isfile(entry) or os.path.isdir(entry):
            resolved.update(register_tools_from_path(entry))
        elif TOOL_REGISTRY.has_tool(entry):
            resolved[entry] = TOOL_REGISTRY.get_tool(entry)
        elif TOOL_REGISTRY.has_tool_tag(entry):
            resolved.update(TOOL_REGISTRY.get_tools_by_tag(entry))
        else:
            logger.warning(f"invalid tool name or tool type name: {entry}, skipped")
    return resolved


def register_tools_from_file(file_path) -> dict[str, Tool]:
    """Parse a single Python file and register its public definitions as tools.

    Skips non-Python files, ``setup.py``, and ``test*`` files. The file is
    parsed with AST (never imported or executed), so registration is safe for
    arbitrary code.

    Args:
        file_path: Path to the candidate ``.py`` file.

    Returns:
        {tool_name: Tool} for everything registered from this file; an empty
        dict if the file is skipped or cannot be parsed.
    """
    file_name = Path(file_path).name
    if not file_name.endswith(".py") or file_name == "setup.py" or file_name.startswith("test"):
        return {}
    registered_tools = {}
    try:
        code = Path(file_path).read_text(encoding="utf-8")
        tool_schemas = convert_code_to_tool_schema_ast(code)
    except (SyntaxError, ValueError, UnicodeDecodeError) as e:
        # A single unreadable or unparsable file must not abort a whole
        # directory walk in register_tools_from_path; warn and move on.
        logger.warning(f"Failed to parse {file_path}, skipped: {e}")
        return {}
    for name, schemas in tool_schemas.items():
        tool_code = schemas.pop("code", "")
        TOOL_REGISTRY.register_tool(
            tool_name=name,
            tool_path=file_path,
            schemas=schemas,
            tool_code=tool_code,
        )
        registered_tools.update({name: TOOL_REGISTRY.get_tool(name)})
    return registered_tools


def register_tools_from_path(path) -> dict[str, Tool]:
    """Register tools from a single file, or recursively from a directory tree."""
    collected = {}
    if os.path.isfile(path):
        collected.update(register_tools_from_file(path))
    elif os.path.isdir(path):
        # Walk every file under the directory; register_tools_from_file
        # filters out anything that is not an eligible .py file.
        for root, _, files in os.walk(path):
            for name in files:
                collected.update(register_tools_from_file(os.path.join(root, name)))
    return collected
2 changes: 1 addition & 1 deletion metagpt/utils/parse_docstring.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@


def remove_spaces(text):
    """Collapse all whitespace runs in ``text`` to single spaces and strip the ends.

    Returns an empty string for falsy input (e.g. ``None``), so callers may pass
    a missing docstring straight through. The scrape left both the pre- and
    post-commit return lines in place (the second was unreachable); keep only
    the post-commit None-safe version.
    """
    return re.sub(r"\s+", " ", text).strip() if text else ""


class DocstringParser:
Expand Down
3 changes: 1 addition & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -69,5 +69,4 @@ imap_tools==1.5.0 # Used by metagpt/tools/libs/email_login.py
qianfan==0.3.2
dashscope==1.14.1
rank-bm25==0.2.2 # for tool recommendation
jieba==0.42.1 # for tool recommendation
gymnasium==0.29.1
gymnasium==0.29.1
Loading

0 comments on commit 7fc5cc3

Please sign in to comment.