From 8690c0c5084e3e91a99f386ddcfd37561a8c73ad Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sat, 9 Nov 2024 21:28:01 -0800 Subject: [PATCH 01/48] update bug report template --- .github/ISSUE_TEMPLATE/1_bug_report.yaml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yaml b/.github/ISSUE_TEMPLATE/1_bug_report.yaml index e15a7dc3f..3bdc75529 100644 --- a/.github/ISSUE_TEMPLATE/1_bug_report.yaml +++ b/.github/ISSUE_TEMPLATE/1_bug_report.yaml @@ -71,10 +71,7 @@ body: Please provide details about your environment, including the following: - OS (e.g., Linux, Windows, macOS) value: |
- <details> - <summary>Current environment</summary> - - </details>
+ - OS: [e.g., Linux, Windows, macOS] validations: required: false From 22d5ebfc7c6482d8afd36688497eda555efc29e7 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sat, 9 Nov 2024 21:30:16 -0800 Subject: [PATCH 02/48] fix integration proposal --- .github/ISSUE_TEMPLATE/5_suggest_integration.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml b/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml index f26eed2c2..8b2a3870c 100644 --- a/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml +++ b/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml @@ -1,4 +1,4 @@ -name: Feature request +name: New integration proposal description: Propose a new integration for this project, either db, retriever, model_client. We highly recommend you to find a POC from the provider team to work together on this. labels: ['needs triage', 'feature'] body: From 802c3af51d42bbad7032de725e753caacede30d1 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sat, 9 Nov 2024 21:35:45 -0800 Subject: [PATCH 03/48] fix the issue labels and add use cases template --- .github/ISSUE_TEMPLATE/1_bug_report.yaml | 2 +- .../ISSUE_TEMPLATE/2_suggest_improvement.yaml | 2 +- .github/ISSUE_TEMPLATE/3_feature_request.yaml | 2 +- .github/ISSUE_TEMPLATE/4_documenting.yaml | 2 +- .../ISSUE_TEMPLATE/5_suggest_integration.yaml | 2 +- .../6_suggest_usecases_benchmarks.yaml | 32 +++++++++++++++++++ 6 files changed, 37 insertions(+), 5 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/6_suggest_usecases_benchmarks.yaml diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yaml b/.github/ISSUE_TEMPLATE/1_bug_report.yaml index 3bdc75529..7657b0cd9 100644 --- a/.github/ISSUE_TEMPLATE/1_bug_report.yaml +++ b/.github/ISSUE_TEMPLATE/1_bug_report.yaml @@ -1,6 +1,6 @@ name: Report a bug description: Any errors that you encounter. -labels: ['needs triage', 'bug'] +labels: ['bug'] body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/2_suggest_improvement.yaml b/.github/ISSUE_TEMPLATE/2_suggest_improvement.yaml index 68bcf055a..a9c471fde 100644 --- a/.github/ISSUE_TEMPLATE/2_suggest_improvement.yaml +++ b/.github/ISSUE_TEMPLATE/2_suggest_improvement.yaml @@ -1,6 +1,6 @@ name: Improvement suggestion description: Suggest an improvement, a code refactor, or deprecation -labels: ['needs triage', 'refactor'] +labels: ['[adalflow] improvement'] body: - type: textarea attributes: diff --git a/.github/ISSUE_TEMPLATE/3_feature_request.yaml b/.github/ISSUE_TEMPLATE/3_feature_request.yaml index 8aa0bc38e..c11f05f27 100644 --- a/.github/ISSUE_TEMPLATE/3_feature_request.yaml +++ b/.github/ISSUE_TEMPLATE/3_feature_request.yaml @@ -1,6 +1,6 @@ name: Feature request description: Propose a feature for this project -labels: ["needs triage", "feature"] +labels: ["[adalflow] new feature request"] body: - type: textarea attributes: diff --git a/.github/ISSUE_TEMPLATE/4_documenting.yaml b/.github/ISSUE_TEMPLATE/4_documenting.yaml index a7c6e7b77..e5b77b0a5 100644 --- a/.github/ISSUE_TEMPLATE/4_documenting.yaml +++ b/.github/ISSUE_TEMPLATE/4_documenting.yaml @@ -1,6 +1,6 @@ name: Typos and doc fixes description: Tell us about how we can improve our documentation and Google colab/ipynb notebooks.
-labels: ["needs triage", "docs"] +labels: ["documentation"] body: - type: textarea attributes: diff --git a/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml b/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml index 8b2a3870c..819dbd6ee 100644 --- a/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml +++ b/.github/ISSUE_TEMPLATE/5_suggest_integration.yaml @@ -1,6 +1,6 @@ name: New integration proposal description: Propose a new integration for this project, either db, retriever, model_client. We highly recommend you to find a POC from the provider team to work together on this. -labels: ['needs triage', 'feature'] +labels: ['[adalflow] integration'] body: - type: textarea attributes: diff --git a/.github/ISSUE_TEMPLATE/6_suggest_usecases_benchmarks.yaml b/.github/ISSUE_TEMPLATE/6_suggest_usecases_benchmarks.yaml new file mode 100644 index 000000000..ea93a39ed --- /dev/null +++ b/.github/ISSUE_TEMPLATE/6_suggest_usecases_benchmarks.yaml @@ -0,0 +1,32 @@ +name: Suggest use cases and benchmarks +description: Propose new use cases that AdalFlow should support or benchmarks that we should compare against +labels: ["new use cases/benchmarks"] +body: + - type: textarea + attributes: + label: Description & Motivation + description: A clear and concise description of the new use case or benchmark proposal + placeholder: | + Please outline the motivation for the proposal. + + + - type: textarea + attributes: + label: Pitch + description: A clear and concise description of what you want to happen. + validations: + required: false + + - type: textarea + attributes: + label: Alternatives + description: A clear and concise description of any alternative solutions or features you've considered, if any. + validations: + required: false + + - type: textarea + attributes: + label: Additional context + description: Add any other context or screenshots about the feature request here. + validations: + required: false From c9af641b81bdbb4fdb6e6a9f6667c3ed15a2b875 Mon Sep 17 00:00:00 2001 From: jaggik Date: Mon, 11 Nov 2024 12:46:52 -0500 Subject: [PATCH 04/48] removed pickling section as it is currently broken. fixed few import issues. Added pip install for Adalflow --- tutorials/component.ipynb | 1572 +++++++++++++++++++++---------------- 1 file changed, 896 insertions(+), 676 deletions(-) diff --git a/tutorials/component.ipynb b/tutorials/component.ipynb index 17e371a47..2e459a453 100644 --- a/tutorials/component.ipynb +++ b/tutorials/component.ipynb @@ -1,711 +1,931 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import re\n", - "from adalflow.core import Component, Generator\n", - "from adalflow.components.model_client import OpenAIClient\n", - "from adalflow.components.model_client import GroqAPIClient\n", - "from adalflow.utils import setup_env # make sure you have a .env file with OPENAI_API_KEY and GROQ_API_KEY" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's turn on the library log to help with debugging." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ + "cells": [ { - "data": { - "text/plain": [ - "" + "cell_type": "code", + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,datasets]\n", + "\n", + "clear_output()" + ], + "metadata": { + "id": "Ab_OmE6XTl4h" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "PbAIsBeeTQUk" + }, + "outputs": [], + "source": [ + "import re\n", + "from adalflow.core import Component, Generator\n", + "from adalflow.components.model_client import OpenAIClient\n", + "from adalflow.components.model_client import GroqAPIClient\n", + "from adalflow.utils import setup_env # make sure you have a .env file with OPENAI_API_KEY and GROQ_API_KEY" ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from adalflow.utils import get_logger\n", - "get_logger()" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "#Toy example\n", - "\n", - "class DocQA(Component):\n", - " def __init__(self):\n", - " super(DocQA, self).__init__()\n", - " self.doc = Generator(\n", - " template=template_doc,\n", - " model_client=OpenAIClient(),\n", - " model_kwargs={\"model\": \"gpt-3.5-turbo\"},\n", - " )\n", - "\n", - " def call(self, query: str) -> str:\n", - " return self.doc(prompt_kwargs={\"input_str\": query}).data\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'type': 'DocQA', 'data': {'_components': {'_ordered_dict': True, 'data': [('doc', {'type': 'Generator', 'data': {'_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': [], 'preset_prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, '_api_key': None}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'template': ' You are a doctor User: {{input_str}}', 'preset_prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, '_trainable_params': []}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False}}\n" - ] + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "kRymwpwHTQUm", + "outputId": "6a992f52-1661-4002-ef74-ed26938c6baa" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "API keys have been set.\n" + ] + } + ], + "source": [ + "from getpass import getpass\n", + "import os\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ] }, { - "data": { - "text/plain": [ - "{'_components': 
OrderedDict([('doc',\n", - " Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, \n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}})\n", - " (model_client): OpenAIClient()\n", - " ))]),\n", - " '_parameters': OrderedDict(),\n", - " 'training': False}" + "cell_type": "code", + "execution_count": 7, + "metadata": { + "id": "czGDvnVUTQUm" + }, + "outputs": [], + "source": [ + "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"" ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# states\n", - "states = doc.to_dict()\n", - "print(states)\n", - "doc.__dict__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ + }, { - "data": { - "text/plain": [ - "{'_components': OrderedDict([('doc',\n", - " Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, \n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}})\n", - " (model_client): OpenAIClient()\n", - " ))]),\n", - " '_parameters': OrderedDict(),\n", - " 'training': False}" + "cell_type": "markdown", + "metadata": { + "id": "PPs3gHqeTQUn" + }, + "source": [ + "Let's turn on the library log to help with debugging." ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# restore the states\n", - "doc2 = DocQA.from_dict(states)\n", - "# print(doc2.call(\"What is the capital of France?\"))\n", - "doc2.__dict__\n", - "# doc2.to_dict()" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ + }, { - "data": { - "text/plain": [ - "{'type': 'DocQA',\n", - " 'data': {'_components': {'_ordered_dict': True,\n", - " 'data': [('doc',\n", - " {'type': 'Generator',\n", - " 'data': {'_components': {'_ordered_dict': True,\n", - " 'data': [('prompt',\n", - " {'type': 'Prompt',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_variables': [],\n", - " 'preset_prompt_kwargs': {}}}),\n", - " ('model_client',\n", - " {'type': 'OpenAIClient',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " '_api_key': None}})]},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'preset_prompt_kwargs': {},\n", - " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", - " 'output_processors': None,\n", - " '_trainable_params': []}})]},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False}}" + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "98QNsOcSTQUn", + "outputId": "d63cba1b-6087-4b04-bb2b-0a9d9d4500a5" + }, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "" + ] + }, + "metadata": {}, + "execution_count": 8 + } + ], + "source": [ + "from adalflow.utils import get_logger\n", + "get_logger()" ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "doc2.to_dict() == doc.to_dict()\n", - "doc2.to_dict()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": 
{}, - "outputs": [], - "source": [ - "# pickle to states\n", - "import pickle\n", - "# from collections import OrderedDict\n", - "# from openai import OpenAI # cant pickle this\n", - "\n", - "# class DummpyDocQA():\n", - "# a = OrderedDict()\n", - "# def __init__(self):\n", - "# self.dummpy = 1\n", - "# self.client = OpenAI()\n", - "\n", - "# doc_dummy = DummpyDocQA()\n", - "with open(\"doc.pkl\", \"wb\") as f:\n", - " pickle.dump(doc.to_dict(), f)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "# save the serialized states to a file\n", - "from adalflow.utils.file_io import save_pickle, save_json\n", - "states = doc.to_dict()\n", - "# save_json(states, \"doc.json\")\n", - "save_pickle(states, \"doc.pkl\")\n", - "\n", - "# load the serialized states from a file\n", - "from adalflow.utils.file_io import load_pickle, load_json\n", - "states_loaded = load_pickle(\"doc.pkl\")\n", - "# states_loaded = load_json(\"doc.json\")\n", - "\n", - "states_loaded == states\n", - "\n", - "doc3 = DocQA.from_dict(states_loaded)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-06-14 17:42:48 - INFO - [generator.py:199:call] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n", - "2024-06-14 17:42:48 - INFO - [generator.py:200:call] - model_kwargs: {}\n", - "2024-06-14 17:42:48 - WARNING - [prompt_builder.py:120:compose_prompt_kwargs] - Key input_str does not exist in the prompt_kwargs.\n", - "2024-06-14 17:42:48 - INFO - [openai_client.py:139:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the capital of France?'}]}\n", - "2024-06-14 17:42:48 - INFO - [_client.py:1026:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-06-14 17:42:48 - INFO - [generator.py:208:call] - output: GeneratorOutput(data='The capital of France is Paris.', error=None, usage=None, raw_response='The capital of France is Paris.')\n" - ] + "cell_type": "code", + "execution_count": 9, + "metadata": { + "id": "b3ey1lozTQUo" + }, + "outputs": [], + "source": [ + "#Toy example\n", + "\n", + "class DocQA(Component):\n", + " def __init__(self):\n", + " super(DocQA, self).__init__()\n", + " self.doc = Generator(\n", + " template=template_doc,\n", + " model_client=OpenAIClient(),\n", + " model_kwargs={\"model\": \"gpt-3.5-turbo\"},\n", + " )\n", + "\n", + " def call(self, query: str) -> str:\n", + " return self.doc(prompt_kwargs={\"input_str\": query}).data\n", + "" + ] }, { - "data": { - "text/plain": [ - "'The capital of France is Paris.'" + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "TZAHSrbUTQUo", + "outputId": "66e81fb3-17f9-4570-dbbd-681cad1afc65" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "2024-11-11 17:40:52 - prompt_builder - INFO - [prompt_builder.py:65:__init__] - Prompt has variables: ['input_str']\n", + "2024-11-11 17:40:52 - generator - INFO - [generator.py:144:__init__] - Generator Generator initialized.\n" + ] + } + ], + "source": [ + "doc = DocQA()" ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "doc3\n", - "doc3.call(\"What is the capital of France?\")" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "metadata": {}, - "outputs": [ + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-06-14 17:12:51 - INFO - [generator.py:199:call] - prompt_kwargs: {'input_str': 'What is the best treatment for headache?'}\n", - "2024-06-14 17:12:51 - INFO - [generator.py:200:call] - model_kwargs: {}\n", - "2024-06-14 17:12:51 - INFO - [openai_client.py:140:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?'}]}\n", - "2024-06-14 17:12:54 - INFO - [_client.py:1026:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-06-14 17:12:54 - INFO - [generator.py:208:call] - output: GeneratorOutput(data='As a doctor, the best treatment for a headache depends on the cause of the headache. In general, some common treatments for headaches include:\\n\\n1. Over-the-counter pain relievers such as acetaminophen, ibuprofen, or aspirin\\n2. Rest and relaxation in a quiet, dark room\\n3. Hydration\\n4. Applying a cold or warm compress to the forehead or neck\\n5. Avoiding triggers such as stress, lack of sleep, or certain foods\\n6. Practicing relaxation techniques such as deep breathing, meditation, or yoga\\n7. Prescription medications for chronic or severe headaches\\n\\nIt is important to consult with a healthcare provider for a proper diagnosis and treatment plan for your specific type of headache.', error=None, usage=None, raw_response='As a doctor, the best treatment for a headache depends on the cause of the headache. In general, some common treatments for headaches include:\\n\\n1. Over-the-counter pain relievers such as acetaminophen, ibuprofen, or aspirin\\n2. Rest and relaxation in a quiet, dark room\\n3. Hydration\\n4. Applying a cold or warm compress to the forehead or neck\\n5. Avoiding triggers such as stress, lack of sleep, or certain foods\\n6. Practicing relaxation techniques such as deep breathing, meditation, or yoga\\n7. Prescription medications for chronic or severe headaches\\n\\nIt is important to consult with a healthcare provider for a proper diagnosis and treatment plan for your specific type of headache.')\n", - "As a doctor, the best treatment for a headache depends on the cause of the headache. In general, some common treatments for headaches include:\n", - "\n", - "1. Over-the-counter pain relievers such as acetaminophen, ibuprofen, or aspirin\n", - "2. Rest and relaxation in a quiet, dark room\n", - "3. Hydration\n", - "4. Applying a cold or warm compress to the forehead or neck\n", - "5. Avoiding triggers such as stress, lack of sleep, or certain foods\n", - "6. Practicing relaxation techniques such as deep breathing, meditation, or yoga\n", - "7. 
Prescription medications for chronic or severe headaches\n", - "\n", - "It is important to consult with a healthcare provider for a proper diagnosis and treatment plan for your specific type of headache.\n" - ] - } - ], - "source": [ - "print(doc(\"What is the best treatment for headache?\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "f-y6l44PTQUp", + "outputId": "e24aabd5-d758-4700-fa0d-46b66a88c412" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'type': 'DocQA', 'data': {'_components': {'_ordered_dict': True, 'data': [('doc', {'type': 'Generator', 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo', 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'), 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []}, 'cache': , '_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Generator', '_init_args': {'model_client': None, 'model_kwargs': {}, 'template': None, 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, 'backward_engine': None, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, 'mock_output': False, 'mock_output_data': 'mock data', 'data_map_func': .default_map_func at 0x7b8d471c97e0>, '_use_cache': False, '_kwargs': {'model_client': {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, '_teacher': None}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'DocQA', '_init_args': {}}}\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'_components': OrderedDict([('doc',\n", + " Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " 
(model_client): OpenAIClient()\n", + " ))]),\n", + " '_parameters': OrderedDict(),\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}" + ] + }, + "metadata": {}, + "execution_count": 12 + } + ], + "source": [ + "# states\n", + "states = doc.to_dict()\n", + "print(states)\n", + "doc.__dict__" + ] + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-06-14 17:12:54 - INFO - [generator.py:199:call] - prompt_kwargs: {'input_str': 'What is the best treatment for headache?'}\n", - "2024-06-14 17:12:54 - INFO - [generator.py:200:call] - model_kwargs: {}\n", - "2024-06-14 17:12:54 - INFO - [openai_client.py:140:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?'}]}\n", - "2024-06-14 17:12:56 - INFO - [_client.py:1026:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-06-14 17:12:56 - INFO - [generator.py:208:call] - output: GeneratorOutput(data='As a doctor, the best treatment for a headache can depend on the cause of the headache. If the headache is mild and infrequent, over-the-counter pain relievers such as ibuprofen or acetaminophen can help. Additionally, getting enough rest, staying hydrated, and practicing relaxation techniques such as deep breathing exercises or meditation can also provide relief. If the headache is severe, persistent, or accompanied by other concerning symptoms, it is important to consult a healthcare professional for a proper diagnosis and individualized treatment plan.', error=None, usage=None, raw_response='As a doctor, the best treatment for a headache can depend on the cause of the headache. If the headache is mild and infrequent, over-the-counter pain relievers such as ibuprofen or acetaminophen can help. Additionally, getting enough rest, staying hydrated, and practicing relaxation techniques such as deep breathing exercises or meditation can also provide relief. If the headache is severe, persistent, or accompanied by other concerning symptoms, it is important to consult a healthcare professional for a proper diagnosis and individualized treatment plan.')\n", - "As a doctor, the best treatment for a headache can depend on the cause of the headache. If the headache is mild and infrequent, over-the-counter pain relievers such as ibuprofen or acetaminophen can help. Additionally, getting enough rest, staying hydrated, and practicing relaxation techniques such as deep breathing exercises or meditation can also provide relief. 
If the headache is severe, persistent, or accompanied by other concerning symptoms, it is important to consult a healthcare professional for a proper diagnosis and individualized treatment plan.\n" - ] - } - ], - "source": [ - "print(doc2(\"What is the best treatment for headache?\"))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ + "cell_type": "markdown", + "metadata": { + "id": "z_sH59_bTQUp" + }, + "source": [] + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "('', DocQA(\n", - " (doc): Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, \n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - " )\n", - "))\n", - "('doc', Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, \n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - "))\n", - "('doc.prompt', Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str']))\n", - "('doc.model_client', OpenAIClient())\n" - ] - } - ], - "source": [ - "# list other subcomponents\n", - "\n", - "for subcomponent in doc.named_components():\n", - " print(subcomponent)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's add a parameter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from adalflow.core.parameter import Parameter\n", - "\n", - "doc.register_parameter(\"demo\", param=Parameter(data=\"demo\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ + "cell_type": "code", + "execution_count": 13, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "P81kIS2qTQUp", + "outputId": "d8e0e398-d704-4a85-8692-66a8c570b910" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict Generator, {'type': 'Generator', 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo', 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'), 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []}, 'cache': , '_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Generator', '_init_args': {'model_client': None, 'model_kwargs': {}, 'template': None, 'prompt_kwargs': {}, 
'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, 'backward_engine': None, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, 'mock_output': False, 'mock_output_data': 'mock data', 'data_map_func': .default_map_func at 0x7b8d471c97e0>, '_use_cache': False, '_kwargs': {'model_client': {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, '_teacher': None}}\n", + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict Prompt, {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}\n", + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict OpenAIClient, {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}\n", + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict OpenAIClient, {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'_components': OrderedDict([('doc',\n", + " Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " (model_client): OpenAIClient()\n", + " ))]),\n", + " '_parameters': OrderedDict(),\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}" + ] + }, + "metadata": {}, + "execution_count": 13 + } + ], + "source": [ + "# restore the states\n", + "doc2 = DocQA.from_dict(states)\n", + "# print(doc2.call(\"What is the capital of France?\"))\n", + "doc2.__dict__\n", + "# doc2.to_dict()" + ] + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "('demo', Parameter: demo)\n" - ] - } - ], - "source": [ - "# list all parameters\n", - "for param in doc.named_parameters():\n", - " print(param)" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "198xYpLGTQUp", + "outputId": "ffd33d12-6db0-45c2-dfb1-3d57460ad4c9" + }, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "{'type': 'DocQA',\n", + " 'data': {'_components': {'_ordered_dict': True,\n", + " 'data': [('doc',\n", + " {'type': 'Generator',\n", + " 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo',\n", + " 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'),\n", + " 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []},\n", + " 'cache': ,\n", + " '_components': {'_ordered_dict': True,\n", + " 'data': [('prompt',\n", + " {'type': 'Prompt',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'Prompt',\n", + " '_init_args': {'template': None, 'prompt_kwargs': {}},\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_variables': ['input_str'],\n", + " 'prompt_kwargs': {}}}),\n", + " ('model_client',\n", + " {'type': 'OpenAIClient',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'OpenAIClient',\n", + " '_init_args': {'api_key': None,\n", + " 'chat_completion_parser': None,\n", + " 'input_type': 'text'},\n", + " '_api_key': None,\n", + " 'chat_completion_parser': str>,\n", + " '_input_type': 'text'}})]},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'Generator',\n", + " '_init_args': {'model_client': None,\n", + " 'model_kwargs': {},\n", + " 'template': None,\n", + " 'prompt_kwargs': {},\n", + " 'output_processors': None,\n", + " 'name': None,\n", + " 'cache_path': None,\n", + " 'use_cache': False},\n", + " 'backward_engine': None,\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_kwargs': {},\n", + " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", + " 'output_processors': None,\n", + " 'mock_output': False,\n", + " 'mock_output_data': 'mock data',\n", + " 'data_map_func': .default_map_func(data: 'GeneratorOutputType') -> str>,\n", + " '_use_cache': False,\n", + " '_kwargs': {'model_client': {'type': 'OpenAIClient',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'OpenAIClient',\n", + " '_init_args': {'api_key': None,\n", + " 'chat_completion_parser': None,\n", + " 'input_type': 'text'},\n", + " '_api_key': None,\n", + " 'chat_completion_parser': str>,\n", + " '_input_type': 'text'}},\n", + " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_kwargs': {},\n", + " 'output_processors': None,\n", + " 'name': None,\n", + " 'cache_path': None,\n", + " 'use_cache': False},\n", + " '_teacher': None}})]},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': 
False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}}" + ] + }, + "metadata": {}, + "execution_count": 14 + } + ], + "source": [ + "doc2.to_dict() == doc.to_dict()\n", + "doc2.to_dict()" + ] + }, { - "data": { - "text/plain": [ - "{'type': 'DocQA',\n", - " 'data': {'_components': {'_ordered_dict': True,\n", - " 'data': [('doc',\n", - " {'type': 'Generator',\n", - " 'data': {'_components': {'_ordered_dict': True,\n", - " 'data': [('prompt',\n", - " {'type': 'Prompt',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " '_template_string': ' You are a doctor User: {{input_str}}',\n", - " 'template':