From f03c5fd6a6879d662b2e88c0add066e37c42fa94 Mon Sep 17 00:00:00 2001 From: xukunliu2000 <57486241+liuxukun2000@users.noreply.github.com> Date: Wed, 12 Jul 2023 09:08:33 +0800 Subject: [PATCH] refined code and added some examples (#34) * Allow loading of user-defined tools * Allow loading of user-defined tools, parallelize tasks using threads * refined code and added some examples * fix react * delete auth --------- Co-authored-by: Satan Co-authored-by: Binfeng Xu <65674752+billxbf@users.noreply.github.com> --- .gitignore | 1 + configs/memory.yaml | 4 +- configs/react.yaml | 5 +- examples/agent_as_plugin/agent_as_plugin.py | 9 + examples/basic_usage/basic_usage.py | 9 + examples/custom_agent/configs/env.yaml | 19 + examples/custom_agent/configs/include.yaml | 19 + .../custom_agent/configs/sample_agent.yaml | 18 + examples/custom_agent/custom_agent.ipynb | 715 ++++++++++++++++++ gentopia/__init__.py | 31 +- gentopia/agent/react/agent.py | 3 +- gentopia/output/__init__.py | 3 + gentopia/utils/util.py | 22 +- test.py | 40 +- 14 files changed, 868 insertions(+), 30 deletions(-) create mode 100755 examples/agent_as_plugin/agent_as_plugin.py create mode 100755 examples/basic_usage/basic_usage.py create mode 100755 examples/custom_agent/configs/env.yaml create mode 100755 examples/custom_agent/configs/include.yaml create mode 100755 examples/custom_agent/configs/sample_agent.yaml create mode 100755 examples/custom_agent/custom_agent.ipynb diff --git a/.gitignore b/.gitignore index 1cc5c95..7e28d0d 100644 --- a/.gitignore +++ b/.gitignore @@ -165,3 +165,4 @@ cython_debug/ # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ /api.key +/examples/test.ipynb diff --git a/configs/memory.yaml b/configs/memory.yaml index be97f97..a83c56f 100644 --- a/configs/memory.yaml +++ b/configs/memory.yaml @@ -6,8 +6,8 @@ description: main agent leveraging OpenAI function call API. 
prompt_template: !prompt ZeroShotVanillaPrompt memory: memory_type: chroma # chroma or pinecone - threshold_1: 3 # first-level memory - threshold_2: 3 # second-level memory + threshold_1: 1 # first-level memory + threshold_2: 1 # second-level memory params: index: main top_k: 2 diff --git a/configs/react.yaml b/configs/react.yaml index 91b71b9..f98b56e 100755 --- a/configs/react.yaml +++ b/configs/react.yaml @@ -32,8 +32,5 @@ plugins: # - answer things about math -## Authentication -auth: - OPENAI_API_KEY: !file /home/api.key - WOLFRAM_ALPHA_APPID: !file /home/wolfram.key + diff --git a/examples/agent_as_plugin/agent_as_plugin.py b/examples/agent_as_plugin/agent_as_plugin.py new file mode 100755 index 0000000..3957a5e --- /dev/null +++ b/examples/agent_as_plugin/agent_as_plugin.py @@ -0,0 +1,9 @@ +from gentopia.assembler.agent_assembler import AgentAssembler +from gentopia.output import enable_log +from gentopia import chat + +if __name__ == '__main__': + enable_log() + assembler = AgentAssembler(file='configs/main.yaml') + agent = assembler.get_agent() + chat(agent, verbose=True) diff --git a/examples/basic_usage/basic_usage.py b/examples/basic_usage/basic_usage.py new file mode 100755 index 0000000..5f2552d --- /dev/null +++ b/examples/basic_usage/basic_usage.py @@ -0,0 +1,9 @@ +from gentopia.assembler.agent_assembler import AgentAssembler +from gentopia.output import enable_log +from gentopia import chat + +if __name__ == '__main__': + enable_log() + assembler = AgentAssembler(file='configs/mathria.yaml') + agent = assembler.get_agent() + chat(agent) diff --git a/examples/custom_agent/configs/env.yaml b/examples/custom_agent/configs/env.yaml new file mode 100755 index 0000000..323f43d --- /dev/null +++ b/examples/custom_agent/configs/env.yaml @@ -0,0 +1,19 @@ +# Agent Config +name: !env AGENT_NAME +type: openai +version: 0.0.1 +description: main agent leveraging OpenAI function call API. +prompt_template: !prompt ZeroShotVanillaPrompt +llm: + model_name: gpt-4-0613 + params: + temperature: 0.0 + top_p: 0.9 + repetition_penalty: 1.0 + max_tokens: 1024 +target_tasks: + - anything +plugins: + - name: google_search + - name: web_page + - !include sample_agent.yaml diff --git a/examples/custom_agent/configs/include.yaml b/examples/custom_agent/configs/include.yaml new file mode 100755 index 0000000..d5ad8f3 --- /dev/null +++ b/examples/custom_agent/configs/include.yaml @@ -0,0 +1,19 @@ +# Agent Config +name: main +type: openai +version: 0.0.1 +description: main agent leveraging OpenAI function call API. +prompt_template: !prompt ZeroShotVanillaPrompt +llm: + model_name: gpt-4-0613 + params: + temperature: 0.0 + top_p: 0.9 + repetition_penalty: 1.0 + max_tokens: 1024 +target_tasks: + - anything +plugins: + - name: google_search + - name: web_page + - !include sample_agent.yaml diff --git a/examples/custom_agent/configs/sample_agent.yaml b/examples/custom_agent/configs/sample_agent.yaml new file mode 100755 index 0000000..26f1445 --- /dev/null +++ b/examples/custom_agent/configs/sample_agent.yaml @@ -0,0 +1,18 @@ +# Agent Config +name: main +type: openai +version: 0.0.1 +description: main agent leveraging OpenAI function call API. 
+prompt_template: !prompt ZeroShotVanillaPrompt +llm: + model_name: gpt-4-0613 + params: + temperature: 0.0 + top_p: 0.9 + repetition_penalty: 1.0 + max_tokens: 1024 +target_tasks: + - anything +plugins: + - name: google_search + - name: web_page diff --git a/examples/custom_agent/custom_agent.ipynb b/examples/custom_agent/custom_agent.ipynb new file mode 100755 index 0000000..eb6196c --- /dev/null +++ b/examples/custom_agent/custom_agent.ipynb @@ -0,0 +1,715 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To make it easier for users to use, Gentopia has expanded on the yaml format and added some features, such as referencing files and using environment variables.\n", + "\n", + "The detailed list is shown in the following table:\n", + "\n", + "| Feature | Syntax | Description |\n", + "| --- | --- |------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|\n", + "| Reference file | `!include` | This syntax can be used to reference other files, such as `!include ./config.yaml`. All contents of the file will be treated as yaml for parsing. This can be used to reference other agents. |\n", + "| Reference Prompt | `!prompt` | This syntax can be used to reference prompts, such as `!prompt ZeroShotVanillaPrompt`. This will reference a pre-defined prompt in Gentopia. To import a custom prompt, use the full Python path, such as `!prompt gentopia.prompt.ZeroShotVanillaPrompt`. |\n", + "| Use environment variable | `!env` | This syntax can be used to reference environment variables, such as `!env GENTOPIA_HOME`. |\n", + "| Reference file | `!file` | This syntax can be used to reference files, such as `!file ./config.yaml`. All contents of the file will be treated as a string for parsing. |" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from gentopia.assembler.agent_assembler import AgentAssembler\n", + "from gentopia.output import enable_log\n", + "from gentopia import chat" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a sample agent that uses the `!prompt` syntax to reference a pre-defined prompt in Gentopia." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
RewooAgent(\n",
+       "    name='mathria',\n",
+       "    type=<AgentType.rewoo: 'rewoo'>,\n",
+       "    version='0.0.1',\n",
+       "    description='A math agent capable of solving very complicated math problems. Ask it to solve very hard math \n",
+       "problems.',\n",
+       "    target_tasks=['solving math problems', 'answer things about math'],\n",
+       "    llm={\n",
+       "        'Planner': OpenAIGPTClient(\n",
+       "            model_name='gpt-3.5-turbo-0613',\n",
+       "            params=OpenAIParamModel(\n",
+       "                max_tokens=1024,\n",
+       "                temperature=0.0,\n",
+       "                top_p=0.9,\n",
+       "                presence_penalty=0.0,\n",
+       "                frequency_penalty=0.0,\n",
+       "                n=1,\n",
+       "                stop=[]\n",
+       "            )\n",
+       "        ),\n",
+       "        'Solver': OpenAIGPTClient(\n",
+       "            model_name='gpt-3.5-turbo-0613',\n",
+       "            params=OpenAIParamModel(\n",
+       "                max_tokens=1024,\n",
+       "                temperature=0.0,\n",
+       "                top_p=0.9,\n",
+       "                presence_penalty=0.0,\n",
+       "                frequency_penalty=0.0,\n",
+       "                n=1,\n",
+       "                stop=[]\n",
+       "            )\n",
+       "        )\n",
+       "    },\n",
+       "    prompt_template={\n",
+       "        'Planner': PromptTemplate(\n",
+       "            input_variables=['tool_description', 'task'],\n",
+       "            output_parser=None,\n",
+       "            partial_variables={},\n",
+       "            template=\"You are an AI agent who makes step-by-step plans to solve a problem under the help of \n",
+       "external tools. \\nFor each step, make one plan followed by one tool-call, which will be executed later to retrieve \n",
+       "evidence for that step.\\nYou should store each evidence into a distinct variable #E1, #E2, #E3 ... that can be \n",
+       "referred to in later tool-call inputs.    \\n\\n##Available Tools##\\n{tool_description}\\n\\n##Output Format (Replace \n",
+       "'<...>')##\\n#Plan1: <describe your plan here>\\n#E1: <toolname>[<input here>] (eg. Search[What is Python])\\n#Plan2: \n",
+       "<describe next plan>\\n#E2: <toolname>[<input here, you can use #E1 to represent its expected output>]\\nAnd so \n",
+       "on...\\n  \\n##Your Task##\\n{task}\\n\\n##Now Begin##\\n\",\n",
+       "            template_format='f-string',\n",
+       "            validate_template=True\n",
+       "        ),\n",
+       "        'Solver': PromptTemplate(\n",
+       "            input_variables=['plan_evidence', 'task'],\n",
+       "            output_parser=None,\n",
+       "            partial_variables={},\n",
+       "            template='You are an AI agent who solves a problem with my assistance. I will provide step-by-step \n",
+       "plans(#Plan) and evidences(#E) that could be helpful.\\nYour task is to briefly summarize each step, then make a \n",
+       "short final conclusion for your task.\\n\\n##My Plans and Evidences##\\n{plan_evidence}\\n\\n##Example Output##\\nFirst, \n",
+       "I <did something> , and I think <...>; Second, I <...>, and I think <...>; ....\\nSo, <your conclusion>.\\n\\n##Your \n",
+       "Task##\\n{task}\\n\\n##Now Begin##\\n',\n",
+       "            template_format='f-string',\n",
+       "            validate_template=True\n",
+       "        )\n",
+       "    },\n",
+       "    plugins=[\n",
+       "        WolframAlpha(\n",
+       "            name='wolfram_alpha',\n",
+       "            description='A WolframAlpha search engine. Useful when you need to solve a complicated Mathematical or \n",
+       "Algebraic equation. Input should be an equation or function.',\n",
+       "            args_schema=<class 'pydantic.main.WolframAlphaArgs'>,\n",
+       "            verbose=False,\n",
+       "            handle_tool_error=False\n",
+       "        )\n",
+       "    ],\n",
+       "    args_schema=<class 'pydantic.main.ArgsSchema'>,\n",
+       "    memory=None,\n",
+       "    examples={}\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001B[1;35mRewooAgent\u001B[0m\u001B[1m(\u001B[0m\n", + " \u001B[33mname\u001B[0m=\u001B[32m'mathria'\u001B[0m,\n", + " \u001B[33mtype\u001B[0m=\u001B[1m<\u001B[0m\u001B[1;95mAgentType.rewoo:\u001B[0m\u001B[39m \u001B[0m\u001B[32m'rewoo'\u001B[0m\u001B[39m>,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mversion\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'0.0.1'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'A math agent capable of solving very complicated math problems. Ask it to solve very hard math \u001B[0m\n", + "\u001B[32mproblems.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtarget_tasks\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'solving math problems'\u001B[0m\u001B[39m, \u001B[0m\u001B[32m'answer things about math'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mllm\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[32m'Planner'\u001B[0m\u001B[39m: \u001B[0m\u001B[1;35mOpenAIGPTClient\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmodel_name\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'gpt-3.5-turbo-0613'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mparams\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIParamModel\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmax_tokens\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1024\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemperature\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtop_p\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.9\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpresence_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mfrequency_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mn\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mstop\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[32m'Solver'\u001B[0m\u001B[39m: \u001B[0m\u001B[1;35mOpenAIGPTClient\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmodel_name\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'gpt-3.5-turbo-0613'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mparams\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIParamModel\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmax_tokens\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1024\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemperature\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtop_p\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.9\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpresence_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m 
\u001B[0m\u001B[33mfrequency_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mn\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mstop\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mprompt_template\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[32m'Planner'\u001B[0m\u001B[39m: \u001B[0m\u001B[1;35mPromptTemplate\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33minput_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'tool_description'\u001B[0m\u001B[39m, \u001B[0m\u001B[32m'task'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33moutput_parser\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpartial_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate\u001B[0m\u001B[39m=\u001B[0m\u001B[32m\"You\u001B[0m\u001B[32m are an AI agent who makes step-by-step plans to solve a problem under the help of \u001B[0m\n", + "\u001B[32mexternal tools. \\nFor each step, make one plan followed by one tool-call, which will be executed later to retrieve \u001B[0m\n", + "\u001B[32mevidence for that step.\\nYou should store each evidence into a distinct variable #E1, #E2, #E3 ... that can be \u001B[0m\n", + "\u001B[32mreferred to in later tool-call inputs. \\n\\n##Available Tools##\\n\u001B[0m\u001B[32m{\u001B[0m\u001B[32mtool_description\u001B[0m\u001B[32m}\u001B[0m\u001B[32m\\n\\n##Output Format \u001B[0m\u001B[32m(\u001B[0m\u001B[32mReplace \u001B[0m\n", + "\u001B[32m'<...>'\u001B[0m\u001B[32m)\u001B[0m\u001B[32m##\\n#Plan1: \\n#E1: \u001B[0m\u001B[32m[\u001B[0m\u001B[32m\u001B[0m\u001B[32m]\u001B[0m\u001B[32m \u001B[0m\u001B[32m(\u001B[0m\u001B[32meg. 
Search\u001B[0m\u001B[32m[\u001B[0m\u001B[32mWhat is Python\u001B[0m\u001B[32m]\u001B[0m\u001B[32m)\u001B[0m\u001B[32m\\n#Plan2: \u001B[0m\n", + "\u001B[32m\\n#E2: \u001B[0m\u001B[32m[\u001B[0m\u001B[32m\u001B[0m\u001B[32m]\u001B[0m\u001B[32m\\nAnd so \u001B[0m\n", + "\u001B[32mon...\\n \\n##Your Task##\\n\u001B[0m\u001B[32m{\u001B[0m\u001B[32mtask\u001B[0m\u001B[32m}\u001B[0m\u001B[32m\\n\\n##Now Begin##\\n\"\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate_format\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'f-string'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mvalidate_template\u001B[0m\u001B[39m=\u001B[0m\u001B[3;92mTrue\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[32m'Solver'\u001B[0m\u001B[39m: \u001B[0m\u001B[1;35mPromptTemplate\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33minput_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'plan_evidence'\u001B[0m\u001B[39m, \u001B[0m\u001B[32m'task'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33moutput_parser\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpartial_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'You are an AI agent who solves a problem with my assistance. I will provide step-by-step \u001B[0m\n", + "\u001B[32mplans\u001B[0m\u001B[32m(\u001B[0m\u001B[32m#Plan\u001B[0m\u001B[32m)\u001B[0m\u001B[32m and evidences\u001B[0m\u001B[32m(\u001B[0m\u001B[32m#E\u001B[0m\u001B[32m)\u001B[0m\u001B[32m that could be helpful.\\nYour task is to briefly summarize each step, then make a \u001B[0m\n", + "\u001B[32mshort final conclusion for your task.\\n\\n##My Plans and Evidences##\\n\u001B[0m\u001B[32m{\u001B[0m\u001B[32mplan_evidence\u001B[0m\u001B[32m}\u001B[0m\u001B[32m\\n\\n##Example Output##\\nFirst, \u001B[0m\n", + "\u001B[32mI , and I think <...>; Second, I <...>, and I think <...>; ....\\nSo, .\\n\\n##Your \u001B[0m\n", + "\u001B[32mTask##\\n\u001B[0m\u001B[32m{\u001B[0m\u001B[32mtask\u001B[0m\u001B[32m}\u001B[0m\u001B[32m\\n\\n##Now Begin##\\n'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate_format\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'f-string'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mvalidate_template\u001B[0m\u001B[39m=\u001B[0m\u001B[3;92mTrue\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mplugins\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mWolframAlpha\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'wolfram_alpha'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'A WolframAlpha search engine. Useful when you need to solve a complicated Mathematical or \u001B[0m\n", + "\u001B[32mAlgebraic equation. 
Input should be an equation or function.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=\u001B[0m,\n", + " \u001B[33mmemory\u001B[0m=\u001B[3;35mNone\u001B[0m,\n", + " \u001B[33mexamples\u001B[0m=\u001B[1m{\u001B[0m\u001B[1m}\u001B[0m\n", + "\u001B[1m)\u001B[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "assembler = AgentAssembler(file='configs/sample_agent.yaml')\n", + "agent = assembler.get_agent()\n", + "print(agent)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a sample agent that uses the `!include` syntax to reference another agent." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
OpenAIFunctionChatAgent(\n",
+       "    name='main',\n",
+       "    type=<AgentType.openai: 'openai'>,\n",
+       "    version='0.0.1',\n",
+       "    description='main agent leveraging OpenAI function call API.',\n",
+       "    target_tasks=['anything'],\n",
+       "    llm=OpenAIGPTClient(\n",
+       "        model_name='gpt-4-0613',\n",
+       "        params=OpenAIParamModel(\n",
+       "            max_tokens=1024,\n",
+       "            temperature=0.0,\n",
+       "            top_p=0.9,\n",
+       "            presence_penalty=0.0,\n",
+       "            frequency_penalty=0.0,\n",
+       "            n=1,\n",
+       "            stop=[]\n",
+       "        )\n",
+       "    ),\n",
+       "    prompt_template=PromptTemplate(\n",
+       "        input_variables=['instruction'],\n",
+       "        output_parser=None,\n",
+       "        partial_variables={},\n",
+       "        template='{instruction}',\n",
+       "        template_format='f-string',\n",
+       "        validate_template=True\n",
+       "    ),\n",
+       "    plugins=[\n",
+       "        GoogleSearch(\n",
+       "            name='GoogleSearch',\n",
+       "            description='Tool that searches results from Google. Input should be a search query.',\n",
+       "            args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n",
+       "            verbose=False,\n",
+       "            handle_tool_error=False\n",
+       "        ),\n",
+       "        WebPage(\n",
+       "            name='WebPage',\n",
+       "            description='Worker that can get web pages through url. Useful when you have a  url and need to find \n",
+       "detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input \n",
+       "should be a url.',\n",
+       "            args_schema=<class 'pydantic.main.WebPageArgs'>,\n",
+       "            verbose=False,\n",
+       "            handle_tool_error=False\n",
+       "        ),\n",
+       "        OpenAIFunctionChatAgent(\n",
+       "            name='main',\n",
+       "            type=<AgentType.openai: 'openai'>,\n",
+       "            version='0.0.1',\n",
+       "            description='main agent leveraging OpenAI function call API.',\n",
+       "            target_tasks=['anything'],\n",
+       "            llm=OpenAIGPTClient(\n",
+       "                model_name='gpt-4-0613',\n",
+       "                params=OpenAIParamModel(\n",
+       "                    max_tokens=1024,\n",
+       "                    temperature=0.0,\n",
+       "                    top_p=0.9,\n",
+       "                    presence_penalty=0.0,\n",
+       "                    frequency_penalty=0.0,\n",
+       "                    n=1,\n",
+       "                    stop=[]\n",
+       "                )\n",
+       "            ),\n",
+       "            prompt_template=PromptTemplate(\n",
+       "                input_variables=['instruction'],\n",
+       "                output_parser=None,\n",
+       "                partial_variables={},\n",
+       "                template='{instruction}',\n",
+       "                template_format='f-string',\n",
+       "                validate_template=True\n",
+       "            ),\n",
+       "            plugins=[\n",
+       "                GoogleSearch(\n",
+       "                    name='GoogleSearch',\n",
+       "                    description='Tool that searches results from Google. Input should be a search query.',\n",
+       "                    args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n",
+       "                    verbose=False,\n",
+       "                    handle_tool_error=False\n",
+       "                ),\n",
+       "                WebPage(\n",
+       "                    name='WebPage',\n",
+       "                    description='Worker that can get web pages through url. Useful when you have a  url and need to\n",
+       "find detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input\n",
+       "should be a url.',\n",
+       "                    args_schema=<class 'pydantic.main.WebPageArgs'>,\n",
+       "                    verbose=False,\n",
+       "                    handle_tool_error=False\n",
+       "                )\n",
+       "            ],\n",
+       "            args_schema=<class 'pydantic.main.ArgsSchema'>,\n",
+       "            memory=None,\n",
+       "            examples=None,\n",
+       "            message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n",
+       "        )\n",
+       "    ],\n",
+       "    args_schema=<class 'pydantic.main.ArgsSchema'>,\n",
+       "    memory=None,\n",
+       "    examples=None,\n",
+       "    message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001B[1;35mOpenAIFunctionChatAgent\u001B[0m\u001B[1m(\u001B[0m\n", + " \u001B[33mname\u001B[0m=\u001B[32m'main'\u001B[0m,\n", + " \u001B[33mtype\u001B[0m=\u001B[1m<\u001B[0m\u001B[1;95mAgentType.openai:\u001B[0m\u001B[39m \u001B[0m\u001B[32m'openai'\u001B[0m\u001B[39m>,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mversion\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'0.0.1'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'main agent leveraging OpenAI function call API.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtarget_tasks\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'anything'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mllm\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIGPTClient\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmodel_name\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'gpt-4-0613'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mparams\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIParamModel\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmax_tokens\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1024\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemperature\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtop_p\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.9\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpresence_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mfrequency_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mn\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mstop\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mprompt_template\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mPromptTemplate\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33minput_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'instruction'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33moutput_parser\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpartial_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'\u001B[0m\u001B[32m{\u001B[0m\u001B[32minstruction\u001B[0m\u001B[32m}\u001B[0m\u001B[32m'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate_format\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'f-string'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mvalidate_template\u001B[0m\u001B[39m=\u001B[0m\u001B[3;92mTrue\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mplugins\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mGoogleSearch\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m 
\u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'GoogleSearch'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Tool that searches results from Google. Input should be a search query.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mWebPage\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'WebPage'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Worker that can get web pages through url. Useful when you have a url and need to find \u001B[0m\n", + "\u001B[32mdetailed information.You must make sure that the url is real and correct, come from plugin or user input.Input \u001B[0m\n", + "\u001B[32mshould be a url.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mOpenAIFunctionChatAgent\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'main'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtype\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mversion\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'0.0.1'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'main agent leveraging OpenAI function call API.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtarget_tasks\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'anything'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mllm\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIGPTClient\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmodel_name\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'gpt-4-0613'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mparams\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIParamModel\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmax_tokens\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1024\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemperature\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtop_p\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.9\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpresence_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mfrequency_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m 
\u001B[0m\u001B[33mn\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mstop\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mprompt_template\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mPromptTemplate\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33minput_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'instruction'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33moutput_parser\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpartial_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'\u001B[0m\u001B[32m{\u001B[0m\u001B[32minstruction\u001B[0m\u001B[32m}\u001B[0m\u001B[32m'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate_format\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'f-string'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mvalidate_template\u001B[0m\u001B[39m=\u001B[0m\u001B[3;92mTrue\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mplugins\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mGoogleSearch\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'GoogleSearch'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Tool that searches results from Google. Input should be a search query.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mWebPage\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'WebPage'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Worker that can get web pages through url. 
Useful when you have a url and need to\u001B[0m\n", + "\u001B[32mfind detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input\u001B[0m\n", + "\u001B[32mshould be a url.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmemory\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mexamples\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmessage_scratchpad\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m{\u001B[0m\u001B[32m'role'\u001B[0m\u001B[39m: \u001B[0m\u001B[32m'system'\u001B[0m\u001B[39m, \u001B[0m\u001B[32m'content'\u001B[0m\u001B[39m: \u001B[0m\u001B[32m'You are a helpful AI assistant.'\u001B[0m\u001B[1;39m}\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=\u001B[0m,\n", + " \u001B[33mmemory\u001B[0m=\u001B[3;35mNone\u001B[0m,\n", + " \u001B[33mexamples\u001B[0m=\u001B[3;35mNone\u001B[0m,\n", + " \u001B[33mmessage_scratchpad\u001B[0m=\u001B[1m[\u001B[0m\u001B[1m{\u001B[0m\u001B[32m'role'\u001B[0m: \u001B[32m'system'\u001B[0m, \u001B[32m'content'\u001B[0m: \u001B[32m'You are a helpful AI assistant.'\u001B[0m\u001B[1m}\u001B[0m\u001B[1m]\u001B[0m\n", + "\u001B[1m)\u001B[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "assembler = AgentAssembler(file='configs/include.yaml')\n", + "agent = assembler.get_agent()\n", + "print(agent)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a sample agent that uses the `!env` syntax to reference environment variables." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.environ['AGENT_NAME'] = 'gentopia'" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
OpenAIFunctionChatAgent(\n",
+       "    name='gentopia',\n",
+       "    type=<AgentType.openai: 'openai'>,\n",
+       "    version='0.0.1',\n",
+       "    description='main agent leveraging OpenAI function call API.',\n",
+       "    target_tasks=['anything'],\n",
+       "    llm=OpenAIGPTClient(\n",
+       "        model_name='gpt-4-0613',\n",
+       "        params=OpenAIParamModel(\n",
+       "            max_tokens=1024,\n",
+       "            temperature=0.0,\n",
+       "            top_p=0.9,\n",
+       "            presence_penalty=0.0,\n",
+       "            frequency_penalty=0.0,\n",
+       "            n=1,\n",
+       "            stop=[]\n",
+       "        )\n",
+       "    ),\n",
+       "    prompt_template=PromptTemplate(\n",
+       "        input_variables=['instruction'],\n",
+       "        output_parser=None,\n",
+       "        partial_variables={},\n",
+       "        template='{instruction}',\n",
+       "        template_format='f-string',\n",
+       "        validate_template=True\n",
+       "    ),\n",
+       "    plugins=[\n",
+       "        GoogleSearch(\n",
+       "            name='GoogleSearch',\n",
+       "            description='Tool that searches results from Google. Input should be a search query.',\n",
+       "            args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n",
+       "            verbose=False,\n",
+       "            handle_tool_error=False\n",
+       "        ),\n",
+       "        WebPage(\n",
+       "            name='WebPage',\n",
+       "            description='Worker that can get web pages through url. Useful when you have a  url and need to find \n",
+       "detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input \n",
+       "should be a url.',\n",
+       "            args_schema=<class 'pydantic.main.WebPageArgs'>,\n",
+       "            verbose=False,\n",
+       "            handle_tool_error=False\n",
+       "        ),\n",
+       "        OpenAIFunctionChatAgent(\n",
+       "            name='main',\n",
+       "            type=<AgentType.openai: 'openai'>,\n",
+       "            version='0.0.1',\n",
+       "            description='main agent leveraging OpenAI function call API.',\n",
+       "            target_tasks=['anything'],\n",
+       "            llm=OpenAIGPTClient(\n",
+       "                model_name='gpt-4-0613',\n",
+       "                params=OpenAIParamModel(\n",
+       "                    max_tokens=1024,\n",
+       "                    temperature=0.0,\n",
+       "                    top_p=0.9,\n",
+       "                    presence_penalty=0.0,\n",
+       "                    frequency_penalty=0.0,\n",
+       "                    n=1,\n",
+       "                    stop=[]\n",
+       "                )\n",
+       "            ),\n",
+       "            prompt_template=PromptTemplate(\n",
+       "                input_variables=['instruction'],\n",
+       "                output_parser=None,\n",
+       "                partial_variables={},\n",
+       "                template='{instruction}',\n",
+       "                template_format='f-string',\n",
+       "                validate_template=True\n",
+       "            ),\n",
+       "            plugins=[\n",
+       "                GoogleSearch(\n",
+       "                    name='GoogleSearch',\n",
+       "                    description='Tool that searches results from Google. Input should be a search query.',\n",
+       "                    args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n",
+       "                    verbose=False,\n",
+       "                    handle_tool_error=False\n",
+       "                ),\n",
+       "                WebPage(\n",
+       "                    name='WebPage',\n",
+       "                    description='Worker that can get web pages through url. Useful when you have a  url and need to\n",
+       "find detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input\n",
+       "should be a url.',\n",
+       "                    args_schema=<class 'pydantic.main.WebPageArgs'>,\n",
+       "                    verbose=False,\n",
+       "                    handle_tool_error=False\n",
+       "                )\n",
+       "            ],\n",
+       "            args_schema=<class 'pydantic.main.ArgsSchema'>,\n",
+       "            memory=None,\n",
+       "            examples=None,\n",
+       "            message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n",
+       "        )\n",
+       "    ],\n",
+       "    args_schema=<class 'pydantic.main.ArgsSchema'>,\n",
+       "    memory=None,\n",
+       "    examples=None,\n",
+       "    message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001B[1;35mOpenAIFunctionChatAgent\u001B[0m\u001B[1m(\u001B[0m\n", + " \u001B[33mname\u001B[0m=\u001B[32m'gentopia'\u001B[0m,\n", + " \u001B[33mtype\u001B[0m=\u001B[1m<\u001B[0m\u001B[1;95mAgentType.openai:\u001B[0m\u001B[39m \u001B[0m\u001B[32m'openai'\u001B[0m\u001B[39m>,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mversion\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'0.0.1'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'main agent leveraging OpenAI function call API.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtarget_tasks\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'anything'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mllm\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIGPTClient\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmodel_name\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'gpt-4-0613'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mparams\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIParamModel\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmax_tokens\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1024\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemperature\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtop_p\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.9\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpresence_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mfrequency_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mn\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mstop\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mprompt_template\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mPromptTemplate\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33minput_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'instruction'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33moutput_parser\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpartial_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'\u001B[0m\u001B[32m{\u001B[0m\u001B[32minstruction\u001B[0m\u001B[32m}\u001B[0m\u001B[32m'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate_format\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'f-string'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mvalidate_template\u001B[0m\u001B[39m=\u001B[0m\u001B[3;92mTrue\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mplugins\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mGoogleSearch\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m 
\u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'GoogleSearch'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Tool that searches results from Google. Input should be a search query.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mWebPage\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'WebPage'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Worker that can get web pages through url. Useful when you have a url and need to find \u001B[0m\n", + "\u001B[32mdetailed information.You must make sure that the url is real and correct, come from plugin or user input.Input \u001B[0m\n", + "\u001B[32mshould be a url.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mOpenAIFunctionChatAgent\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'main'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtype\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mversion\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'0.0.1'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'main agent leveraging OpenAI function call API.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtarget_tasks\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'anything'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mllm\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIGPTClient\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmodel_name\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'gpt-4-0613'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mparams\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mOpenAIParamModel\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmax_tokens\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1024\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemperature\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtop_p\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.9\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpresence_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mfrequency_penalty\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m0\u001B[0m\u001B[1;36m.0\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m 
\u001B[0m\u001B[33mn\u001B[0m\u001B[39m=\u001B[0m\u001B[1;36m1\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mstop\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mprompt_template\u001B[0m\u001B[39m=\u001B[0m\u001B[1;35mPromptTemplate\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33minput_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[32m'instruction'\u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33moutput_parser\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mpartial_variables\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m{\u001B[0m\u001B[1;39m}\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'\u001B[0m\u001B[32m{\u001B[0m\u001B[32minstruction\u001B[0m\u001B[32m}\u001B[0m\u001B[32m'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mtemplate_format\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'f-string'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mvalidate_template\u001B[0m\u001B[39m=\u001B[0m\u001B[3;92mTrue\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mplugins\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mGoogleSearch\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'GoogleSearch'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Tool that searches results from Google. Input should be a search query.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;35mWebPage\u001B[0m\u001B[1;39m(\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mname\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'WebPage'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mdescription\u001B[0m\u001B[39m=\u001B[0m\u001B[32m'Worker that can get web pages through url. 
Useful when you have a url and need to\u001B[0m\n", + "\u001B[32mfind detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input\u001B[0m\n", + "\u001B[32mshould be a url.'\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mverbose\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mhandle_tool_error\u001B[0m\u001B[39m=\u001B[0m\u001B[3;91mFalse\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmemory\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mexamples\u001B[0m\u001B[39m=\u001B[0m\u001B[3;35mNone\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33mmessage_scratchpad\u001B[0m\u001B[39m=\u001B[0m\u001B[1;39m[\u001B[0m\u001B[1;39m{\u001B[0m\u001B[32m'role'\u001B[0m\u001B[39m: \u001B[0m\u001B[32m'system'\u001B[0m\u001B[39m, \u001B[0m\u001B[32m'content'\u001B[0m\u001B[39m: \u001B[0m\u001B[32m'You are a helpful AI assistant.'\u001B[0m\u001B[1;39m}\u001B[0m\u001B[1;39m]\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m)\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[1;39m]\u001B[0m\u001B[39m,\u001B[0m\n", + "\u001B[39m \u001B[0m\u001B[33margs_schema\u001B[0m\u001B[39m=\u001B[0m,\n", + " \u001B[33mmemory\u001B[0m=\u001B[3;35mNone\u001B[0m,\n", + " \u001B[33mexamples\u001B[0m=\u001B[3;35mNone\u001B[0m,\n", + " \u001B[33mmessage_scratchpad\u001B[0m=\u001B[1m[\u001B[0m\u001B[1m{\u001B[0m\u001B[32m'role'\u001B[0m: \u001B[32m'system'\u001B[0m, \u001B[32m'content'\u001B[0m: \u001B[32m'You are a helpful AI assistant.'\u001B[0m\u001B[1m}\u001B[0m\u001B[1m]\u001B[0m\n", + "\u001B[1m)\u001B[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "assembler = AgentAssembler(file='configs/env.yaml')\n", + "agent = assembler.get_agent()\n", + "print(agent)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.11" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/gentopia/__init__.py b/gentopia/__init__.py index 5a03b5d..284adf1 100644 --- a/gentopia/__init__.py +++ b/gentopia/__init__.py @@ -1 +1,30 @@ -from .assembler.agent_assembler import AgentAssembler \ No newline at end of file +import signal + +from .assembler.agent_assembler import AgentAssembler +from .output import enable_log +from .output.base_output import * +from .output.console_output import ConsoleOutput + + +def chat(agent, output = ConsoleOutput(), verbose = False, log_level = None, log_path = None): + output.panel_print("[green]Welcome to Gentopia!", title="[blue]Gentopia") + if verbose: + output.panel_print(str(agent), title="[red]Agent") + if log_level is not None: + if not check_log(): + enable_log( path=log_path, log_level=log_level) + def handler(signum, frame): + output.print("\n[red]Bye!") + exit(0) + + 
signal.signal(signal.SIGINT, handler) + + while True: + output.print("[green]User: ", end="") + text = input() + if text: + response = agent.stream(text, output=output) + else: + response = agent.stream(output=output) + + output.done(_all=True) \ No newline at end of file diff --git a/gentopia/agent/react/agent.py b/gentopia/agent/react/agent.py index f03a2ce..25f5b9f 100644 --- a/gentopia/agent/react/agent.py +++ b/gentopia/agent/react/agent.py @@ -146,6 +146,7 @@ def run(self, instruction): return AgentOutput(output=response.content, cost=total_cost, token_usage=total_token) def stream(self, instruction: Optional[str] = None, output: Optional[BaseOutput] = None): + self.intermediate_steps.clear() total_cost = 0.0 total_token = 0 if output is None: @@ -166,8 +167,6 @@ def stream(self, instruction: Optional[str] = None, output: Optional[BaseOutput] # print(i.content) output.clear() - - logging.info(f"Response: {content}") self.intermediate_steps.append([self._parse_output(content), ]) if isinstance(self.intermediate_steps[-1][0], AgentFinish): diff --git a/gentopia/output/__init__.py b/gentopia/output/__init__.py index 1d0752c..5b0daac 100755 --- a/gentopia/output/__init__.py +++ b/gentopia/output/__init__.py @@ -1,7 +1,10 @@ import logging import os + def enable_log(path: str = "./agent.log", log_level: str= "info" ): + if path is None: + path = "./agent.log" os.environ["LOG_LEVEL"] = log_level os.environ["LOG_PATH"] = path logging.basicConfig(level=log_level.upper(), filename=path) diff --git a/gentopia/utils/util.py b/gentopia/utils/util.py index 0b12a57..f25cae7 100755 --- a/gentopia/utils/util.py +++ b/gentopia/utils/util.py @@ -46,4 +46,24 @@ #TODO: get default client param model def get_default_client_param_model(model_name:str) -> BaseParamModel: - return None \ No newline at end of file + return None + +def print_tree(obj, indent=0): + for attr in dir(obj): + if not attr.startswith('_'): + value = getattr(obj, attr) + if not callable(value): + if not isinstance(value, dict) and not isinstance(value, list): + print('| ' * indent + '|--', f'{attr}: {value}') + else: + if not value: + print('| ' * indent + '|--', f'{attr}: {value}') + print('| ' * indent + '|--', f'{attr}:') + if hasattr(value, '__dict__'): + print_tree(value, indent + 1) + elif isinstance(value, list): + for item in value: + print_tree(item, indent + 1) + elif isinstance(value, dict): + for key, item in value.items(): + print_tree(item, indent + 1) \ No newline at end of file diff --git a/test.py b/test.py index 1c48edc..0ed6151 100755 --- a/test.py +++ b/test.py @@ -12,6 +12,7 @@ from gentopia.model.param_model import HuggingfaceParamModel from gentopia.output import enable_log +from gentopia import chat from gentopia.output.console_output import ConsoleOutput import logging @@ -47,24 +48,23 @@ def print_tree(obj, indent=0): print_tree(item, indent + 1) -def ask(agent): - out = ConsoleOutput() - - def handler(signum, frame): - out.print("\n[red]Bye!") - exit(0) - - signal.signal(signal.SIGINT, handler) - while True: - out.print("[green]User: ", end="") - text = input() - if text: - response = agent.stream(text, output=out) - else: - response = agent.stream(output=out) - - out.done(_all=True) - print("\n") +# def ask(agent, output = ConsoleOutput()): +# +# def handler(signum, frame): +# output.print("\n[red]Bye!") +# exit(0) +# +# signal.signal(signal.SIGINT, handler) +# while True: +# output.print("[green]User: ", end="") +# text = input() +# if text: +# response = agent.stream(text, output=output) +# else: +# 
response = agent.stream(output=output) +# +# output.done(_all=True) + # print("\n") if __name__ == '__main__': @@ -72,13 +72,13 @@ def handler(signum, frame): # config = Config.load('main.yaml') # then tell me what is GIL in python # print(config)calculate sqrt(10),then tell me what is GIL in python, and then calculate sqrt(100) # exit(0)give me some sentences in markdown format - enable_log(log_level='debug') + enable_log(log_level='info') assembler = AgentAssembler(file='configs/mathria.yaml') # # assembler.manager = LocalLLMManager() agent = assembler.get_agent() - ask(agent) + chat(agent) # # print(agent) # x = " What is Trump's current age raised to the 0.43 power?"
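Putting the new pieces together: the sketch below is a minimal end-to-end usage example, not taken verbatim from the patch. It combines the extended YAML tags documented in examples/custom_agent/custom_agent.ipynb (!env and !prompt) with the chat() helper added to gentopia/__init__.py, in the same style as examples/basic_usage/basic_usage.py and examples/agent_as_plugin/agent_as_plugin.py. The YAML is adapted from examples/custom_agent/configs/env.yaml; the file name demo_agent.yaml is purely illustrative, and it assumes the required API keys (e.g. OPENAI_API_KEY) are already set in the environment.

# Minimal end-to-end sketch of the features added in this patch, assuming it is run
# from the repository root with the required API keys already exported.
import os

from gentopia.assembler.agent_assembler import AgentAssembler
from gentopia.output import enable_log
from gentopia import chat

# Adapted from examples/custom_agent/configs/env.yaml; demo_agent.yaml is illustrative.
DEMO_CONFIG = """\
# Agent Config
name: !env AGENT_NAME        # resolved from the process environment when the YAML is parsed
type: openai
version: 0.0.1
description: main agent leveraging OpenAI function call API.
prompt_template: !prompt ZeroShotVanillaPrompt   # pre-defined prompt shipped with Gentopia
llm:
  model_name: gpt-4-0613
  params:
    temperature: 0.0
    top_p: 0.9
    repetition_penalty: 1.0
    max_tokens: 1024
target_tasks:
  - anything
plugins:
  - name: google_search
  - name: web_page
"""

if __name__ == '__main__':
    # !env AGENT_NAME must be set before the assembler reads the config.
    os.environ['AGENT_NAME'] = 'gentopia'

    with open('demo_agent.yaml', 'w') as f:
        f.write(DEMO_CONFIG)

    enable_log()  # defaults to ./agent.log at INFO level
    assembler = AgentAssembler(file='demo_agent.yaml')
    agent = assembler.get_agent()

    # verbose=True panel-prints the assembled agent before the interactive loop starts;
    # Ctrl+C exits cleanly via the SIGINT handler installed inside chat().
    chat(agent, verbose=True)

Running this drops into the same interactive loop as the new examples: chat() panel-prints a welcome banner (plus the assembled agent when verbose=True), then repeatedly reads user input and streams the agent's response until interrupted.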