diff --git a/.gitignore b/.gitignore
index 1cc5c95..7e28d0d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -165,3 +165,4 @@ cython_debug/
 #  option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
 /api.key
+/examples/test.ipynb
diff --git a/configs/memory.yaml b/configs/memory.yaml
index be97f97..a83c56f 100644
--- a/configs/memory.yaml
+++ b/configs/memory.yaml
@@ -6,8 +6,8 @@ description: main agent leveraging OpenAI function call API.
 prompt_template: !prompt ZeroShotVanillaPrompt
 memory:
   memory_type: chroma # chroma or pinecone
-  threshold_1: 3 # first-level memory
-  threshold_2: 3 # second-level memory
+  threshold_1: 1 # first-level memory
+  threshold_2: 1 # second-level memory
   params:
     index: main
     top_k: 2
diff --git a/configs/react.yaml b/configs/react.yaml
index 91b71b9..f98b56e 100755
--- a/configs/react.yaml
+++ b/configs/react.yaml
@@ -32,8 +32,5 @@ plugins:
 #    - answer things about math
 
 
-## Authentication
-auth:
-  OPENAI_API_KEY: !file /home/api.key
-  WOLFRAM_ALPHA_APPID: !file /home/wolfram.key
+
 
diff --git a/examples/agent_as_plugin/agent_as_plugin.py b/examples/agent_as_plugin/agent_as_plugin.py
new file mode 100755
index 0000000..3957a5e
--- /dev/null
+++ b/examples/agent_as_plugin/agent_as_plugin.py
@@ -0,0 +1,9 @@
+from gentopia.assembler.agent_assembler import AgentAssembler
+from gentopia.output import enable_log
+from gentopia import chat
+
+if __name__ == '__main__':
+    enable_log()
+    assembler = AgentAssembler(file='configs/main.yaml')
+    agent = assembler.get_agent()
+    chat(agent, verbose=True)
diff --git a/examples/basic_usage/basic_usage.py b/examples/basic_usage/basic_usage.py
new file mode 100755
index 0000000..5f2552d
--- /dev/null
+++ b/examples/basic_usage/basic_usage.py
@@ -0,0 +1,9 @@
+from gentopia.assembler.agent_assembler import AgentAssembler
+from gentopia.output import enable_log
+from gentopia import chat
+
+if __name__ == '__main__':
+    enable_log()
+    assembler = AgentAssembler(file='configs/mathria.yaml')
+    agent = assembler.get_agent()
+    chat(agent)
diff --git a/examples/custom_agent/configs/env.yaml b/examples/custom_agent/configs/env.yaml
new file mode 100755
index 0000000..323f43d
--- /dev/null
+++ b/examples/custom_agent/configs/env.yaml
@@ -0,0 +1,19 @@
+# Agent Config
+name: !env AGENT_NAME
+type: openai
+version: 0.0.1
+description: main agent leveraging OpenAI function call API.
+prompt_template: !prompt ZeroShotVanillaPrompt
+llm:
+  model_name: gpt-4-0613
+  params:
+    temperature: 0.0
+    top_p: 0.9
+    repetition_penalty: 1.0
+    max_tokens: 1024
+target_tasks:
+  - anything
+plugins:
+  - name: google_search
+  - name: web_page
+  - !include sample_agent.yaml
diff --git a/examples/custom_agent/configs/include.yaml b/examples/custom_agent/configs/include.yaml
new file mode 100755
index 0000000..d5ad8f3
--- /dev/null
+++ b/examples/custom_agent/configs/include.yaml
@@ -0,0 +1,19 @@
+# Agent Config
+name: main
+type: openai
+version: 0.0.1
+description: main agent leveraging OpenAI function call API.
+prompt_template: !prompt ZeroShotVanillaPrompt
+llm:
+  model_name: gpt-4-0613
+  params:
+    temperature: 0.0
+    top_p: 0.9
+    repetition_penalty: 1.0
+    max_tokens: 1024
+target_tasks:
+  - anything
+plugins:
+  - name: google_search
+  - name: web_page
+  - !include sample_agent.yaml
diff --git a/examples/custom_agent/configs/sample_agent.yaml b/examples/custom_agent/configs/sample_agent.yaml
new file mode 100755
index 0000000..26f1445
--- /dev/null
+++ b/examples/custom_agent/configs/sample_agent.yaml
@@ -0,0 +1,18 @@
+# Agent Config
+name: main
+type: openai
+version: 0.0.1
+description: main agent leveraging OpenAI function call API.
+prompt_template: !prompt ZeroShotVanillaPrompt
+llm:
+  model_name: gpt-4-0613
+  params:
+    temperature: 0.0
+    top_p: 0.9
+    repetition_penalty: 1.0
+    max_tokens: 1024
+target_tasks:
+  - anything
+plugins:
+  - name: google_search
+  - name: web_page
diff --git a/examples/custom_agent/custom_agent.ipynb b/examples/custom_agent/custom_agent.ipynb
new file mode 100755
index 0000000..eb6196c
--- /dev/null
+++ b/examples/custom_agent/custom_agent.ipynb
@@ -0,0 +1,715 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To make configuration easier, Gentopia extends the YAML format with a few extra features, such as referencing files and using environment variables.\n",
+    "\n",
+    "The supported tags are listed in the following table:\n",
+    "\n",
+    "| Feature | Syntax | Description |\n",
+    "| --- | --- | --- |\n",
+    "| Include file | `!include` | References another file, such as `!include ./config.yaml`. The file's contents are parsed as YAML, so this can be used to reference other agents. |\n",
+    "| Reference prompt | `!prompt` | References a prompt, such as `!prompt ZeroShotVanillaPrompt`, which resolves to a pre-defined prompt in Gentopia. To import a custom prompt, use the full Python path, such as `!prompt gentopia.prompt.ZeroShotVanillaPrompt`. |\n",
+    "| Use environment variable | `!env` | References an environment variable, such as `!env GENTOPIA_HOME`. |\n",
+    "| Reference file as string | `!file` | References a file, such as `!file ./config.yaml`. The file's contents are read in as a single string. |"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from gentopia.assembler.agent_assembler import AgentAssembler\n",
+    "from gentopia.output import enable_log\n",
+    "from gentopia import chat"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This is a sample agent that uses the `!prompt` syntax to reference a pre-defined prompt in Gentopia."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "
RewooAgent(\n", + " name='mathria',\n", + " type=<AgentType.rewoo: 'rewoo'>,\n", + " version='0.0.1',\n", + " description='A math agent capable of solving very complicated math problems. Ask it to solve very hard math \n", + "problems.',\n", + " target_tasks=['solving math problems', 'answer things about math'],\n", + " llm={\n", + " 'Planner': OpenAIGPTClient(\n", + " model_name='gpt-3.5-turbo-0613',\n", + " params=OpenAIParamModel(\n", + " max_tokens=1024,\n", + " temperature=0.0,\n", + " top_p=0.9,\n", + " presence_penalty=0.0,\n", + " frequency_penalty=0.0,\n", + " n=1,\n", + " stop=[]\n", + " )\n", + " ),\n", + " 'Solver': OpenAIGPTClient(\n", + " model_name='gpt-3.5-turbo-0613',\n", + " params=OpenAIParamModel(\n", + " max_tokens=1024,\n", + " temperature=0.0,\n", + " top_p=0.9,\n", + " presence_penalty=0.0,\n", + " frequency_penalty=0.0,\n", + " n=1,\n", + " stop=[]\n", + " )\n", + " )\n", + " },\n", + " prompt_template={\n", + " 'Planner': PromptTemplate(\n", + " input_variables=['tool_description', 'task'],\n", + " output_parser=None,\n", + " partial_variables={},\n", + " template=\"You are an AI agent who makes step-by-step plans to solve a problem under the help of \n", + "external tools. \\nFor each step, make one plan followed by one tool-call, which will be executed later to retrieve \n", + "evidence for that step.\\nYou should store each evidence into a distinct variable #E1, #E2, #E3 ... that can be \n", + "referred to in later tool-call inputs. \\n\\n##Available Tools##\\n{tool_description}\\n\\n##Output Format (Replace \n", + "'<...>')##\\n#Plan1: <describe your plan here>\\n#E1: <toolname>[<input here>] (eg. Search[What is Python])\\n#Plan2: \n", + "<describe next plan>\\n#E2: <toolname>[<input here, you can use #E1 to represent its expected output>]\\nAnd so \n", + "on...\\n \\n##Your Task##\\n{task}\\n\\n##Now Begin##\\n\",\n", + " template_format='f-string',\n", + " validate_template=True\n", + " ),\n", + " 'Solver': PromptTemplate(\n", + " input_variables=['plan_evidence', 'task'],\n", + " output_parser=None,\n", + " partial_variables={},\n", + " template='You are an AI agent who solves a problem with my assistance. I will provide step-by-step \n", + "plans(#Plan) and evidences(#E) that could be helpful.\\nYour task is to briefly summarize each step, then make a \n", + "short final conclusion for your task.\\n\\n##My Plans and Evidences##\\n{plan_evidence}\\n\\n##Example Output##\\nFirst, \n", + "I <did something> , and I think <...>; Second, I <...>, and I think <...>; ....\\nSo, <your conclusion>.\\n\\n##Your \n", + "Task##\\n{task}\\n\\n##Now Begin##\\n',\n", + " template_format='f-string',\n", + " validate_template=True\n", + " )\n", + " },\n", + " plugins=[\n", + " WolframAlpha(\n", + " name='wolfram_alpha',\n", + " description='A WolframAlpha search engine. Useful when you need to solve a complicated Mathematical or \n", + "Algebraic equation. 
Input should be an equation or function.',\n", + " args_schema=<class 'pydantic.main.WolframAlphaArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " )\n", + " ],\n", + " args_schema=<class 'pydantic.main.ArgsSchema'>,\n", + " memory=None,\n", + " examples={}\n", + ")\n", + "\n" + ],
OpenAIFunctionChatAgent(\n", + " name='main',\n", + " type=<AgentType.openai: 'openai'>,\n", + " version='0.0.1',\n", + " description='main agent leveraging OpenAI function call API.',\n", + " target_tasks=['anything'],\n", + " llm=OpenAIGPTClient(\n", + " model_name='gpt-4-0613',\n", + " params=OpenAIParamModel(\n", + " max_tokens=1024,\n", + " temperature=0.0,\n", + " top_p=0.9,\n", + " presence_penalty=0.0,\n", + " frequency_penalty=0.0,\n", + " n=1,\n", + " stop=[]\n", + " )\n", + " ),\n", + " prompt_template=PromptTemplate(\n", + " input_variables=['instruction'],\n", + " output_parser=None,\n", + " partial_variables={},\n", + " template='{instruction}',\n", + " template_format='f-string',\n", + " validate_template=True\n", + " ),\n", + " plugins=[\n", + " GoogleSearch(\n", + " name='GoogleSearch',\n", + " description='Tool that searches results from Google. Input should be a search query.',\n", + " args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " ),\n", + " WebPage(\n", + " name='WebPage',\n", + " description='Worker that can get web pages through url. Useful when you have a url and need to find \n", + "detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input \n", + "should be a url.',\n", + " args_schema=<class 'pydantic.main.WebPageArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " ),\n", + " OpenAIFunctionChatAgent(\n", + " name='main',\n", + " type=<AgentType.openai: 'openai'>,\n", + " version='0.0.1',\n", + " description='main agent leveraging OpenAI function call API.',\n", + " target_tasks=['anything'],\n", + " llm=OpenAIGPTClient(\n", + " model_name='gpt-4-0613',\n", + " params=OpenAIParamModel(\n", + " max_tokens=1024,\n", + " temperature=0.0,\n", + " top_p=0.9,\n", + " presence_penalty=0.0,\n", + " frequency_penalty=0.0,\n", + " n=1,\n", + " stop=[]\n", + " )\n", + " ),\n", + " prompt_template=PromptTemplate(\n", + " input_variables=['instruction'],\n", + " output_parser=None,\n", + " partial_variables={},\n", + " template='{instruction}',\n", + " template_format='f-string',\n", + " validate_template=True\n", + " ),\n", + " plugins=[\n", + " GoogleSearch(\n", + " name='GoogleSearch',\n", + " description='Tool that searches results from Google. Input should be a search query.',\n", + " args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " ),\n", + " WebPage(\n", + " name='WebPage',\n", + " description='Worker that can get web pages through url. 
Useful when you have a url and need to\n", + "find detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input\n", + "should be a url.',\n", + " args_schema=<class 'pydantic.main.WebPageArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " )\n", + " ],\n", + " args_schema=<class 'pydantic.main.ArgsSchema'>,\n", + " memory=None,\n", + " examples=None,\n", + " message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n", + " )\n", + " ],\n", + " args_schema=<class 'pydantic.main.ArgsSchema'>,\n", + " memory=None,\n", + " examples=None,\n", + " message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n", + ")\n", + "\n" + ],
OpenAIFunctionChatAgent(\n", + " name='gentopia',\n", + " type=<AgentType.openai: 'openai'>,\n", + " version='0.0.1',\n", + " description='main agent leveraging OpenAI function call API.',\n", + " target_tasks=['anything'],\n", + " llm=OpenAIGPTClient(\n", + " model_name='gpt-4-0613',\n", + " params=OpenAIParamModel(\n", + " max_tokens=1024,\n", + " temperature=0.0,\n", + " top_p=0.9,\n", + " presence_penalty=0.0,\n", + " frequency_penalty=0.0,\n", + " n=1,\n", + " stop=[]\n", + " )\n", + " ),\n", + " prompt_template=PromptTemplate(\n", + " input_variables=['instruction'],\n", + " output_parser=None,\n", + " partial_variables={},\n", + " template='{instruction}',\n", + " template_format='f-string',\n", + " validate_template=True\n", + " ),\n", + " plugins=[\n", + " GoogleSearch(\n", + " name='GoogleSearch',\n", + " description='Tool that searches results from Google. Input should be a search query.',\n", + " args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " ),\n", + " WebPage(\n", + " name='WebPage',\n", + " description='Worker that can get web pages through url. Useful when you have a url and need to find \n", + "detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input \n", + "should be a url.',\n", + " args_schema=<class 'pydantic.main.WebPageArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " ),\n", + " OpenAIFunctionChatAgent(\n", + " name='main',\n", + " type=<AgentType.openai: 'openai'>,\n", + " version='0.0.1',\n", + " description='main agent leveraging OpenAI function call API.',\n", + " target_tasks=['anything'],\n", + " llm=OpenAIGPTClient(\n", + " model_name='gpt-4-0613',\n", + " params=OpenAIParamModel(\n", + " max_tokens=1024,\n", + " temperature=0.0,\n", + " top_p=0.9,\n", + " presence_penalty=0.0,\n", + " frequency_penalty=0.0,\n", + " n=1,\n", + " stop=[]\n", + " )\n", + " ),\n", + " prompt_template=PromptTemplate(\n", + " input_variables=['instruction'],\n", + " output_parser=None,\n", + " partial_variables={},\n", + " template='{instruction}',\n", + " template_format='f-string',\n", + " validate_template=True\n", + " ),\n", + " plugins=[\n", + " GoogleSearch(\n", + " name='GoogleSearch',\n", + " description='Tool that searches results from Google. Input should be a search query.',\n", + " args_schema=<class 'pydantic.main.GoogleSearchArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " ),\n", + " WebPage(\n", + " name='WebPage',\n", + " description='Worker that can get web pages through url. 
Useful when you have a url and need to\n", + "find detailed information.You must make sure that the url is real and correct, come from plugin or user input.Input\n", + "should be a url.',\n", + " args_schema=<class 'pydantic.main.WebPageArgs'>,\n", + " verbose=False,\n", + " handle_tool_error=False\n", + " )\n", + " ],\n", + " args_schema=<class 'pydantic.main.ArgsSchema'>,\n", + " memory=None,\n", + " examples=None,\n", + " message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n", + " )\n", + " ],\n", + " args_schema=<class 'pydantic.main.ArgsSchema'>,\n", + " memory=None,\n", + " examples=None,\n", + " message_scratchpad=[{'role': 'system', 'content': 'You are a helpful AI assistant.'}]\n", + ")\n", + "\n" + ],