grace_chatbot.py
import re
import json
import logging
from typing import Dict, Callable

from openai_chatbot import OpenAIChatbot
from router import Router
from knowledge_base import KnowledgeBase


class GRACEChatbot(OpenAIChatbot):
    INITIAL_PROMPT_TEMPLATE = """You are an AI assistant for {business_name}, {business_description}. You process customers' requests as follows:
1. Greet the customer and ask how you can be of help.
2. Identify the customer's request and the backend command to process it. You refuse to process the request if it's not among the commands available to you.
3. Ensure you have the values for all of the parameters required by the backend command. Collect from the customer any values you don't have. Do not collect information that is not required. No values are available to you except for those provided by the customer. If the customer cannot provide you with a value, you refuse to process their request.
4. Ask the customer to hold on and then process their request by sending a command JSON to the backend in the following format:
AI: All right, let me look into this for you. <script>{command_example_json}</script>
Backend: (To AI) {command_example_result}
5. Communicate the execution result back to the customer and ask if there's anything else you can do for them.
6. If there's nothing else, say goodbye and output "END".
Only the following Python commands are available to you:
{commands_string}
You can use the look_up command to look up answers to questions related to {business_name}. For example:
Customer: Do you have parking on site?
AI: <script>{{"command": "look_up", "params": {{"question": "Do you have parking on site?"}}}}</script>
Backend: (To AI) On-site parking is available
You use all dates exactly as provided by the customer, without rephrasing or converting them. {extra_instructions}
A transcript of your chat session with a customer follows.
"""

    NAMES = ("AI", "Customer")
    BACKEND_NAME = "Backend"
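
    # The backend protocol established by the prompt above: the model emits a
    # command as <script>{"command": ..., "params": {...}}</script>, the
    # backend executes it, and the result is fed back to the model prefixed
    # with "Backend: (To AI)". Using the template's own look_up example:
    #
    #   AI: <script>{"command": "look_up", "params": {"question": "Do you have parking on site?"}}</script>
    #   Backend: (To AI) On-site parking is available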

    def __init__(
        self,
        openai,
        backend: Router,
        domain: Dict[str, str],
        output_callback: Callable[[str], None],
        openai_model: str = "text-davinci-003",
        openai_endpoint: str = "completions"
    ):
        self.knowledge_base = KnowledgeBase(domain["answers"])

        # Register the built-in look_up command so the model can query the
        # knowledge base in addition to the domain-specific commands.
        @backend.command(desc="look up a question",
                         example_params=("What are your opening hours?",))
        def look_up(question: str) -> str:
            answer, score = self.knowledge_base.look_up(question)
            logging.debug(f"Knowledge base lookup score: {score}")
            return answer if score > 0.4 else "Cannot answer the question"

        command_example = domain["command_example"]
        command_example_json = json.dumps({
            "command": command_example["command"],
            "params": command_example["params"]
        })

        # One bullet per registered backend command, embedded into the prompt.
        commands_string = "\n".join(
            [f'- {c["python_sig"]} - {c["desc"]}. Example JSON: <script>{c["example_json"]}</script>'
             for c in backend.registry.values()])

        initial_prompt = self.INITIAL_PROMPT_TEMPLATE.format(
            **domain,
            command_example_json=command_example_json,
            command_example_result=command_example["result"],
            commands_string=commands_string
        )

        super().__init__(openai=openai,
                         initial_prompt=initial_prompt,
                         output_callback=output_callback,
                         names=self.NAMES,
                         openai_model=openai_model,
                         openai_endpoint=openai_endpoint)

        # Stop generation as soon as the model starts speaking for the backend.
        self.stop.append(f"{self.BACKEND_NAME}:")
        self.backend = backend
        self.domain = domain
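
    # Each registry entry is rendered into {commands_string} roughly as below
    # (illustrative only; the exact python_sig/example_json strings are
    # produced by Router in router.py):
    #
    #   - look_up(question: str) -> str - look up a question. Example JSON: <script>{"command": "look_up", "params": {"question": "What are your opening hours?"}}</script>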

    def _get_all_utterances(self):
        utterance = self._get_next_utterance()

        # Split the model's output into the customer-facing utterance and an
        # optional <script>...</script> backend command.
        m = re.match(r"(.*?)($|<script>(.*?)</script>)",
                     utterance, re.IGNORECASE | re.DOTALL)
        utterance = m[1].strip()
        command_json = m[3]

        if utterance:
            self.output_callback(utterance)

        # Keep only the parsed portion of the model's output in the prompt.
        if self.prompt is not None:
            self.prompt = f"{self.prompt} {m[0]}"

        if command_json:
            logging.debug(f"Invoking backend command: {repr(command_json)}")
            try:
                result = self.backend.invoke(command_json)
                logging.debug(f"Got backend response: {repr(result)}")
            except Exception as e:
                result = str(e)
                logging.error(e)

            if self.prompt is not None:
                self._add_response(self.BACKEND_NAME, f"(To AI) {result}")
                # Let the model react to the backend's response.
                self._get_all_utterances()
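

# A minimal usage sketch (not part of the original module). It mirrors the
# constructor signature and the domain keys referenced above ("answers",
# "command_example" with "command"/"params"/"result", plus the template's
# {business_name}, {business_description} and {extra_instructions} fields).
# The sample business data, the book_table command, the value format that
# KnowledgeBase expects for "answers", and the start_session() entry point on
# OpenAIChatbot are assumptions for illustration only.
#
# if __name__ == "__main__":
#     import openai
#
#     backend = Router()
#
#     @backend.command(desc="book a table", example_params=("2023-06-01", 4))
#     def book_table(date: str, num_people: int) -> str:
#         return f"Booked a table for {num_people} people on {date}"
#
#     domain = {
#         "business_name": "Del Monaco",
#         "business_description": "an Italian restaurant",
#         "extra_instructions": "",
#         "answers": ["On-site parking is available"],
#         "command_example": {
#             "command": "book_table",
#             "params": {"date": "2023-06-01", "num_people": 4},
#             "result": "Booked a table for 4 people on 2023-06-01"
#         }
#     }
#
#     chatbot = GRACEChatbot(openai=openai, backend=backend, domain=domain,
#                            output_callback=print)
#     chatbot.start_session()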