-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathai.py
155 lines (119 loc) · 5.25 KB
/
ai.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
import json
import os
from openai import OpenAI
import toolbox
from dotenv import load_dotenv
# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()
client = OpenAI()
# NOTE(review): OpenAI() already reads OPENAI_API_KEY from the environment on
# construction; this explicit assignment is presumably redundant — confirm
# before removing.
client.api_key = os.environ.get("OPENAI_API_KEY")
# Model name used for every chat completion in this module.
OPENAI_MODEL = "gpt-3.5-turbo"
def ensure_data_directory():
    """Create the local ``data`` directory when it is not already present."""
    data_dir = 'data'
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
def init_json_file(file_path, default_content):
    """Seed *file_path* with *default_content* as JSON, unless it already exists.

    Existing files are never overwritten.
    """
    if os.path.exists(file_path):
        return
    with open(file_path, 'w', encoding='utf-8') as handle:
        json.dump(default_content, handle, indent=4)
def load_json(file_path, default=None):
    """Load and parse a JSON file, falling back to *default* on any failure.

    Args:
        file_path: Path of the JSON file to read.
        default: Value returned when the file is missing, empty, or contains
            invalid JSON. ``None`` (the default) means an empty dict, which
            preserves the original behavior. Callers whose files hold lists
            (e.g. ``data/history.json``) should pass ``default=[]`` so a
            missing file doesn't yield a dict where a list is expected.

    Returns:
        The parsed JSON content, or *default* (``{}`` when not given).
    """
    if default is None:
        default = {}
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
        return json.loads(content) if content else default
    except (FileNotFoundError, json.JSONDecodeError):
        return default
def save_json(file_path, data):
    """Serialize *data* to *file_path* as pretty-printed (indent=4) JSON."""
    with open(file_path, 'w', encoding='utf-8') as out:
        json.dump(data, out, indent=4)
def clear_history():
    """Reset the persisted conversation history to an empty list."""
    with open("data/history.json", 'w', encoding='utf-8') as f:
        json.dump([], f, indent=4)
def prepare_messages(message, role="user", include_history=True):
    """Assemble the chat message list to send to the model.

    The list starts with the system and character prompts read from
    ``data/ai.json``, optionally followed by the stored conversation
    history, and ends with *message* attributed to *role*.
    """
    ai_data = load_json('data/ai.json')
    messages = [
        {"role": "system", "content": ai_data.get('system_prompt', '')},
        {"role": "assistant", "content": ai_data.get('character_prompt', '')},
    ]
    if include_history:
        messages += load_json('data/history.json')
    messages.append({"role": role, "content": message})
    return messages
def plain_ai_response(message, role="user"):
    """Get a single model completion for *message*, ignoring stored history.

    Returns the assistant's reply text.
    """
    response = client.chat.completions.create(
        model=OPENAI_MODEL,
        messages=prepare_messages(message, role=role, include_history=False),
        temperature=1,
        frequency_penalty=0.7,
        presence_penalty=0,
        top_p=1,
    )
    return response.choices[0].message.content
def get_ai_response(messages):
    """Request a chat completion for *messages*, advertising available tools.

    Every registered toolbox tool is instantiated once so its JSON schema can
    be passed to the model; the raw response message (which may carry a
    ``function_call`` for the caller to resolve via handle_function_call())
    is returned.

    Args:
        messages: List of chat messages in OpenAI format.

    Returns:
        The first choice's message object from the completion.
    """
    available_tools = toolbox.get_available_tools()
    tool_instances = {tool.__name__: tool() for tool in available_tools}
    tool_schemas = [instance.schema for instance in tool_instances.values()]
    completion_args = {
        "model": OPENAI_MODEL,
        "messages": messages,
        "temperature": 1,
        "frequency_penalty": 0.7,
        "presence_penalty": 0,
        "top_p": 1,
    }
    if tool_schemas:
        # NOTE(review): "functions"/"function_call" are the deprecated OpenAI
        # parameters; "tools"/"tool_choice" are the modern equivalent. Left
        # as-is because handle_function_call() reads message.function_call.
        completion_args["functions"] = tool_schemas
        completion_args["function_call"] = "auto"
    response = client.chat.completions.create(**completion_args)
    return response.choices[0].message
def handle_function_call(ai_message, messages, tool_instances):
    """Execute the tool requested by the model and get a follow-up completion.

    If *ai_message* carries a ``function_call``, the named tool is looked up
    in *tool_instances*, invoked with the model-supplied JSON arguments, and
    both the model's message and the tool result (as a ``function``-role
    message) are appended to *messages* — the caller's list is mutated.
    A second completion is then requested so the model can phrase a final
    answer. Otherwise *ai_message* is returned unchanged.

    Args:
        ai_message: Message object returned by a prior completion.
        messages: Conversation so far; extended in place on a tool call.
        tool_instances: Mapping of tool class name -> tool instance.

    Returns:
        The message object of the second completion, or *ai_message* when no
        function call was requested.
    """
    if ai_message.function_call:
        chosen_tool = tool_instances.get(ai_message.function_call.name)
        # Parse the JSON arguments once (the original parsed them twice).
        function_args = json.loads(ai_message.function_call.arguments)
        print(f"AI used tool: {ai_message.function_call.name} with arguments: " + str(function_args))
        function_to_call = chosen_tool.func
        # Call the function with unpacked arguments
        function_response = function_to_call(**function_args)
        messages.append(ai_message)
        messages.append({
            "role": "function",
            "name": ai_message.function_call.name,
            "content": str(function_response)
        })
        second_response = client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=messages,
            temperature=1,
            frequency_penalty=0.7,
            presence_penalty=0,
            top_p=1,
        )
        return second_response.choices[0].message
    return ai_message
def update_history(message, reply_text):
    """Append the latest user/assistant exchange to ``data/history.json``."""
    conversation = load_json('data/history.json')
    conversation.append({"role": "user", "content": message})
    conversation.append({"role": "assistant", "content": reply_text})
    save_json('data/history.json', conversation)
def ai_response(message):
    """Produce the assistant's reply to *message*, resolving any tool call.

    Side effect: the exchange is appended to ``data/history.json``.
    """
    messages = prepare_messages(message)
    ai_message = get_ai_response(messages)
    # Rebuild the tool instances so a requested function can be dispatched.
    tool_instances = {
        tool.__name__: tool() for tool in toolbox.get_available_tools()
    }
    ai_message = handle_function_call(ai_message, messages, tool_instances)
    reply_text = ai_message.content
    update_history(message, reply_text)
    return reply_text
def init_data_files():
    """Create the data directory and seed ai.json / history.json defaults."""
    ensure_data_directory()
    default_ai_config = {
        "system_prompt": "You are a helpful assistant.",
        "character_prompt": "You are ChatGPT, a large language model trained by OpenAI."
    }
    init_json_file('data/ai.json', default_ai_config)
    init_json_file('data/history.json', [])
if __name__ == "__main__":
    # Demo entry point: one hard-coded query that exercises the tool-calling
    # path (see the sample response in get_ai_response for the expected tool).
    # NOTE(review): init_data_files() is never called here — presumably the
    # data/ files must already exist; confirm before running standalone.
    user_message = "Find birthdays in the next 20 days"
    response = ai_response(user_message)
    print(response)