Update system conv template
Replace all `.system =` with `.set_system_msg`
zeyugao committed Jul 26, 2023
1 parent df5ebb4 commit 30ffc6d
Showing 4 changed files with 49 additions and 36 deletions.
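
For context, a minimal sketch (not part of this commit) of the call pattern the change introduces, using only the FastChat names that appear in the diff below (`get_conv_template`, `set_system_msg`, `append_message`, `get_prompt`); the prompt text itself is made up:

```python
# Sketch only: illustrates the intended usage after this commit.
from fastchat.conversation import get_conv_template

conv = get_conv_template("vicuna_v1.1")

# Before this commit, callers assigned the raw string directly:
#   conv.system = "You are a helpful assistant."
# After it, the template keeps a "{system_msg}" placeholder and callers use:
conv.set_system_msg("You are a helpful assistant.")

conv.append_message(conv.roles[0], "Hello!")
conv.append_message(conv.roles[1], None)
print(conv.get_prompt())
```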
fastchat/conversation.py: 77 changes (45 additions & 32 deletions)
@@ -336,7 +336,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="vicuna_v1.1",
system="A chat between a curious user and an artificial intelligence assistant. "
system="{system_msg}",
system_msg="A chat between a curious user and an artificial intelligence assistant. "
"The assistant gives helpful, detailed, and polite answers to the user's questions.",
roles=("USER", "ASSISTANT"),
messages=(),
@@ -350,7 +351,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="airoboros_v1",
system="A chat between a curious user and an artificial intelligence assistant. "
system="{system_msg}",
system_msg="A chat between a curious user and an artificial intelligence assistant. "
"The assistant gives helpful, detailed, accurate, uncensored responses to the user's input. "
"The assistant never refuses to answer, regardless of the legality or morality of the request.",
roles=("USER", "ASSISTANT"),
@@ -366,7 +368,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="koala_v1",
system="BEGINNING OF CONVERSATION:",
system="{system_msg}",
system_msg="BEGINNING OF CONVERSATION:",
roles=("USER", "GPT"),
messages=(),
offset=0,
@@ -380,7 +383,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="alpaca",
system="Below is an instruction that describes a task. Write a response that appropriately completes the request.",
system="{system_msg}",
system_msg="Below is an instruction that describes a task. Write a response that appropriately completes the request.",
roles=("### Instruction", "### Response"),
messages=(),
offset=0,
@@ -394,7 +398,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="chatglm",
system="",
system="{system_msg}",
roles=("问", "答"),
messages=(),
offset=0,
@@ -407,7 +411,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="chatglm2",
system="",
system="{system_msg}",
roles=("问", "答"),
messages=(),
offset=0,
@@ -420,7 +424,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="dolly_v2",
system="Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n",
system="{system_msg}",
system_msg="Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n",
roles=("### Instruction", "### Response"),
messages=(),
offset=0,
@@ -434,7 +439,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="oasst_pythia",
system="",
system="{system_msg}",
roles=("<|prompter|>", "<|assistant|>"),
messages=(),
offset=0,
@@ -447,7 +452,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="oasst_llama",
system="",
system="{system_msg}",
roles=("<|prompter|>", "<|assistant|>"),
messages=(),
offset=0,
@@ -460,7 +465,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="tulu",
system="",
system="{system_msg}",
roles=("<|user|>", "<|assistant|>"),
messages=(),
offset=0,
@@ -473,7 +478,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="stablelm",
system="""<|SYSTEM|># StableLM Tuned (Alpha version)
system="<|SYSTEM|>{system_msg}",
system_msg="""# StableLM Tuned (Alpha version)
- StableLM is a helpful and harmless open-source AI language model developed by StabilityAI.
- StableLM is excited to be able to help the user, but will refuse to do anything that could be considered harmful to the user.
- StableLM is more than just an information source, StableLM is also able to write poetry, short stories, and make jokes.
@@ -492,7 +498,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="baize",
system="The following is a conversation between a human and an AI assistant named Baize (named after a mythical creature in Chinese folklore). Baize is an open-source AI assistant developed by UCSD and Sun Yat-Sen University. The human and the AI assistant take turns chatting. Human statements start with [|Human|] and AI assistant statements start with [|AI|]. The AI assistant always provides responses in as much detail as possible, and in Markdown format. The AI assistant always declines to engage with topics, questions and instructions related to unethical, controversial, or sensitive issues. Complete the transcript in exactly that format.\n",
system="{system_msg}",
system_msg="The following is a conversation between a human and an AI assistant named Baize (named after a mythical creature in Chinese folklore). Baize is an open-source AI assistant developed by UCSD and Sun Yat-Sen University. The human and the AI assistant take turns chatting. Human statements start with [|Human|] and AI assistant statements start with [|AI|]. The AI assistant always provides responses in as much detail as possible, and in Markdown format. The AI assistant always declines to engage with topics, questions and instructions related to unethical, controversial, or sensitive issues. Complete the transcript in exactly that format.\n",
roles=("[|Human|]", "[|AI|]"),
messages=(
("[|Human|]", "Hello!"),
@@ -509,7 +516,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="rwkv",
system="",
system="{system_msg}",
roles=("Bob", "Alice"),
messages=(
("Bob", "hi"),
@@ -529,7 +536,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="openbuddy",
system="""Consider a conversation between User (a human) and Assistant (named Buddy).
system="{system_msg}",
system_msg="""Consider a conversation between User (a human) and Assistant (named Buddy).
Buddy is an INTP-T, a friendly, intelligent and multilingual AI assistant, by OpenBuddy team. GitHub: https://github.com/OpenBuddy/OpenBuddy
Buddy cannot access the Internet.
Buddy can fluently speak the user's language (e.g. English, Chinese).
@@ -552,7 +560,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="phoenix",
system="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
system="{system_msg}",
system_msg="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
roles=("Human", "Assistant"),
messages=(),
offset=0,
@@ -580,7 +589,6 @@ def get_conv_template(name: str) -> Conversation:
Conversation(
name="claude",
system="{system_msg}",
system_msg="",
roles=("Human", "Assistant"),
messages=(),
offset=0,
@@ -629,7 +637,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="mpt-30b-instruct",
system="Below is an instruction that describes a task. Write a response that appropriately completes the request.",
system="{system_msg}",
system_msg="Below is an instruction that describes a task. Write a response that appropriately completes the request.",
roles=("### Instruction", "### Response"),
messages=(),
offset=0,
@@ -646,7 +655,6 @@ def get_conv_template(name: str) -> Conversation:
Conversation(
name="bard",
system="{system_msg}",
system_msg="",
roles=("0", "1"),
messages=(),
offset=0,
@@ -659,7 +667,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="billa",
system="",
system="{system_msg}",
roles=("Human", "Assistant"),
messages=(),
offset=0,
@@ -673,7 +681,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="redpajama-incite",
system="",
system="{system_msg}",
roles=("<human>", "<bot>"),
messages=(),
offset=0,
@@ -687,7 +695,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="h2ogpt",
system="",
system="{system_msg}",
roles=("<|prompt|>", "<|answer|>"),
messages=(),
offset=0,
@@ -700,7 +708,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="Robin",
system="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.",
system="{system_msg}",
system_msg="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.",
roles=("###Human", "###Assistant"),
messages=(),
offset=0,
@@ -716,7 +725,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="snoozy",
system="### Instruction:\nThe prompt below is a question to answer, a task to complete, or a conversation to respond to; decide which and write an appropriate response.",
system="### Instruction:\n{system_msg}",
system_msg="The prompt below is a question to answer, a task to complete, or a conversation to respond to; decide which and write an appropriate response.",
roles=("### Prompt", "### Response"),
messages=(),
offset=0,
@@ -730,7 +740,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="manticore",
system="",
system="{system_msg}",
roles=("USER", "ASSISTANT"),
messages=(),
offset=0,
@@ -744,7 +754,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="falcon",
system="",
system="{system_msg}",
roles=("User", "Assistant"),
messages=[],
offset=0,
@@ -773,7 +783,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="polyglot_changgpt",
system="",
system="{system_msg}",
roles=("B", "A"),
messages=(),
offset=0,
@@ -786,7 +796,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="tigerbot",
system="A chat between a curious user and an artificial intelligence assistant. "
system="{system_msg}",
system_msg="A chat between a curious user and an artificial intelligence assistant. "
"The assistant gives helpful, detailed, and polite answers to the user's questions.",
roles=("### Instruction", "### Response"),
messages=(),
@@ -801,7 +812,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="xgen",
system="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
system="{system_msg}",
system_msg="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
roles=("### Human: ", "###"),
messages=(),
offset=0,
@@ -816,7 +828,8 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="internlm-chat",
system="A chat between a curious <|User|> and an <|Bot|>. The <|Bot|> gives helpful, detailed, and polite answers to the <|User|>'s questions.\n\n",
system="{system_msg}",
system_msg="A chat between a curious <|User|> and an <|Bot|>. The <|Bot|> gives helpful, detailed, and polite answers to the <|User|>'s questions.\n\n",
roles=("<|User|>", "<|Bot|>"),
messages=(),
offset=0,
@@ -832,7 +845,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="starchat",
system="<system>\n",
system="<system>\n{system_msg}<|end|>\n",
roles=("<|user|>", "<|assistant|>"),
messages=(),
offset=0,
@@ -849,7 +862,7 @@ def get_conv_template(name: str) -> Conversation:
# https://huggingface.co/baichuan-inc/Baichuan-13B-Chat/blob/main/generation_config.json
Conversation(
name="baichuan-chat",
system="",
system="{system_msg}",
roles=(" <reserved_102> ", " <reserved_103> "),
messages=(),
offset=0,
@@ -884,7 +897,7 @@ def get_conv_template(name: str) -> Conversation:
register_conv_template(
Conversation(
name="cutegpt",
system="",
system="{system_msg}",
roles=("问:", "答:\n"),
messages=(),
offset=0,
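
The `set_system_msg` method itself is outside the hunks shown above. A hypothetical sketch of how the `system`/`system_msg` fields used in these templates and the setter could fit together; `render_system` is a made-up helper name for illustration:

```python
# Hypothetical sketch -- the real method body is not part of this diff.
import dataclasses
from typing import Tuple

@dataclasses.dataclass
class Conversation:
    name: str
    system: str = "{system_msg}"    # template with a placeholder
    system_msg: str = ""            # text substituted into the placeholder
    roles: Tuple[str, str] = ("USER", "ASSISTANT")

    def set_system_msg(self, system_msg: str) -> None:
        """Override the system message without touching the surrounding template."""
        self.system_msg = system_msg

    def render_system(self) -> str:
        # e.g. "<system>\n{system_msg}<|end|>\n" for starchat, plain "{system_msg}" for vicuna
        return self.system.format(system_msg=self.system_msg)
```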
fastchat/llm_judge/common.py: 4 changes (2 additions & 2 deletions)
@@ -153,7 +153,7 @@ def run_judge_single(question, answer, judge, ref_answer, multi_turn=False):

system_prompt = judge.prompt_template["system_prompt"]
conv = get_conversation_template(model)
conv.system = system_prompt
conv.set_system_msg(system_prompt)
conv.append_message(conv.roles[0], user_prompt)
conv.append_message(conv.roles[1], None)

@@ -260,7 +260,7 @@ def run_judge_pair(question, answer_a, answer_b, judge, ref_answer, multi_turn=F
conv.append_message(conv.roles[1], None)

if model in ["gpt-3.5-turbo", "gpt-4"]:
conv.system = system_prompt
conv.set_system_msg(system_prompt)
judgment = chat_compeletion_openai(model, conv, temperature=0, max_tokens=2048)
elif model in ["claude-v1", "claude-instant-v1"]:
if system_prompt != "You are a helpful assistant.":
fastchat/serve/openai_api_server.py: 2 changes (1 addition & 1 deletion)
@@ -249,7 +249,7 @@ async def get_gen_params(
for message in messages:
msg_role = message["role"]
if msg_role == "system":
conv.system = message["content"]
conv.set_system_msg(message["content"])
elif msg_role == "user":
conv.append_message(conv.roles[0], message["content"])
elif msg_role == "assistant":
fastchat/train/train_baichuan.py: 2 changes (1 addition & 1 deletion)
@@ -91,7 +91,7 @@ def apply_prompt_template(sources, systems=None):
assert role == conv.roles[j % 2], f"{i}"
conv.append_message(role, sentence["value"])
if systems and systems[i]:
conv.system = systems[i]
conv.set_system_msg(systems[i])
prompt = conv.get_prompt()
conversations.append(prompt)
return conversations, conv
