From f5c0beaf62c8b764d6c74101c77914a7c84a3a70 Mon Sep 17 00:00:00 2001 From: tomguluson92 <314913739@qq.com> Date: Sat, 23 Nov 2024 19:38:10 +0800 Subject: [PATCH] Update generation.ts fix typos in the `generateText` function: Google, redpill --- packages/core/src/generation.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/core/src/generation.ts b/packages/core/src/generation.ts index d58a0fdbded..553df59e17a 100644 --- a/packages/core/src/generation.ts +++ b/packages/core/src/generation.ts @@ -113,6 +113,7 @@ export async function generateText({ ); switch (provider) { + // OPENAI & LLAMACLOUD share the same structure. case ModelProviderName.OPENAI: case ModelProviderName.LLAMACLOUD: { elizaLogger.debug("Initializing OpenAI model."); @@ -139,7 +140,7 @@ case ModelProviderName.GOOGLE: { const google = createGoogleGenerativeAI(); - const { text: anthropicResponse } = await aiGenerateText({ + const { text: googleResponse } = await aiGenerateText({ model: google(model), prompt: context, system: @@ -152,7 +153,8 @@ presencePenalty: presence_penalty, }); - response = anthropicResponse; + response = googleResponse; + elizaLogger.debug("Received response from Google model."); break; } @@ -280,7 +282,7 @@ export async function generateText({ const serverUrl = models[provider].endpoint; const openai = createOpenAI({ apiKey, baseURL: serverUrl }); - const { text: openaiResponse } = await aiGenerateText({ + const { text: redpillResponse } = await aiGenerateText({ model: openai.languageModel(model), prompt: context, temperature: temperature, @@ -293,8 +295,8 @@ presencePenalty: presence_penalty, }); - response = openaiResponse; - elizaLogger.debug("Received response from OpenAI model."); + response = redpillResponse; + elizaLogger.debug("Received response from redpill model."); break; }