Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat (ui/vue): add support for prepareRequestBody #4782

Merged
merged 1 commit into from
Feb 10, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/wet-fishes-sleep.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@ai-sdk/vue': patch
---

feat (ui/vue): add support for prepareRequestBody
2 changes: 1 addition & 1 deletion content/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ Allows you to easily create a conversational user interface for your chatbot app
type: '(options: { messages: UIMessage[]; requestData?: JSONValue; requestBody?: object, id: string }) => unknown',
isOptional: true,
description:
'Experimental (React & Solid only). When a function is provided, it will be used to prepare the request body for the chat API. This can be useful for customizing the request body based on the messages and data in the chat.',
'Experimental (React, Solid & Vue only). When a function is provided, it will be used to prepare the request body for the chat API. This can be useful for customizing the request body based on the messages and data in the chat.',
},
{
name: 'experimental_throttle',
Expand Down
39 changes: 39 additions & 0 deletions examples/nuxt-openai/pages/use-chat-request/index.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
<script setup lang="ts">
import { useChat } from '@ai-sdk/vue';
import { createIdGenerator } from 'ai';

const { input, handleSubmit, messages } = useChat({
  api: '/api/use-chat-request',
  // NOTE(review): presumably needed so the server receives full message
  // objects (including ids) in the custom body — confirm against the API route.
  sendExtraMessageFields: true,
  // Prefixed ids distinguish client-generated messages from server ones.
  generateId: createIdGenerator({ prefix: 'msgc', size: 16 }),

  // Send only the most recent message to the server; the matching API route
  // (server/api/use-chat-request.ts) reconstructs the history itself.
  experimental_prepareRequestBody({ messages }) {
    return {
      message: messages[messages.length - 1],
    };
  },
});

const messageList = computed(() => messages.value); // computed property for type inference
</script>

<template>
  <div class="flex flex-col w-full max-w-md py-24 mx-auto stretch">
    <!-- Render the conversation, one block per message. -->
    <div
      v-for="message in messageList"
      :key="message.id"
      class="whitespace-pre-wrap"
    >
      <strong>{{ `${message.role}: ` }}</strong>
      {{ message.content }}
    </div>

    <!-- Submitting the form sends `input` through useChat's handleSubmit. -->
    <form @submit="handleSubmit">
      <input
        class="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
        v-model="input"
        placeholder="Say something..."
      />
    </form>
  </div>
</template>
29 changes: 29 additions & 0 deletions examples/nuxt-openai/server/api/use-chat-request.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { createOpenAI } from '@ai-sdk/openai';
import { streamText, Message } from 'ai';

// Chat API route for the `use-chat-request` example page. The client's
// `experimental_prepareRequestBody` sends ONLY the latest message, so this
// handler is responsible for rebuilding the conversation history.
export default defineLazyEventHandler(async () => {
  const openai = createOpenAI({
    apiKey: useRuntimeConfig().openaiApiKey,
  });

  // No explicit `: any` on `event` — let h3's `defineEventHandler` supply the
  // inferred H3Event type instead of disabling type checking.
  return defineEventHandler(async event => {
    // Extract the single `message` from the body of the request.
    const { message } = await readBody(event);

    // Implement your own logic here to add message history
    // (e.g. load prior messages for this chat from storage).
    const previousMessages: Message[] = [];
    const messages = [...previousMessages, message];

    // Call the language model with the reconstructed conversation.
    const result = streamText({
      model: openai('gpt-4o-mini'),
      messages,
      async onFinish({ text, toolCalls, toolResults, usage, finishReason }) {
        // Implement your own logic here, e.g. for storing messages
      },
    });

    // Respond with the stream
    return result.toDataStreamResponse();
  });
});
50 changes: 50 additions & 0 deletions packages/vue/src/TestChatPrepareRequestBodyComponent.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
<script setup lang="ts">
import { ref } from 'vue';
import { UIMessage, useChat } from './use-chat';
import { JSONValue } from '@ai-sdk/ui-utils';

// Captures the options object passed to experimental_prepareRequestBody so
// the test can assert on id / messages / requestData / requestBody.
const bodyOptions = ref<{
  id: string;
  messages: UIMessage[];
  requestData?: JSONValue;
  requestBody?: object;
}>();

const { messages, append, isLoading } = useChat({
  experimental_prepareRequestBody(options) {
    bodyOptions.value = options;
    // Returned value replaces the default request body entirely.
    return 'test-request-body';
  },
});
</script>

<template>
  <div>
    <div data-testid="loading">{{ isLoading?.toString() }}</div>
    <!-- BUGFIX: `key="m.id"` was a static attribute (the literal string
         "m.id" for every item, i.e. duplicate keys); bind it with `:key`. -->
    <div
      v-for="(m, idx) in messages"
      :key="m.id"
      :data-testid="`message-${idx}`"
    >
      {{ m.role === 'user' ? 'User: ' : 'AI: ' }}
      {{ m.content }}
    </div>

    <button
      data-testid="do-append"
      @click="
        append(
          { role: 'user', content: 'hi' },
          {
            data: { 'test-data-key': 'test-data-value' },
            body: { 'request-body-key': 'request-body-value' },
          },
        )
      "
    />

    <div v-if="bodyOptions" data-testid="on-body-options">
      {{ bodyOptions }}
    </div>
  </div>
</template>
25 changes: 24 additions & 1 deletion packages/vue/src/use-chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -111,13 +111,31 @@ export function useChat(
fetch,
keepLastMessageOnError = true,
maxSteps = 1,
experimental_prepareRequestBody,
}: UseChatOptions & {
/**
* Maximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.
* A maximum number is required to prevent infinite loops in the case of misconfigured tools.
* By default, it's set to 1, which means that only a single LLM call is made.
*/
maxSteps?: number;

/**
* Experimental (Vue only). When a function is provided, it will be used
* to prepare the request body for the chat API. This can be useful for
* customizing the request body based on the messages and data in the chat.
*
* @param id The chat ID
* @param messages The current messages in the chat
* @param requestData The data object passed in the chat request
* @param requestBody The request body object passed in the chat request
*/
experimental_prepareRequestBody?: (options: {
id: string;
messages: UIMessage[];
requestData?: JSONValue;
requestBody?: object;
}) => unknown;
} = {
maxSteps: 1,
},
Expand Down Expand Up @@ -204,7 +222,12 @@ export function useChat(

await callChatApi({
api,
body: {
body: experimental_prepareRequestBody?.({
id: chatId,
messages: chatMessages,
requestData: data,
requestBody: body,
}) ?? {
id: chatId,
messages: constructedMessagesPayload,
data,
Expand Down
59 changes: 59 additions & 0 deletions packages/vue/src/use-chat.ui.test.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,65 @@ import TestChatToolInvocationsComponent from './TestChatToolInvocationsComponent
import TestChatAttachmentsComponent from './TestChatAttachmentsComponent.vue';
import TestChatUrlAttachmentsComponent from './TestChatUrlAttachmentsComponent.vue';
import TestChatAppendAttachmentsComponent from './TestChatAppendAttachmentsComponent.vue';
import TestChatPrepareRequestBodyComponent from './TestChatPrepareRequestBodyComponent.vue';

// Verifies that experimental_prepareRequestBody receives the full option set
// (id, messages, requestData, requestBody) and that its return value replaces
// the default request body sent to the chat API.
describe('prepareRequestBody', () => {
  beforeEach(() => {
    render(TestChatPrepareRequestBodyComponent);
  });

  afterEach(() => {
    vi.restoreAllMocks();
    cleanup();
  });

  it(
    'should show streamed response',
    withTestServer(
      {
        url: '/api/chat',
        type: 'stream-values',
        content: ['0:"Hello"\n', '0:","\n', '0:" world"\n', '0:"."\n'],
      },
      async ({ call }) => {
        await userEvent.click(screen.getByTestId('do-append'));

        // Wait until the component has rendered the captured options before
        // reading them — the element is empty until prepareRequestBody runs.
        await waitFor(() => {
          const element = screen.getByTestId('on-body-options');
          expect(element.textContent?.trim() ?? '').not.toBe('');
        });

        // The component stringifies the captured options object into the DOM.
        const value = JSON.parse(
          screen.getByTestId('on-body-options').textContent ?? '',
        );

        await screen.findByTestId('message-0');
        expect(screen.getByTestId('message-0')).toHaveTextContent('User: hi');
        // prepareRequestBody must have seen the chat id, the pending user
        // message, and the per-call data/body passed to append().
        expect(value).toStrictEqual({
          id: expect.any(String),
          messages: [
            {
              role: 'user',
              content: 'hi',
              id: expect.any(String),
              createdAt: expect.any(String),
              parts: [{ type: 'text', text: 'hi' }],
            },
          ],
          requestData: { 'test-data-key': 'test-data-value' },
          requestBody: { 'request-body-key': 'request-body-value' },
        });

        // The callback's return value replaced the default body wholesale.
        expect(await call(0).getRequestBodyJson()).toBe('test-request-body');

        await screen.findByTestId('message-1');
        expect(screen.getByTestId('message-1')).toHaveTextContent(
          'AI: Hello, world.',
        );
      },
    ),
  );
});

describe('data protocol stream', () => {
beforeEach(() => {
Expand Down
Loading