Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add experimental telemetry model option #1603

Merged
merged 2 commits into from
Dec 31, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ import {
ServiceType,
SearchResponse,
ActionResponse,
TelemetrySettings,
} from "./types.ts";
import { fal } from "@fal-ai/client";

Expand Down Expand Up @@ -164,6 +165,9 @@ export async function generateText({
const max_response_length =
modelConfiguration?.max_response_length ||
models[provider].settings.maxOutputTokens;
const experimental_telemetry =
modelConfiguration?.experimental_telemetry ||
models[provider].settings.experimental_telemetry;

const apiKey = runtime.token;

Expand Down Expand Up @@ -209,6 +213,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = openaiResponse;
Expand All @@ -232,6 +237,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = googleResponse;
Expand All @@ -258,6 +264,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = anthropicResponse;
Expand All @@ -284,6 +291,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = anthropicResponse;
Expand Down Expand Up @@ -314,6 +322,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = grokResponse;
Expand All @@ -335,6 +344,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = groqResponse;
Expand Down Expand Up @@ -386,6 +396,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = redpillResponse;
Expand Down Expand Up @@ -413,6 +424,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = openrouterResponse;
Expand All @@ -439,6 +451,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = ollamaResponse;
Expand Down Expand Up @@ -466,6 +479,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = heuristResponse;
Expand Down Expand Up @@ -515,6 +529,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = openaiResponse;
Expand All @@ -541,6 +556,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = galadrielResponse;
Expand Down Expand Up @@ -1357,6 +1373,7 @@ interface ModelSettings {
frequencyPenalty: number;
presencePenalty: number;
stop?: string[];
experimental_telemetry?: TelemetrySettings;
}

/**
Expand Down Expand Up @@ -1392,6 +1409,7 @@ export const generateObject = async ({
const presence_penalty = models[provider].settings.presence_penalty;
const max_context_length = models[provider].settings.maxInputTokens;
const max_response_length = models[provider].settings.maxOutputTokens;
const experimental_telemetry = models[provider].settings.experimental_telemetry;
const apiKey = runtime.token;

try {
Expand All @@ -1404,6 +1422,7 @@ export const generateObject = async ({
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
stop: stop || models[provider].settings.stop,
experimental_telemetry: experimental_telemetry,
};

const response = await handleProvider({
Expand Down
29 changes: 29 additions & 0 deletions packages/core/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,9 @@ export type Model = {

/** Temperature setting */
temperature: number;

/** Optional telemetry configuration (experimental) */
experimental_telemetry?: TelemetrySettings;
};

/** Optional image generation settings */
Expand Down Expand Up @@ -628,12 +631,38 @@ export interface IAgentConfig {
[key: string]: string;
}

/**
 * Experimental telemetry configuration forwarded to the underlying model
 * call as the `experimental_telemetry` option.
 *
 * All fields are optional; omitting a field leaves the provider's default
 * in effect.
 */
export type TelemetrySettings = {
    /** Enable or disable telemetry. Disabled by default while experimental. */
    isEnabled?: boolean;
    /**
     * Enable or disable input recording. Enabled by default.
     *
     * Disable to avoid recording sensitive information, reduce data
     * transfers, or improve performance.
     */
    recordInputs?: boolean;
    /**
     * Enable or disable output recording. Enabled by default.
     *
     * Disable to avoid recording sensitive information, reduce data
     * transfers, or improve performance.
     */
    recordOutputs?: boolean;
    /** Identifier for this function. Used to group telemetry data by function. */
    functionId?: string;
};

/**
 * Optional per-configuration overrides for a provider's default model
 * settings. In `generateText`, values set here take precedence over the
 * corresponding `models[provider].settings` defaults (shown for
 * `max_response_length` and `experimental_telemetry` in this file; the
 * remaining fields presumably follow the same pattern — TODO confirm
 * against their read sites).
 */
export interface ModelConfiguration {
    // Sampling temperature override.
    temperature?: number;
    // Overrides models[provider].settings.maxOutputTokens.
    max_response_length?: number;
    // Frequency penalty override (snake_case kept for caller compatibility).
    frequency_penalty?: number;
    // Presence penalty override (snake_case kept for caller compatibility).
    presence_penalty?: number;
    // Maximum input (context) token budget override.
    maxInputTokens?: number;
    // Overrides models[provider].settings.experimental_telemetry.
    experimental_telemetry?: TelemetrySettings;
}

/**
Expand Down
Loading