Skip to content

Commit

Permalink
[OPIK-1060] [FE] Add support for OpenRouter in the playground (#1389)
Browse files Browse the repository at this point in the history
  • Loading branch information
andriidudar authored Feb 27, 2025
1 parent 034839f commit 2860269
Show file tree
Hide file tree
Showing 9 changed files with 1,899 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ const PromptModelSelect = ({
}: PromptModelSelectProps) => {
const resetDialogKeyRef = useRef(0);
const inputRef = useRef<HTMLInputElement>(null);
const modelProviderMapRef = useRef<Record<string, PROVIDER_TYPE>>({});

const [openConfigDialog, setOpenConfigDialog] = React.useState(false);
const [filterValue, setFilterValue] = useState("");
Expand All @@ -79,6 +80,14 @@ const PromptModelSelect = ({
configuredProviderKeys,
);

Object.entries(filteredByConfiguredProviders).forEach(
([pn, providerModels]) => {
providerModels.forEach(({ value }) => {
modelProviderMapRef.current[value] = pn as PROVIDER_TYPE;
});
},
);

return Object.entries(filteredByConfiguredProviders)
.map(([pn, providerModels]) => {
const providerName = pn as PROVIDER_TYPE;
Expand Down Expand Up @@ -133,7 +142,10 @@ const PromptModelSelect = ({

const handleOnChange = useCallback(
(value: PROVIDER_MODEL_TYPE) => {
onChange(value, openProviderMenu as PROVIDER_TYPE);
const modelProvider = openProviderMenu
? (openProviderMenu as PROVIDER_TYPE)
: modelProviderMapRef.current[value];
onChange(value, modelProvider);
},
[onChange, openProviderMenu],
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import { Settings2 } from "lucide-react";
import {
LLMAnthropicConfigsType,
LLMOpenAIConfigsType,
LLMOpenRouterConfigsType,
LLMPromptConfigsType,
PROVIDER_TYPE,
} from "@/types/providers";
Expand All @@ -13,15 +14,16 @@ import {
DropdownMenuContent,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { Button } from "@/components/ui/button";
import { Button, ButtonProps } from "@/components/ui/button";

import OpenAIModelConfigs from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/OpenAIModelConfigs";
import AnthropicModelConfigs from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/AnthropicModelConfigs";
import isEmpty from "lodash/isEmpty";
import OpenRouterModelConfigs from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/OpenRouterModelConfigs";

interface PromptModelConfigsProps {
provider: PROVIDER_TYPE | "";
size?: "icon" | "icon-sm" | "icon-lg" | "icon-xs" | "icon-xxs";
size?: ButtonProps["size"];
configs: Partial<LLMPromptConfigsType>;
onChange: (configs: Partial<LLMPromptConfigsType>) => void;
}
Expand Down Expand Up @@ -51,6 +53,15 @@ const PromptModelConfigs = ({
);
}

if (provider === PROVIDER_TYPE.OPEN_ROUTER) {
return (
<OpenRouterModelConfigs
configs={configs as LLMOpenRouterConfigsType}
onChange={onChange}
/>
);
}

return;
};

Expand All @@ -64,7 +75,11 @@ const PromptModelConfigs = ({
</Button>
</DropdownMenuTrigger>

<DropdownMenuContent className="p-6" side="bottom" align="end">
<DropdownMenuContent
className="max-h-[70vh] overflow-y-auto p-6"
side="bottom"
align="end"
>
{getProviderForm()}
</DropdownMenuContent>
</DropdownMenu>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
import React from "react";
import isUndefined from "lodash/isUndefined";

import SliderInputControl from "@/components/shared/SliderInputControl/SliderInputControl";
import { LLMOpenRouterConfigsType } from "@/types/providers";
import { DEFAULT_OPEN_ROUTER_CONFIGS } from "@/constants/llm";
import PromptModelConfigsTooltipContent from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/PromptModelConfigsTooltipContent";

// Props for the OpenRouter settings panel.
// `configs` holds the current (possibly partial) parameter values; a slider is
// rendered only for keys that are present. `onChange` receives a partial patch
// containing just the field that changed.
interface OpenRouterModelConfigsProps {
  configs: LLMOpenRouterConfigsType;
  onChange: (configs: Partial<LLMOpenRouterConfigsType>) => void;
}

/**
 * OpenRouter-specific model configuration panel.
 *
 * Renders one SliderInputControl per supported sampling parameter. Each
 * slider is shown only when its key is defined in `configs`, so the parent
 * decides which parameters are exposed. Every user change is reported upward
 * as a partial config patch via `onChange` (e.g. `{ temperature: 0.7 }`).
 */
const OpenRouterModelConfigs = ({
  configs,
  onChange,
}: OpenRouterModelConfigsProps) => {
  return (
    <div className="flex w-72 flex-col gap-4">
      {!isUndefined(configs.temperature) && (
        <SliderInputControl
          value={configs.temperature}
          onChange={(v) => onChange({ temperature: v })}
          id="temperature"
          // NOTE(review): OpenRouter documents temperature in [0, 2]; a range
          // of [-1, 1] looks copied from elsewhere — confirm intended bounds.
          min={-1}
          max={1}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.TEMPERATURE}
          label="Temperature"
          tooltip={
            <PromptModelConfigsTooltipContent text="Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive." />
          }
        />
      )}
      {!isUndefined(configs.maxTokens) && (
        <SliderInputControl
          value={configs.maxTokens}
          onChange={(v) => onChange({ maxTokens: v })}
          id="maxTokens"
          min={0}
          max={10000}
          step={1}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.MAX_TOKENS}
          label="Max tokens"
          tooltip={
            <PromptModelConfigsTooltipContent text="The maximum number of tokens to generate shared between the prompt and completion. The exact limit varies by model. (One token is roughly 4 characters for standard English text)." />
          }
        />
      )}
      {!isUndefined(configs.topP) && (
        <SliderInputControl
          value={configs.topP}
          onChange={(v) => onChange({ topP: v })}
          id="topP"
          min={0}
          max={1}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.TOP_P}
          label="Top P"
          tooltip={
            <PromptModelConfigsTooltipContent text="Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered" />
          }
        />
      )}
      {!isUndefined(configs.topK) && (
        <SliderInputControl
          value={configs.topK}
          onChange={(v) => onChange({ topK: v })}
          id="topK"
          min={0}
          max={100}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.TOP_K}
          label="Top K"
          tooltip={
            <PromptModelConfigsTooltipContent text="This limits the model's choice of tokens at each step, making it choose from a smaller set. A value of 1 means the model will always pick the most likely next token, leading to predictable results. By default this setting is disabled, making the model to consider all choices." />
          }
        />
      )}
      {!isUndefined(configs.frequencyPenalty) && (
        <SliderInputControl
          value={configs.frequencyPenalty}
          onChange={(v) => onChange({ frequencyPenalty: v })}
          // FIX: was id="topK", which duplicated the Top K slider's id above;
          // duplicate DOM ids break label/input association.
          id="frequencyPenalty"
          min={-2}
          max={2}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.FREQUENCY_PENALTY}
          label="Frequency penalty"
          tooltip={
            <PromptModelConfigsTooltipContent text="This setting aims to control the repetition of tokens based on how often they appear in the input. It tries to use less frequently those tokens that appear more in the input, proportional to how frequently they occur. Token penalty scales with the number of occurrences. Negative values will encourage token reuse." />
          }
        />
      )}
      {!isUndefined(configs.presencePenalty) && (
        <SliderInputControl
          value={configs.presencePenalty}
          onChange={(v) => onChange({ presencePenalty: v })}
          id="presencePenalty"
          min={-2}
          max={2}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.PRESENCE_PENALTY}
          label="Presence penalty"
          tooltip={
            <PromptModelConfigsTooltipContent text="Adjusts how often the model repeats specific tokens already used in the input. Higher values make such repetition less likely, while negative values do the opposite. Token penalty does not scale with the number of occurrences. Negative values will encourage token reuse." />
          }
        />
      )}
      {!isUndefined(configs.repetitionPenalty) && (
        <SliderInputControl
          value={configs.repetitionPenalty}
          onChange={(v) => onChange({ repetitionPenalty: v })}
          id="repetitionPenalty"
          min={0}
          max={2}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.REPETITION_PENALTY}
          label="Repetition penalty"
          tooltip={
            <PromptModelConfigsTooltipContent text="Helps to reduce the repetition of tokens from the input. A higher value makes the model less likely to repeat tokens, but too high a value can make the output less coherent (often with run-on sentences that lack small words). Token penalty scales based on original token's probability." />
          }
        />
      )}
      {!isUndefined(configs.minP) && (
        <SliderInputControl
          value={configs.minP}
          onChange={(v) => onChange({ minP: v })}
          id="minP"
          min={0}
          max={1}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.MIN_P}
          label="Min P"
          tooltip={
            <PromptModelConfigsTooltipContent text="Represents the minimum probability for a token to be considered, relative to the probability of the most likely token. (The value changes depending on the confidence level of the most probable token.) If your Min-P is set to 0.1, that means it will only allow for tokens that are at least 1/10th as probable as the best possible option." />
          }
        />
      )}
      {!isUndefined(configs.topA) && (
        <SliderInputControl
          value={configs.topA}
          onChange={(v) => onChange({ topA: v })}
          id="topA"
          min={0}
          max={1}
          step={0.01}
          defaultValue={DEFAULT_OPEN_ROUTER_CONFIGS.TOP_A}
          label="Top A"
          tooltip={
            <PromptModelConfigsTooltipContent
              text={
                'Consider only the top tokens with "sufficiently high" probabilities based on the probability of the most likely token. Think of it like a dynamic Top-P. A lower Top-A value focuses the choices based on the highest probability token but with a narrower scope. A higher Top-A value does not necessarily affect the creativity of the output, but rather refines the filtering process based on the maximum probability.'
              }
            />
          }
        />
      )}
    </div>
  );
};

export default OpenRouterModelConfigs;
12 changes: 12 additions & 0 deletions apps/opik-frontend/src/constants/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,18 @@ export const DEFAULT_ANTHROPIC_CONFIGS = {
TOP_P: 1,
};

// Default OpenRouter sampling parameters used by the playground sliders.
// A value of 0 for MAX_TOKENS / TOP_K / the penalties corresponds to the
// provider-side "disabled"/neutral setting; penalties of 0 and a
// REPETITION_PENALTY of 1 apply no adjustment.
export const DEFAULT_OPEN_ROUTER_CONFIGS = {
  MAX_TOKENS: 0, // 0 = no explicit completion-length cap
  TEMPERATURE: 1, // neutral randomness
  TOP_P: 1, // nucleus sampling disabled (consider full distribution)
  TOP_K: 0, // 0 = no top-k truncation
  FREQUENCY_PENALTY: 0, // no frequency-based repetition penalty
  PRESENCE_PENALTY: 0, // no presence-based repetition penalty
  REPETITION_PENALTY: 1, // multiplicative penalty of 1 = no effect
  MIN_P: 0, // no minimum-probability filtering
  TOP_A: 0, // top-a filtering disabled
};

export const LLM_PROMPT_CUSTOM_TEMPLATE: LLMPromptTemplate = {
label: "Custom LLM-as-judge",
description:
Expand Down
10 changes: 10 additions & 0 deletions apps/opik-frontend/src/constants/providers.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import OpenAIIcon from "@/icons/integrations/openai.svg?react";
import AnthropicIcon from "@/icons/integrations/anthropic.svg?react";
import OpenRouterIcon from "@/icons/integrations/open_router.svg?react";
import OllamaIcon from "@/icons/integrations/ollama.svg?react";

import {
Expand Down Expand Up @@ -56,6 +57,15 @@ export const PROVIDERS: PROVIDERS_TYPE = {
defaultModel: PROVIDER_MODEL_TYPE.CLAUDE_3_5_SONNET_LATEST,
locationType: PROVIDER_LOCATION_TYPE.cloud,
},
[PROVIDER_TYPE.OPEN_ROUTER]: {
label: "OpenRouter",
value: PROVIDER_TYPE.OPEN_ROUTER,
icon: OpenRouterIcon,
apiKeyName: "OPENROUTER_API_KEY",
apiKeyURL: "https://openrouter.ai/keys",
defaultModel: PROVIDER_MODEL_TYPE.OPENAI_GPT_4O,
locationType: PROVIDER_LOCATION_TYPE.cloud,
},
[PROVIDER_TYPE.OLLAMA]: {
label: "Ollama (Experimental)",
value: PROVIDER_TYPE.OLLAMA,
Expand Down
Loading

0 comments on commit 2860269

Please sign in to comment.