[OPIK-711]: playground Anthropic integration (#1098)
* [OPIK-711] [OPIK-712]: add integrations to gemini and anthropic on the FE;

* [OPIK-711] [OPIK-712]: support the last picked model;

* [OPIK-711] [OPIK-712]: eslint issues;

* [OPIK-711] [OPIK-712]: rename models;

* [NA]: remove gemini;

---------

Co-authored-by: Sasha <aliaksandr@comet.com>
aadereiko and Sasha authored Jan 21, 2025
1 parent f230645 commit ba5b38e
Showing 14 changed files with 274 additions and 42 deletions.
@@ -76,24 +76,28 @@ const PromptModelSelect = ({
       configuredProviderKeys,
     );

-    return Object.entries(filteredByConfiguredProviders).map(
-      ([pn, providerModels]) => {
+    return Object.entries(filteredByConfiguredProviders)
+      .map(([pn, providerModels]) => {
         const providerName = pn as PROVIDER_TYPE;

+        const options = providerModels
+          .filter((m) => (onlyWithStructuredOutput ? m.structuredOutput : true))
+          .map((providerModel) => ({
+            label: providerModel.label,
+            value: providerModel.value,
+          }));
+
+        if (!options.length) {
+          return null;
+        }
+
         return {
           label: PROVIDERS[providerName].label,
-          options: providerModels
-            .filter((m) =>
-              onlyWithStructuredOutput ? m.structuredOutput : true,
-            )
-            .map((providerModel) => ({
-              label: providerModel.label,
-              value: providerModel.value,
-            })),
+          options,
           icon: PROVIDERS[providerName].icon,
         };
-      },
-    );
+      })
+      .filter((g): g is NonNullable<typeof g> => !isNull(g));
   }, [configuredProviderKeys, onlyWithStructuredOutput]);

   const filteredOptions = useMemo(() => {
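The refactor above groups models per provider and now drops any provider whose models are all filtered out (for example, when only structured-output models are requested). The following standalone sketch, which is not part of this commit, illustrates that map-then-filter pattern with simplified stand-in types (`Model` and `groupModels` are illustrative names only):

```tsx
// Illustrative sketch only — not part of this commit. `Model` and `groupModels`
// are stand-in names; the real code uses PROVIDER_TYPE and the PROVIDERS map.
import isNull from "lodash/isNull";

type Model = { label: string; value: string; structuredOutput?: boolean };

const groupModels = (
  byProvider: Record<string, Model[]>,
  onlyWithStructuredOutput: boolean,
) =>
  Object.entries(byProvider)
    .map(([provider, models]) => {
      const options = models
        .filter((m) => (onlyWithStructuredOutput ? m.structuredOutput : true))
        .map((m) => ({ label: m.label, value: m.value }));

      // Returning null lets the type guard below strip empty provider groups
      return options.length ? { label: provider, options } : null;
    })
    .filter((g): g is NonNullable<typeof g> => !isNull(g));
```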
@@ -241,7 +245,7 @@ const PromptModelSelect = ({
       return null;
     }

-    return <Icon />;
+    return <Icon className="min-w-3.5" />;
   };

   return (
@@ -258,7 +262,9 @@ const PromptModelSelect = ({
         >
           <div className="flex items-center gap-2">
             {renderProviderValueIcon()}
-            {provider && PROVIDERS[provider].label} {value}
+            <span className="truncate">
+              {provider && PROVIDERS[provider].label} {value}
+            </span>
           </div>
         </SelectValue>
       </SelectTrigger>
@@ -2,6 +2,7 @@ import React from "react";
 import { Settings2 } from "lucide-react";

 import {
+  LLMAnthropicConfigsType,
   LLMOpenAIConfigsType,
   LLMPromptConfigsType,
   PROVIDER_TYPE,
@@ -15,6 +16,7 @@ import {
 import { Button } from "@/components/ui/button";

 import OpenAIModelConfigs from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/OpenAIModelConfigs";
+import AnthropicModelConfigs from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/AnthropicModelConfigs";

 interface PromptModelConfigsProps {
   provider: PROVIDER_TYPE | "";
@@ -39,6 +41,15 @@ const PromptModelConfigs = ({
     );
   }

+  if (provider === PROVIDER_TYPE.ANTHROPIC) {
+    return (
+      <AnthropicModelConfigs
+        configs={configs as LLMAnthropicConfigsType}
+        onChange={onChange}
+      />
+    );
+  }
+
   return;
 };

@@ -0,0 +1,64 @@
import React from "react";

import SliderInputControl from "@/components/shared/SliderInputControl/SliderInputControl";
import { LLMAnthropicConfigsType } from "@/types/providers";
import { DEFAULT_ANTHROPIC_CONFIGS } from "@/constants/llm";
import PromptModelConfigsTooltipContent from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/PromptModelConfigsTooltipContent";

interface AnthropicModelConfigsProps {
configs: LLMAnthropicConfigsType;
onChange: (configs: Partial<LLMAnthropicConfigsType>) => void;
}

const AnthropicModelConfigs = ({
configs,
onChange,
}: AnthropicModelConfigsProps) => {
return (
<div className="flex w-72 flex-col gap-6">
<SliderInputControl
value={configs.temperature}
onChange={(v) => onChange({ temperature: v })}
id="temperature"
min={0}
max={1}
step={0.01}
defaultValue={DEFAULT_ANTHROPIC_CONFIGS.TEMPERATURE}
label="Temperature"
tooltip={
<PromptModelConfigsTooltipContent text="Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive." />
}
/>

<SliderInputControl
value={configs.maxCompletionTokens}
onChange={(v) => onChange({ maxCompletionTokens: v })}
id="maxCompletionTokens"
min={0}
max={10000}
step={1}
defaultValue={DEFAULT_ANTHROPIC_CONFIGS.MAX_COMPLETION_TOKENS}
label="Max output tokens"
tooltip={
<PromptModelConfigsTooltipContent text="The maximum number of tokens to generate shared between the prompt and completion. The exact limit varies by model. (One token is roughly 4 characters for standard English text)." />
}
/>

<SliderInputControl
value={configs.topP}
onChange={(v) => onChange({ topP: v })}
id="topP"
min={0}
max={1}
step={0.01}
defaultValue={DEFAULT_ANTHROPIC_CONFIGS.TOP_P}
label="Top P"
tooltip={
<PromptModelConfigsTooltipContent text="Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered" />
}
/>
</div>
);
};

export default AnthropicModelConfigs;
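The new AnthropicModelConfigs component is controlled: it reads temperature, maxCompletionTokens, and topP from `configs` and reports changes back as partial objects. A usage sketch, not part of this commit — the local state shape below is an assumption based on the fields the component reads:

```tsx
// Illustrative sketch only — not part of this commit.
import React, { useState } from "react";
import AnthropicModelConfigs from "@/components/pages-shared/llm/PromptModelSettings/providerConfigs/AnthropicModelConfigs";
import { DEFAULT_ANTHROPIC_CONFIGS } from "@/constants/llm";
import { LLMAnthropicConfigsType } from "@/types/providers";

const Example = () => {
  // Assumed state shape: the three fields the component reads from `configs`
  const [configs, setConfigs] = useState<LLMAnthropicConfigsType>({
    temperature: DEFAULT_ANTHROPIC_CONFIGS.TEMPERATURE,
    maxCompletionTokens: DEFAULT_ANTHROPIC_CONFIGS.MAX_COMPLETION_TOKENS,
    topP: DEFAULT_ANTHROPIC_CONFIGS.TOP_P,
  });

  return (
    <AnthropicModelConfigs
      configs={configs}
      // Merge partial updates coming back from the sliders
      onChange={(partial) => setConfigs((prev) => ({ ...prev, ...partial }))}
    />
  );
};

export default Example;
```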
@@ -7,17 +7,17 @@ import { PROVIDER_TYPE } from "@/types/providers";

 const AIProviderCell = (context: CellContext<unknown, PROVIDER_TYPE>) => {
   const provider = context.getValue();
-  const Icon = PROVIDERS[provider].icon;
+  const Icon = PROVIDERS[provider]?.icon || null;

-  const providerKeyLabel = PROVIDERS[provider].label;
+  const providerKeyLabel = PROVIDERS[provider]?.label || "";

   return (
     <CellWrapper
       metadata={context.column.columnDef.meta}
       tableMetadata={context.table.options.meta}
       className="flex gap-1"
     >
-      <Icon />
+      {Icon && <Icon />}
       <span>{providerKeyLabel}</span>
     </CellWrapper>
   );
@@ -48,7 +48,7 @@ const AIProvidersRowActionsCell: React.FunctionComponent<
         open={open === 1}
         setOpen={setOpen}
         onConfirm={deleteProviderKeyHandler}
-        title={`Delete ${PROVIDERS[providerKey.provider].label} configuration`}
+        title={`Delete ${PROVIDERS[providerKey.provider]?.label} configuration`}
         description="Are you sure you want to delete this provider configuration?"
         confirmText="Delete configuration"
       />
@@ -34,6 +34,7 @@ import {
 } from "@/store/PlaygroundStore";
 import { getDefaultProviderKey } from "@/lib/provider";
 import { PROVIDERS } from "@/constants/providers";
+import useLastPickedModel from "@/components/pages/PlaygroundPage/PlaygroundPrompts/useLastPickedModel";

 interface PlaygroundPromptProps {
   workspaceName: string;
@@ -53,6 +54,7 @@ const PlaygroundPrompt = ({
   const checkedIfModelIsValidRef = useRef(false);

   const prompt = usePromptById(promptId);
+  const [, setLastPickedModel] = useLastPickedModel();

   const { model, messages, configs, name } = prompt;

@@ -107,8 +109,9 @@
   const handleUpdateModel = useCallback(
     (model: PROVIDER_MODEL_TYPE) => {
       updatePrompt(promptId, { model });
+      setLastPickedModel(model);
     },
-    [updatePrompt, promptId],
+    [updatePrompt, promptId, setLastPickedModel],
   );

   const handleAddProvider = useCallback(
@@ -10,6 +10,7 @@ import {
   usePromptIds,
   useSetPromptMap,
 } from "@/store/PlaygroundStore";
+import useLastPickedModel from "@/components/pages/PlaygroundPage/PlaygroundPrompts/useLastPickedModel";

 interface PlaygroundPromptsState {
   workspaceName: string;
@@ -28,15 +29,23 @@ const PlaygroundPrompts = ({

   const promptIds = usePromptIds();

+  const [lastPickedModel] = useLastPickedModel();
+
   const handleAddPrompt = () => {
-    const newPrompt = generateDefaultPrompt({ setupProviders: providerKeys });
+    const newPrompt = generateDefaultPrompt({
+      setupProviders: providerKeys,
+      lastPickedModel,
+    });
     addPrompt(newPrompt);
   };

   const resetPlayground = useCallback(() => {
-    const newPrompt = generateDefaultPrompt({ setupProviders: providerKeys });
+    const newPrompt = generateDefaultPrompt({
+      setupProviders: providerKeys,
+      lastPickedModel,
+    });
     setPromptMap([newPrompt.id], { [newPrompt.id]: newPrompt });
-  }, [setPromptMap, providerKeys]);
+  }, [setPromptMap, providerKeys, lastPickedModel]);

   useEffect(() => {
     // hasn't been initialized yet or the last prompt is removed
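With this change, generateDefaultPrompt also receives the last picked model, so newly added and reset prompts can reuse the user's previous choice. A rough sketch of how that choice might be resolved — `resolveModel` is a made-up helper and generateDefaultPrompt's actual logic is not shown in this diff:

```tsx
// Illustrative sketch only — resolveModel is a made-up helper; the real
// generateDefaultPrompt implementation is not part of this diff.
import { PROVIDERS } from "@/constants/providers";
import { PROVIDER_MODEL_TYPE, PROVIDER_TYPE } from "@/types/providers";

const resolveModel = (
  setupProviders: PROVIDER_TYPE[],
  lastPickedModel: PROVIDER_MODEL_TYPE | "",
): PROVIDER_MODEL_TYPE | "" => {
  // Prefer the model the user picked last, so new prompts keep that choice
  if (lastPickedModel) return lastPickedModel;

  // Otherwise fall back to the first configured provider's default model
  const firstProvider = setupProviders[0];
  return firstProvider ? PROVIDERS[firstProvider].defaultModel : "";
};
```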
@@ -0,0 +1,15 @@
import useLocalStorageState from "use-local-storage-state";
import { PROVIDER_MODEL_TYPE } from "@/types/providers";

const PLAYGROUND_LAST_PICKED_MODEL = "playground-last-picked-model";

const useLastPickedModel = () => {
return useLocalStorageState<PROVIDER_MODEL_TYPE | "">(
PLAYGROUND_LAST_PICKED_MODEL,
{
defaultValue: "",
},
);
};

export default useLastPickedModel;
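The hook wraps use-local-storage-state, so the last picked model persists across page reloads under the `playground-last-picked-model` key, with an empty string as the default before any model has been picked. A small usage sketch, not part of this commit:

```tsx
// Illustrative sketch only — not part of this commit.
import React from "react";
import useLastPickedModel from "@/components/pages/PlaygroundPage/PlaygroundPrompts/useLastPickedModel";
import { PROVIDER_MODEL_TYPE } from "@/types/providers";

const Example = () => {
  const [lastPickedModel, setLastPickedModel] = useLastPickedModel();

  return (
    <button
      onClick={() =>
        setLastPickedModel(PROVIDER_MODEL_TYPE.CLAUDE_3_5_SONNET_LATEST)
      }
    >
      {/* Empty string is the default before any model has been picked */}
      {lastPickedModel || "No model picked yet"}
    </button>
  );
};

export default Example;
```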
@@ -42,7 +42,7 @@ const AddEditAIProviderDialog: React.FC<AddEditAIProviderDialogProps> = ({
   const isEdit = Boolean(providerKey);
   const isValid = Boolean(apiKey.length);

-  const providerName = (provider && PROVIDERS[provider].label) || "";
+  const providerName = (provider && PROVIDERS[provider]?.label) || "";

   const title = isEdit
     ? "Edit AI provider configuration"
@@ -83,7 +83,7 @@ const AddEditAIProviderDialog: React.FC<AddEditAIProviderDialogProps> = ({
   ]);

   const renderOption = (option: DropdownOption<string>) => {
-    const Icon = PROVIDERS[option.value as PROVIDER_TYPE].icon;
+    const Icon = PROVIDERS[option.value as PROVIDER_TYPE]?.icon;

     return (
       <SelectItem key={option.value} value={option.value} withoutCheck>
@@ -125,7 +125,7 @@ const AddEditAIProviderDialog: React.FC<AddEditAIProviderDialogProps> = ({
             Get your {providerName} API key{" "}
             <Button variant="link" size="sm" asChild className="px-0">
               <a
-                href={PROVIDERS[provider].apiKeyURL}
+                href={PROVIDERS[provider]?.apiKeyURL}
                 target="_blank"
                 rel="noreferrer"
               >
45 changes: 45 additions & 0 deletions apps/opik-frontend/src/constants/llm.ts
@@ -86,6 +86,45 @@ export const PROVIDER_MODELS: PROVIDER_MODELS_TYPE = {
       label: "GPT 3.5 Turbo 0125",
     },
   ],
+
+  [PROVIDER_TYPE.ANTHROPIC]: [
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_5_SONNET_20241022,
+      label: "Claude 3.5 Sonnet 2024-10-22",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_5_HAIKU_20241022,
+      label: "Claude 3.5 Haiku 2024-10-22",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_5_SONNET_20240620,
+      label: "Claude 3.5 Sonnet 2024-06-20",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_OPUS_20240229,
+      label: "Claude 3 Opus 2024-02-29",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_SONNET_20240229,
+      label: "Claude 3 Sonnet 2024-02-29",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_HAIKU_20240307,
+      label: "Claude 3 Haiku 2024-03-07",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_5_SONNET_LATEST,
+      label: "Claude 3.5 Sonnet Latest",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_5_HAIKU_LATEST,
+      label: "Claude 3.5 Haiku Latest",
+    },
+    {
+      value: PROVIDER_MODEL_TYPE.CLAUDE_3_OPUS_LATEST,
+      label: "Claude 3 Opus Latest",
+    },
+  ],
 };

 export const DEFAULT_OPEN_AI_CONFIGS = {
@@ -96,6 +135,12 @@ export const DEFAULT_OPEN_AI_CONFIGS = {
   PRESENCE_PENALTY: 0,
 };

+export const DEFAULT_ANTHROPIC_CONFIGS = {
+  TEMPERATURE: 0,
+  MAX_COMPLETION_TOKENS: 1024,
+  TOP_P: 1,
+};
+
 export const LLM_PROMPT_CUSTOM_TEMPLATE: LLMPromptTemplate = {
   label: "Custom LLM-as-judge",
   description:
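For reference, the new defaults correspond to standard Anthropic Messages API sampling parameters (max_tokens, temperature, top_p). A hedged sketch of how they might map onto a request body — the backend's actual request construction is not part of this diff, and the model string is only an example:

```ts
// Illustrative only — not part of this commit; the backend request shape may differ.
import { DEFAULT_ANTHROPIC_CONFIGS } from "@/constants/llm";

const exampleRequestBody = {
  model: "claude-3-5-sonnet-latest",
  max_tokens: DEFAULT_ANTHROPIC_CONFIGS.MAX_COMPLETION_TOKENS,
  temperature: DEFAULT_ANTHROPIC_CONFIGS.TEMPERATURE,
  top_p: DEFAULT_ANTHROPIC_CONFIGS.TOP_P,
  messages: [{ role: "user", content: "Hello" }],
};
```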
10 changes: 10 additions & 0 deletions apps/opik-frontend/src/constants/providers.ts
@@ -1,4 +1,6 @@
 import OpenAIIcon from "@/icons/integrations/openai.svg?react";
+import AnthropicIcon from "@/icons/integrations/anthropic.svg?react";
+
 import { PROVIDER_MODEL_TYPE, PROVIDER_TYPE } from "@/types/providers";

 type IconType = typeof OpenAIIcon;
@@ -25,6 +27,14 @@ export const PROVIDERS: PROVIDERS_TYPE = {
     apiKeyURL: "https://platform.openai.com/account/api-keys",
     defaultModel: PROVIDER_MODEL_TYPE.GPT_4O,
   },
+  [PROVIDER_TYPE.ANTHROPIC]: {
+    label: "Anthropic",
+    value: PROVIDER_TYPE.ANTHROPIC,
+    icon: AnthropicIcon,
+    apiKeyName: "ANTHROPIC_API_KEY",
+    apiKeyURL: "https://console.anthropic.com/settings/keys",
+    defaultModel: PROVIDER_MODEL_TYPE.CLAUDE_3_5_SONNET_LATEST,
+  },
 };

 export const PROVIDERS_OPTIONS = Object.values(PROVIDERS);
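Several call sites in this commit also switched to optional chaining on PROVIDERS lookups so an unrecognized provider value degrades gracefully instead of throwing. A small sketch of that guard pattern — `describeProvider` is a made-up helper, not part of the codebase:

```tsx
// Illustrative only — describeProvider is a made-up helper, not part of this commit.
import { PROVIDERS } from "@/constants/providers";
import { PROVIDER_TYPE } from "@/types/providers";

const describeProvider = (provider: PROVIDER_TYPE): string => {
  // Optional chaining avoids a crash if `provider` has no PROVIDERS entry (e.g. stale data)
  const label = PROVIDERS[provider]?.label || "";
  const hasIcon = Boolean(PROVIDERS[provider]?.icon);

  return hasIcon ? label : `${label} (no icon registered)`;
};
```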
6 changes: 6 additions & 0 deletions apps/opik-frontend/src/icons/integrations/anthropic.svg
(New SVG icon asset; file contents not shown in the diff view.)
