Mirror of https://github.com/browseros-ai/BrowserOS.git, last synced 2026-05-14 16:14:28 +00:00.
Compare commits
5 Commits
fix/dev-se
...
feat/model
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
40d88d2838 | ||
|
|
aed83c9968 | ||
|
|
b905aa5902 | ||
|
|
0fe55e2c92 | ||
|
|
05c3f18f1c |
1
packages/browseros-agent/.gitignore
vendored
1
packages/browseros-agent/.gitignore
vendored
@@ -195,3 +195,4 @@ test-results/
|
||||
.agent/
|
||||
.llm/
|
||||
.grove/
|
||||
docs/plans/2026-03-24-models-dev-integration.md
|
||||
|
||||
@@ -81,6 +81,9 @@ bun run dev:server # Build server for development
|
||||
bun run dev:ext # Build extension for development
|
||||
bun run dist:server # Build server for production (all targets)
|
||||
bun run dist:ext # Build extension for production
|
||||
|
||||
# Refresh models.dev data
|
||||
bun run generate:models # Fetches latest from models.dev/api.json
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
import { zodResolver } from '@hookform/resolvers/zod'
|
||||
import { CheckCircle2, ExternalLink, Loader2, XCircle } from 'lucide-react'
|
||||
import { type FC, useEffect, useState } from 'react'
|
||||
import {
|
||||
CheckCircle2,
|
||||
ChevronDown,
|
||||
ExternalLink,
|
||||
Loader2,
|
||||
SearchIcon,
|
||||
XCircle,
|
||||
} from 'lucide-react'
|
||||
import { type FC, useEffect, useRef, useState } from 'react'
|
||||
import { useForm } from 'react-hook-form'
|
||||
import { z } from 'zod/v3'
|
||||
import { Button } from '@/components/ui/button'
|
||||
@@ -47,7 +54,12 @@ import {
|
||||
import { type TestResult, testProvider } from '@/lib/llm-providers/testProvider'
|
||||
import type { LlmProviderConfig, ProviderType } from '@/lib/llm-providers/types'
|
||||
import { track } from '@/lib/metrics/track'
|
||||
import { getModelContextLength, getModelOptions } from './models'
|
||||
import { cn } from '@/lib/utils'
|
||||
import {
|
||||
getModelContextLength,
|
||||
getModelsForProvider,
|
||||
type ModelInfo,
|
||||
} from './models'
|
||||
|
||||
const providerTypeEnum = z.enum([
|
||||
'moonshot',
|
||||
@@ -163,6 +175,107 @@ export const providerFormSchema = z
|
||||
*/
|
||||
export type ProviderFormValues = z.infer<typeof providerFormSchema>
|
||||
|
||||
function formatContextWindow(tokens: number): string {
|
||||
if (tokens >= 1000000)
|
||||
return `${(tokens / 1000000).toFixed(tokens % 1000000 === 0 ? 0 : 1)}M`
|
||||
if (tokens >= 1000) return `${Math.round(tokens / 1000)}K`
|
||||
return `${tokens}`
|
||||
}
|
||||
|
||||
function ModelPickerList({
|
||||
models,
|
||||
selectedModelId,
|
||||
onSelect,
|
||||
onCustomSubmit,
|
||||
onClose,
|
||||
}: {
|
||||
models: ModelInfo[]
|
||||
selectedModelId: string
|
||||
onSelect: (modelId: string) => void
|
||||
onCustomSubmit: (modelId: string) => void
|
||||
onClose: () => void
|
||||
}) {
|
||||
const [search, setSearch] = useState('')
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
const containerRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
useEffect(() => {
|
||||
inputRef.current?.focus()
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (e: MouseEvent) => {
|
||||
if (
|
||||
containerRef.current &&
|
||||
!containerRef.current.contains(e.target as Node)
|
||||
) {
|
||||
onClose()
|
||||
}
|
||||
}
|
||||
document.addEventListener('mousedown', handleClickOutside)
|
||||
return () => document.removeEventListener('mousedown', handleClickOutside)
|
||||
}, [onClose])
|
||||
|
||||
const query = search.toLowerCase()
|
||||
const filtered = query
|
||||
? models.filter((m) => m.modelId.toLowerCase().includes(query))
|
||||
: models
|
||||
|
||||
const handleKeyDown = (e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' && search) {
|
||||
e.preventDefault()
|
||||
onCustomSubmit(search)
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
onClose()
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div ref={containerRef} className="rounded-md border">
|
||||
<div className="flex items-center gap-2 border-b px-3">
|
||||
<SearchIcon className="h-4 w-4 shrink-0 text-muted-foreground opacity-50" />
|
||||
<input
|
||||
ref={inputRef}
|
||||
type="text"
|
||||
value={search}
|
||||
onChange={(e) => setSearch(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder="Search or type a custom model ID..."
|
||||
className="flex h-9 w-full bg-transparent py-2 text-sm outline-none placeholder:text-muted-foreground"
|
||||
/>
|
||||
</div>
|
||||
<div className="max-h-[200px] overflow-y-auto">
|
||||
{filtered.length > 0 ? (
|
||||
filtered.map((model) => {
|
||||
const isSelected = selectedModelId === model.modelId
|
||||
return (
|
||||
<button
|
||||
key={model.modelId}
|
||||
type="button"
|
||||
onClick={() => onSelect(model.modelId)}
|
||||
className={cn(
|
||||
'flex w-full items-center justify-between px-3 py-2 text-left text-sm transition-colors hover:bg-accent',
|
||||
isSelected && 'bg-accent font-medium',
|
||||
)}
|
||||
>
|
||||
<span className="truncate">{model.modelId}</span>
|
||||
<span className="ml-2 shrink-0 rounded-md bg-muted px-1.5 py-0.5 font-mono text-[10px] text-muted-foreground">
|
||||
{formatContextWindow(model.contextLength)}
|
||||
</span>
|
||||
</button>
|
||||
)
|
||||
})
|
||||
) : (
|
||||
<div className="px-3 py-6 text-center text-muted-foreground text-sm">
|
||||
No models match. Press Enter to use "{search}"
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for NewProviderDialog
|
||||
* @public
|
||||
@@ -188,9 +301,9 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
initialValues,
|
||||
onSave,
|
||||
}) => {
|
||||
const [isCustomModel, setIsCustomModel] = useState(false)
|
||||
const [isTesting, setIsTesting] = useState(false)
|
||||
const [testResult, setTestResult] = useState<TestResult | null>(null)
|
||||
const [modelListOpen, setModelListOpen] = useState(false)
|
||||
const { supports } = useCapabilities()
|
||||
const { baseUrl: agentServerUrl } = useAgentServerUrl()
|
||||
const kimiLaunch = useKimiLaunch()
|
||||
@@ -261,8 +374,7 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
watchedSessionToken,
|
||||
])
|
||||
|
||||
// Get model options for current provider type
|
||||
const modelOptions = getModelOptions(watchedType as ProviderType)
|
||||
const modelInfoList = getModelsForProvider(watchedType as ProviderType)
|
||||
|
||||
// Handle provider type change (user-initiated via Select)
|
||||
const handleTypeChange = (newType: ProviderType) => {
|
||||
@@ -272,14 +384,13 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
form.setValue('baseUrl', defaultUrl)
|
||||
}
|
||||
form.setValue('modelId', '')
|
||||
setIsCustomModel(false)
|
||||
}
|
||||
|
||||
// Auto-fill context window when model changes (only for new providers)
|
||||
useEffect(() => {
|
||||
if (initialValues?.id) return
|
||||
|
||||
if (watchedModelId && watchedModelId !== 'custom') {
|
||||
if (watchedModelId) {
|
||||
const contextLength = getModelContextLength(
|
||||
watchedType as ProviderType,
|
||||
watchedModelId,
|
||||
@@ -290,17 +401,6 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
}
|
||||
}, [watchedModelId, watchedType, form, initialValues?.id])
|
||||
|
||||
// Handle model selection (including custom option)
|
||||
const handleModelChange = (value: string) => {
|
||||
if (value === 'custom') {
|
||||
setIsCustomModel(true)
|
||||
form.setValue('modelId', '')
|
||||
} else {
|
||||
setIsCustomModel(false)
|
||||
form.setValue('modelId', value)
|
||||
}
|
||||
}
|
||||
|
||||
// Reset form when initialValues change
|
||||
useEffect(() => {
|
||||
if (initialValues) {
|
||||
@@ -325,7 +425,6 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
reasoningEffort: initialValues.reasoningEffort || 'high',
|
||||
reasoningSummary: initialValues.reasoningSummary || 'auto',
|
||||
})
|
||||
setIsCustomModel(false)
|
||||
}
|
||||
}, [initialValues, form])
|
||||
|
||||
@@ -352,7 +451,6 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
reasoningEffort: 'high',
|
||||
reasoningSummary: 'auto',
|
||||
})
|
||||
setIsCustomModel(false)
|
||||
}
|
||||
// Clear test result when dialog opens/closes
|
||||
setTestResult(null)
|
||||
@@ -811,52 +909,51 @@ export const NewProviderDialog: FC<NewProviderDialogProps> = ({
|
||||
control={form.control}
|
||||
name="modelId"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormItem className="flex flex-col">
|
||||
<FormLabel>Model *</FormLabel>
|
||||
{isCustomModel || modelOptions.length === 1 ? (
|
||||
<>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder={
|
||||
watchedType === 'azure'
|
||||
? 'Enter your deployment name'
|
||||
: watchedType === 'bedrock'
|
||||
? 'e.g., anthropic.claude-3-5-sonnet-20241022-v2:0'
|
||||
: 'Enter custom model ID'
|
||||
}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
{modelOptions.length > 1 && (
|
||||
<Button
|
||||
type="button"
|
||||
variant="link"
|
||||
size="sm"
|
||||
className="h-auto p-0 text-xs"
|
||||
onClick={() => setIsCustomModel(false)}
|
||||
>
|
||||
← Back to model list
|
||||
</Button>
|
||||
)}
|
||||
</>
|
||||
{modelInfoList.length === 0 ? (
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder={
|
||||
watchedType === 'azure'
|
||||
? 'Enter your deployment name'
|
||||
: watchedType === 'bedrock'
|
||||
? 'e.g., anthropic.claude-3-5-sonnet-20241022-v2:0'
|
||||
: 'Enter model ID'
|
||||
}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
) : modelListOpen ? (
|
||||
<ModelPickerList
|
||||
models={modelInfoList}
|
||||
selectedModelId={field.value}
|
||||
onSelect={(modelId) => {
|
||||
form.setValue('modelId', modelId)
|
||||
setModelListOpen(false)
|
||||
}}
|
||||
onCustomSubmit={(modelId) => {
|
||||
form.setValue('modelId', modelId)
|
||||
setModelListOpen(false)
|
||||
}}
|
||||
onClose={() => setModelListOpen(false)}
|
||||
/>
|
||||
) : (
|
||||
<Select
|
||||
onValueChange={handleModelChange}
|
||||
value={field.value}
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setModelListOpen(true)}
|
||||
className={cn(
|
||||
'flex h-9 w-full items-center justify-between rounded-md border border-input bg-transparent px-3 py-1 text-sm shadow-xs',
|
||||
field.value
|
||||
? 'text-foreground'
|
||||
: 'text-muted-foreground',
|
||||
)}
|
||||
>
|
||||
<FormControl>
|
||||
<SelectTrigger className="w-full">
|
||||
<SelectValue placeholder="Select a model" />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{modelOptions.map((modelId) => (
|
||||
<SelectItem key={modelId} value={modelId}>
|
||||
{modelId === 'custom' ? '+ Custom model' : modelId}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<span className="truncate">
|
||||
{field.value || 'Select a model...'}
|
||||
</span>
|
||||
<ChevronDown className="ml-2 h-4 w-4 shrink-0 opacity-50" />
|
||||
</button>
|
||||
)}
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
|
||||
@@ -1,98 +1,21 @@
|
||||
import {
|
||||
getModelsDevModels,
|
||||
type ModelsDevModel,
|
||||
} from '@/lib/llm-providers/models-dev'
|
||||
import type { ProviderType } from '@/lib/llm-providers/types'
|
||||
|
||||
/**
|
||||
* Model information with context length
|
||||
*/
|
||||
export interface ModelInfo {
|
||||
modelId: string
|
||||
contextLength: number
|
||||
supportsImages?: boolean
|
||||
supportsReasoning?: boolean
|
||||
supportsToolCall?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Models data organized by provider type (matches backend AIProvider enum)
|
||||
*/
|
||||
export interface ModelsData {
|
||||
anthropic: ModelInfo[]
|
||||
openai: ModelInfo[]
|
||||
'openai-compatible': ModelInfo[]
|
||||
google: ModelInfo[]
|
||||
openrouter: ModelInfo[]
|
||||
azure: ModelInfo[]
|
||||
ollama: ModelInfo[]
|
||||
lmstudio: ModelInfo[]
|
||||
bedrock: ModelInfo[]
|
||||
browseros: ModelInfo[]
|
||||
moonshot: ModelInfo[]
|
||||
'chatgpt-pro': ModelInfo[]
|
||||
'github-copilot': ModelInfo[]
|
||||
'qwen-code': ModelInfo[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Available models per provider with context lengths
|
||||
* Based on: https://github.com/browseros-ai/BrowserOS-agent/blob/main/src/options/data/models.ts
|
||||
*/
|
||||
export const MODELS_DATA: ModelsData = {
|
||||
moonshot: [{ modelId: 'kimi-k2.5', contextLength: 200000 }],
|
||||
anthropic: [
|
||||
{ modelId: 'claude-opus-4-5-20251101', contextLength: 200000 },
|
||||
{ modelId: 'claude-haiku-4-5-20251001', contextLength: 200000 },
|
||||
{ modelId: 'claude-sonnet-4-5-20250929', contextLength: 200000 },
|
||||
{ modelId: 'claude-sonnet-4-20250514', contextLength: 200000 },
|
||||
{ modelId: 'claude-opus-4-20250514', contextLength: 200000 },
|
||||
{ modelId: 'claude-3-7-sonnet-20250219', contextLength: 200000 },
|
||||
{ modelId: 'claude-3-5-haiku-20241022', contextLength: 200000 },
|
||||
],
|
||||
openai: [
|
||||
{ modelId: 'gpt-5.2', contextLength: 200000 },
|
||||
{ modelId: 'gpt-5.2-pro', contextLength: 200000 },
|
||||
{ modelId: 'gpt-5', contextLength: 200000 },
|
||||
{ modelId: 'gpt-5-mini', contextLength: 200000 },
|
||||
{ modelId: 'gpt-5-nano', contextLength: 200000 },
|
||||
{ modelId: 'gpt-4.1', contextLength: 200000 },
|
||||
{ modelId: 'gpt-4.1-mini', contextLength: 200000 },
|
||||
{ modelId: 'o4-mini', contextLength: 200000 },
|
||||
{ modelId: 'o3-mini', contextLength: 200000 },
|
||||
{ modelId: 'gpt-4o', contextLength: 128000 },
|
||||
{ modelId: 'gpt-4o-mini', contextLength: 128000 },
|
||||
],
|
||||
'openai-compatible': [],
|
||||
google: [
|
||||
{ modelId: 'gemini-3-pro-preview', contextLength: 1048576 },
|
||||
{ modelId: 'gemini-3-flash-preview', contextLength: 1048576 },
|
||||
{ modelId: 'gemini-2.5-flash', contextLength: 1048576 },
|
||||
{ modelId: 'gemini-2.5-pro', contextLength: 1048576 },
|
||||
],
|
||||
openrouter: [
|
||||
{ modelId: 'google/gemini-3-pro-preview', contextLength: 1048576 },
|
||||
{ modelId: 'google/gemini-3-flash-preview', contextLength: 1048576 },
|
||||
{ modelId: 'google/gemini-2.5-flash', contextLength: 1048576 },
|
||||
{ modelId: 'anthropic/claude-opus-4.5', contextLength: 200000 },
|
||||
{ modelId: 'anthropic/claude-haiku-4.5', contextLength: 200000 },
|
||||
{ modelId: 'anthropic/claude-sonnet-4.5', contextLength: 200000 },
|
||||
{ modelId: 'anthropic/claude-sonnet-4', contextLength: 200000 },
|
||||
{ modelId: 'anthropic/claude-3.7-sonnet', contextLength: 200000 },
|
||||
{ modelId: 'openai/gpt-4o', contextLength: 128000 },
|
||||
{ modelId: 'openai/gpt-oss-120b', contextLength: 128000 },
|
||||
{ modelId: 'openai/gpt-oss-20b', contextLength: 128000 },
|
||||
{ modelId: 'qwen/qwen3-14b', contextLength: 131072 },
|
||||
{ modelId: 'qwen/qwen3-8b', contextLength: 131072 },
|
||||
],
|
||||
azure: [],
|
||||
ollama: [
|
||||
{ modelId: 'qwen3:4b', contextLength: 262144 },
|
||||
{ modelId: 'qwen3:8b', contextLength: 40960 },
|
||||
{ modelId: 'qwen3:14b', contextLength: 40960 },
|
||||
{ modelId: 'gpt-oss:20b', contextLength: 128000 },
|
||||
{ modelId: 'gpt-oss:120b', contextLength: 128000 },
|
||||
],
|
||||
lmstudio: [
|
||||
{ modelId: 'openai/gpt-oss-20b', contextLength: 128000 },
|
||||
{ modelId: 'openai/gpt-oss-120b', contextLength: 128000 },
|
||||
{ modelId: 'qwen/qwen3-vl-8b', contextLength: 131072 },
|
||||
],
|
||||
bedrock: [],
|
||||
const CUSTOM_PROVIDER_MODELS: Partial<Record<ProviderType, ModelInfo[]>> = {
|
||||
browseros: [{ modelId: 'browseros-auto', contextLength: 200000 }],
|
||||
'openai-compatible': [],
|
||||
ollama: [],
|
||||
'chatgpt-pro': [
|
||||
{ modelId: 'gpt-5.4', contextLength: 400000 },
|
||||
{ modelId: 'gpt-5.3-codex', contextLength: 400000 },
|
||||
@@ -103,32 +26,6 @@ export const MODELS_DATA: ModelsData = {
|
||||
{ modelId: 'gpt-5.1-codex-mini', contextLength: 400000 },
|
||||
{ modelId: 'gpt-5.1', contextLength: 200000 },
|
||||
],
|
||||
'github-copilot': [
|
||||
// Free tier (unlimited with Pro)
|
||||
{ modelId: 'gpt-5-mini', contextLength: 128000 },
|
||||
{ modelId: 'claude-haiku-4.5', contextLength: 128000 },
|
||||
{ modelId: 'gpt-4o', contextLength: 64000 },
|
||||
{ modelId: 'gpt-4.1', contextLength: 64000 },
|
||||
// Premium models (Pro: 300/mo, Pro+: 1500/mo)
|
||||
{ modelId: 'claude-sonnet-4.6', contextLength: 128000 },
|
||||
{ modelId: 'claude-sonnet-4.5', contextLength: 128000 },
|
||||
{ modelId: 'claude-sonnet-4', contextLength: 128000 },
|
||||
{ modelId: 'claude-opus-4.6', contextLength: 128000 },
|
||||
{ modelId: 'claude-opus-4.5', contextLength: 128000 },
|
||||
{ modelId: 'gemini-2.5-pro', contextLength: 128000 },
|
||||
{ modelId: 'gemini-3-pro-preview', contextLength: 128000 },
|
||||
{ modelId: 'gemini-3-flash-preview', contextLength: 128000 },
|
||||
{ modelId: 'gemini-3.1-pro-preview', contextLength: 128000 },
|
||||
{ modelId: 'gpt-5.4', contextLength: 272000 },
|
||||
{ modelId: 'gpt-5.4-mini', contextLength: 128000 },
|
||||
{ modelId: 'gpt-5.3-codex', contextLength: 272000 },
|
||||
{ modelId: 'gpt-5.2-codex', contextLength: 272000 },
|
||||
{ modelId: 'gpt-5.2', contextLength: 128000 },
|
||||
{ modelId: 'gpt-5.1-codex', contextLength: 128000 },
|
||||
{ modelId: 'gpt-5.1-codex-max', contextLength: 128000 },
|
||||
{ modelId: 'gpt-5.1', contextLength: 128000 },
|
||||
{ modelId: 'grok-code-fast-1', contextLength: 128000 },
|
||||
],
|
||||
'qwen-code': [
|
||||
{ modelId: 'coder-model', contextLength: 1000000 },
|
||||
{ modelId: 'qwen3-coder-plus', contextLength: 1000000 },
|
||||
@@ -137,25 +34,23 @@ export const MODELS_DATA: ModelsData = {
|
||||
],
|
||||
}
|
||||
|
||||
/**
|
||||
* Get models for a specific provider type
|
||||
*/
|
||||
function fromModelsDevModel(m: ModelsDevModel): ModelInfo {
|
||||
return {
|
||||
modelId: m.id,
|
||||
contextLength: m.contextWindow,
|
||||
supportsImages: m.supportsImages,
|
||||
supportsReasoning: m.supportsReasoning,
|
||||
supportsToolCall: m.supportsToolCall,
|
||||
}
|
||||
}
|
||||
|
||||
export function getModelsForProvider(providerType: ProviderType): ModelInfo[] {
|
||||
return MODELS_DATA[providerType] || []
|
||||
const custom = CUSTOM_PROVIDER_MODELS[providerType]
|
||||
if (custom !== undefined) return custom
|
||||
|
||||
return getModelsDevModels(providerType).map(fromModelsDevModel)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get model options for select dropdown (model IDs + custom option)
|
||||
*/
|
||||
export function getModelOptions(providerType: ProviderType): string[] {
|
||||
const models = getModelsForProvider(providerType)
|
||||
const modelIds = models.map((m) => m.modelId)
|
||||
return modelIds.length > 0 ? [...modelIds, 'custom'] : ['custom']
|
||||
}
|
||||
|
||||
/**
|
||||
* Get context length for a specific model
|
||||
*/
|
||||
export function getModelContextLength(
|
||||
providerType: ProviderType,
|
||||
modelId: string,
|
||||
@@ -164,14 +59,3 @@ export function getModelContextLength(
|
||||
const model = models.find((m) => m.modelId === modelId)
|
||||
return model?.contextLength
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if model ID is a custom (user-entered) value
|
||||
*/
|
||||
export function isCustomModel(
|
||||
providerType: ProviderType,
|
||||
modelId: string,
|
||||
): boolean {
|
||||
const models = getModelsForProvider(providerType)
|
||||
return !models.some((m) => m.modelId === modelId)
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,35 @@
|
||||
import data from './models-dev-data.json'
|
||||
|
||||
export interface ModelsDevModel {
|
||||
id: string
|
||||
name: string
|
||||
contextWindow: number
|
||||
maxOutput: number
|
||||
supportsImages: boolean
|
||||
supportsReasoning: boolean
|
||||
supportsToolCall: boolean
|
||||
inputCost?: number
|
||||
outputCost?: number
|
||||
}
|
||||
|
||||
export interface ModelsDevProvider {
|
||||
name: string
|
||||
api?: string
|
||||
doc: string
|
||||
models: ModelsDevModel[]
|
||||
}
|
||||
|
||||
const modelsDevData: Record<string, ModelsDevProvider> = data as Record<
|
||||
string,
|
||||
ModelsDevProvider
|
||||
>
|
||||
|
||||
export function getModelsDevProvider(
|
||||
providerId: string,
|
||||
): ModelsDevProvider | undefined {
|
||||
return modelsDevData[providerId]
|
||||
}
|
||||
|
||||
export function getModelsDevModels(providerId: string): ModelsDevModel[] {
|
||||
return modelsDevData[providerId]?.models ?? []
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
import { getModelsDevProvider } from './models-dev'
|
||||
import type { ProviderType } from './types'
|
||||
|
||||
/**
|
||||
@@ -15,6 +16,30 @@ export interface ProviderTemplate {
|
||||
apiKeyUrl?: string
|
||||
}
|
||||
|
||||
function enrichTemplate(
|
||||
providerId: ProviderType,
|
||||
overrides: {
|
||||
defaultModelId: string
|
||||
defaultBaseUrl?: string
|
||||
apiKeyUrl?: string
|
||||
setupGuideUrl?: string
|
||||
},
|
||||
): ProviderTemplate {
|
||||
const provider = getModelsDevProvider(providerId)
|
||||
const model = provider?.models.find((m) => m.id === overrides.defaultModelId)
|
||||
|
||||
return {
|
||||
id: providerId,
|
||||
name: provider?.name ?? providerId,
|
||||
defaultBaseUrl: overrides.defaultBaseUrl ?? provider?.api ?? '',
|
||||
defaultModelId: overrides.defaultModelId,
|
||||
supportsImages: model?.supportsImages ?? true,
|
||||
contextWindow: model?.contextWindow ?? 128000,
|
||||
...(overrides.apiKeyUrl && { apiKeyUrl: overrides.apiKeyUrl }),
|
||||
...(overrides.setupGuideUrl && { setupGuideUrl: overrides.setupGuideUrl }),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Available provider templates for quick setup
|
||||
* @public
|
||||
@@ -57,17 +82,12 @@ export const providerTemplates: ProviderTemplate[] = [
|
||||
apiKeyUrl: 'https://platform.moonshot.ai/console/api-keys',
|
||||
setupGuideUrl: 'https://platform.moonshot.ai/console/api-keys',
|
||||
},
|
||||
{
|
||||
id: 'openai',
|
||||
name: 'OpenAI',
|
||||
defaultBaseUrl: 'https://api.openai.com/v1',
|
||||
defaultModelId: 'gpt-4',
|
||||
supportsImages: true,
|
||||
contextWindow: 128000,
|
||||
enrichTemplate('openai', {
|
||||
defaultModelId: 'gpt-5',
|
||||
apiKeyUrl: 'https://platform.openai.com/api-keys',
|
||||
setupGuideUrl:
|
||||
'https://docs.browseros.com/features/bring-your-own-llm#openai',
|
||||
},
|
||||
}),
|
||||
{
|
||||
id: 'openai-compatible',
|
||||
name: 'OpenAI Compatible',
|
||||
@@ -76,28 +96,18 @@ export const providerTemplates: ProviderTemplate[] = [
|
||||
supportsImages: true,
|
||||
contextWindow: 128000,
|
||||
},
|
||||
{
|
||||
id: 'anthropic',
|
||||
name: 'Anthropic',
|
||||
defaultBaseUrl: 'https://api.anthropic.com/v1',
|
||||
defaultModelId: 'claude-3-5-sonnet-20241022',
|
||||
supportsImages: true,
|
||||
contextWindow: 200000,
|
||||
enrichTemplate('anthropic', {
|
||||
defaultModelId: 'claude-sonnet-4-6',
|
||||
apiKeyUrl: 'https://console.anthropic.com/settings/keys',
|
||||
setupGuideUrl:
|
||||
'https://docs.browseros.com/features/bring-your-own-llm#claude',
|
||||
},
|
||||
{
|
||||
id: 'google',
|
||||
name: 'Gemini',
|
||||
defaultBaseUrl: 'https://generativelanguage.googleapis.com/v1beta',
|
||||
defaultModelId: 'gemini-1.5-pro',
|
||||
supportsImages: true,
|
||||
contextWindow: 1000000,
|
||||
}),
|
||||
enrichTemplate('google', {
|
||||
defaultModelId: 'gemini-2.5-flash',
|
||||
apiKeyUrl: 'https://aistudio.google.com/app/apikey',
|
||||
setupGuideUrl:
|
||||
'https://docs.browseros.com/features/bring-your-own-llm#gemini',
|
||||
},
|
||||
}),
|
||||
{
|
||||
id: 'ollama',
|
||||
name: 'Ollama',
|
||||
@@ -108,47 +118,28 @@ export const providerTemplates: ProviderTemplate[] = [
|
||||
setupGuideUrl:
|
||||
'https://docs.browseros.com/features/bring-your-own-llm#ollama',
|
||||
},
|
||||
{
|
||||
id: 'openrouter',
|
||||
name: 'OpenRouter',
|
||||
defaultBaseUrl: 'https://openrouter.ai/api/v1',
|
||||
defaultModelId: 'openai/gpt-4-turbo',
|
||||
supportsImages: true,
|
||||
contextWindow: 128000,
|
||||
enrichTemplate('openrouter', {
|
||||
defaultModelId: 'anthropic/claude-sonnet-4.5',
|
||||
apiKeyUrl: 'https://openrouter.ai/keys',
|
||||
setupGuideUrl:
|
||||
'https://docs.browseros.com/features/bring-your-own-llm#openrouter',
|
||||
},
|
||||
{
|
||||
id: 'lmstudio',
|
||||
name: 'LM Studio',
|
||||
}),
|
||||
enrichTemplate('lmstudio', {
|
||||
defaultModelId: 'openai/gpt-oss-20b',
|
||||
defaultBaseUrl: 'http://localhost:1234/v1',
|
||||
defaultModelId: 'local-model',
|
||||
supportsImages: false,
|
||||
contextWindow: 32000,
|
||||
setupGuideUrl:
|
||||
'https://docs.browseros.com/features/bring-your-own-llm#lmstudio',
|
||||
},
|
||||
{
|
||||
id: 'azure',
|
||||
name: 'Azure',
|
||||
defaultBaseUrl: '',
|
||||
}),
|
||||
enrichTemplate('azure', {
|
||||
defaultModelId: '',
|
||||
supportsImages: true,
|
||||
contextWindow: 128000,
|
||||
apiKeyUrl:
|
||||
'https://portal.azure.com/#view/Microsoft_Azure_ProjectOxford/CognitiveServicesHub/~/OpenAI',
|
||||
},
|
||||
{
|
||||
id: 'bedrock',
|
||||
name: 'AWS Bedrock',
|
||||
defaultBaseUrl: '',
|
||||
defaultModelId: '',
|
||||
supportsImages: true,
|
||||
contextWindow: 200000,
|
||||
}),
|
||||
enrichTemplate('bedrock', {
|
||||
defaultModelId: 'anthropic.claude-sonnet-4-6',
|
||||
setupGuideUrl:
|
||||
'https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started.html',
|
||||
},
|
||||
}),
|
||||
]
|
||||
|
||||
/**
|
||||
|
||||
@@ -1225,7 +1225,7 @@
|
||||
const score = graders[firstKey].score;
|
||||
if (typeof score === 'number') {
|
||||
const pct = Math.round(score * 100);
|
||||
return { label: pct + '%', cls: pct >= 75 ? 'pass' : 'fail' };
|
||||
return { label: `${pct}%`, cls: pct >= 75 ? 'pass' : 'fail' };
|
||||
}
|
||||
const anyPass = keys.some((k) => graders[k].pass);
|
||||
return { label: anyPass ? 'PASS' : 'FAIL', cls: anyPass ? 'pass' : 'fail' };
|
||||
|
||||
@@ -34,6 +34,7 @@
|
||||
"lint": "bunx biome check",
|
||||
"lint:fix": "bunx biome check --write --unsafe",
|
||||
"gen:cdp": "bun scripts/codegen/cdp-protocol.ts",
|
||||
"generate:models": "bun scripts/generate-models.ts",
|
||||
"clean": "rimraf dist"
|
||||
},
|
||||
"repository": "browseros-ai/BrowserOS-server",
|
||||
|
||||
145
packages/browseros-agent/scripts/generate-models.ts
Normal file
145
packages/browseros-agent/scripts/generate-models.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
/**
 * Fetches models.dev/api.json and generates a compact models data file
 * for BrowserOS. Run: bun scripts/generate-models.ts
 */

// Upstream catalog of LLM providers/models.
const API_URL = 'https://models.dev/api.json'
// Destination of the generated snapshot, resolved relative to this script.
// NOTE(review): `.pathname` keeps percent-encoding and a leading slash on
// Windows drive paths — fine for plain POSIX repo paths; confirm if the
// script must run on Windows.
const OUTPUT_PATH = new URL(
  '../apps/agent/lib/llm-providers/models-dev-data.json',
  import.meta.url,
).pathname

// Raw model shape as served by models.dev (snake_case fields).
interface ModelsDevModel {
  id: string
  name: string
  family?: string
  attachment: boolean
  reasoning: boolean
  tool_call: boolean
  structured_output?: boolean
  modalities: { input: string[]; output: string[] }
  cost?: {
    input: number
    output: number
    cache_read?: number
    cache_write?: number
  }
  limit: { context: number; output: number; input?: number }
  status?: string
  release_date: string
  last_updated: string
}

// Raw provider shape as served by models.dev; models keyed by model ID.
interface ModelsDevProvider {
  id: string
  name: string
  npm: string
  api?: string
  doc: string
  env: string[]
  models: Record<string, ModelsDevModel>
}

// Compact model shape written to the snapshot (camelCase, UI-facing).
interface OutputModel {
  id: string
  name: string
  contextWindow: number
  maxOutput: number
  supportsImages: boolean
  supportsReasoning: boolean
  supportsToolCall: boolean
  inputCost?: number
  outputCost?: number
}

// Provider bucket written to the snapshot; models as a sorted array.
interface OutputProvider {
  name: string
  api?: string
  doc: string
  models: OutputModel[]
}

// models.dev ID → BrowserOS provider ID
const PROVIDER_MAP: Record<string, string> = {
  anthropic: 'anthropic',
  openai: 'openai',
  google: 'google',
  openrouter: 'openrouter',
  azure: 'azure',
  'amazon-bedrock': 'bedrock',
  lmstudio: 'lmstudio',
  moonshotai: 'moonshot',
  'github-copilot': 'github-copilot',
}
|
||||
|
||||
function transformModel(model: ModelsDevModel): OutputModel | null {
|
||||
if (model.status === 'deprecated') return null
|
||||
|
||||
const supportsImages =
|
||||
model.attachment || model.modalities.input.includes('image')
|
||||
|
||||
return {
|
||||
id: model.id,
|
||||
name: model.name,
|
||||
contextWindow: model.limit.context,
|
||||
maxOutput: model.limit.output,
|
||||
supportsImages,
|
||||
supportsReasoning: model.reasoning,
|
||||
supportsToolCall: model.tool_call,
|
||||
...(model.cost && {
|
||||
inputCost: model.cost.input,
|
||||
outputCost: model.cost.output,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log(`Fetching ${API_URL}...`)
|
||||
const response = await fetch(API_URL)
|
||||
if (!response.ok) throw new Error(`Failed to fetch: ${response.status}`)
|
||||
|
||||
const data: Record<string, ModelsDevProvider> = await response.json()
|
||||
console.log(`Fetched ${Object.keys(data).length} providers`)
|
||||
|
||||
const output: Record<string, OutputProvider> = {}
|
||||
|
||||
for (const [modelsDevId, browserosId] of Object.entries(PROVIDER_MAP)) {
|
||||
const provider = data[modelsDevId]
|
||||
if (!provider) {
|
||||
console.warn(`Provider not found in models.dev: ${modelsDevId}`)
|
||||
continue
|
||||
}
|
||||
|
||||
const models = Object.values(provider.models)
|
||||
.map(transformModel)
|
||||
.filter((m): m is OutputModel => m !== null)
|
||||
.sort((a, b) => {
|
||||
const dateA = provider.models[a.id]?.last_updated ?? ''
|
||||
const dateB = provider.models[b.id]?.last_updated ?? ''
|
||||
return dateB.localeCompare(dateA)
|
||||
})
|
||||
|
||||
output[browserosId] = {
|
||||
name: provider.name,
|
||||
...(provider.api && { api: provider.api }),
|
||||
doc: provider.doc,
|
||||
models,
|
||||
}
|
||||
}
|
||||
|
||||
const totalModels = Object.values(output).reduce(
|
||||
(sum, p) => sum + p.models.length,
|
||||
0,
|
||||
)
|
||||
console.log(
|
||||
`Generated ${Object.keys(output).length} providers with ${totalModels} models`,
|
||||
)
|
||||
|
||||
await Bun.write(OUTPUT_PATH, JSON.stringify(output, null, 2))
|
||||
console.log(`Written to ${OUTPUT_PATH}`)
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
Reference in New Issue
Block a user