From d6c364fdcbd5e10cdef2da961976e7952ee83990 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Sat, 22 Nov 2025 22:23:10 +0530
Subject: [PATCH] feat(models): remove old providers

---
 src/lib/models/providers/aiml.ts         | 152 ----------------------
 src/lib/models/providers/anthropic.ts    | 115 -----------------
 src/lib/models/providers/deepseek.ts     | 107 ---------------
 src/lib/models/providers/gemini.ts       | 145 ---------------------
 src/lib/models/providers/groq.ts         | 118 -----------------
 src/lib/models/providers/lemonade.ts     | 158 -----------------------
 src/lib/models/providers/lmstudio.ts     | 148 ---------------------
 src/lib/models/providers/transformers.ts |  87 -------------
 8 files changed, 1030 deletions(-)
 delete mode 100644 src/lib/models/providers/aiml.ts
 delete mode 100644 src/lib/models/providers/anthropic.ts
 delete mode 100644 src/lib/models/providers/deepseek.ts
 delete mode 100644 src/lib/models/providers/gemini.ts
 delete mode 100644 src/lib/models/providers/groq.ts
 delete mode 100644 src/lib/models/providers/lemonade.ts
 delete mode 100644 src/lib/models/providers/lmstudio.ts
 delete mode 100644 src/lib/models/providers/transformers.ts

diff --git a/src/lib/models/providers/aiml.ts b/src/lib/models/providers/aiml.ts
deleted file mode 100644
index 35ccf79..0000000
--- a/src/lib/models/providers/aiml.ts
+++ /dev/null
@@ -1,152 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface AimlConfig {
-  apiKey: string;
-}
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'password',
-    name: 'API Key',
-    key: 'apiKey',
-    description: 'Your AI/ML API key',
-    required: true,
-    placeholder: 'AI/ML API Key',
-    env: 'AIML_API_KEY',
-    scope: 'server',
-  },
-];
-
-class AimlProvider extends BaseModelProvider<AimlConfig> {
-  constructor(id: string, name: string, config: AimlConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    try {
-      const res = await fetch('https://api.aimlapi.com/models', {
-        method: 'GET',
-        headers: {
-          'Content-Type': 'application/json',
-          Authorization: `Bearer ${this.config.apiKey}`,
-        },
-      });
-
-      const data = await res.json();
-
-      const chatModels: Model[] = data.data
-        .filter((m: any) => m.type === 'chat-completion')
-        .map((m: any) => {
-          return {
-            name: m.id,
-            key: m.id,
-          };
-        });
-
-      const embeddingModels: Model[] = data.data
-        .filter((m: any) => m.type === 'embedding')
-        .map((m: any) => {
-          return {
-            name: m.id,
-            key: m.id,
-          };
-        });
-
-      return {
-        embedding: embeddingModels,
-        chat: chatModels,
-      };
-    } catch (err) {
-      if (err instanceof TypeError) {
-        throw new Error(
-          'Error connecting to AI/ML API. Please ensure your API key is correct and the service is available.',
-        );
-      }
-
-      throw err;
-    }
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [
-        ...defaultModels.embedding,
-        ...configProvider.embeddingModels,
-      ],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading AI/ML API Chat Model. Invalid Model Selected',
-      );
-    }
-
-    return new ChatOpenAI({
-      apiKey: this.config.apiKey,
-      temperature: 0.7,
-      model: key,
-      configuration: {
-        baseURL: 'https://api.aimlapi.com',
-      },
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    const modelList = await this.getModelList();
-    const exists = modelList.embedding.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading AI/ML API Embedding Model. Invalid Model Selected.',
-      );
-    }
-
-    return new OpenAIEmbeddings({
-      apiKey: this.config.apiKey,
-      model: key,
-      configuration: {
-        baseURL: 'https://api.aimlapi.com',
-      },
-    });
-  }
-
-  static parseAndValidate(raw: any): AimlConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.apiKey)
-      throw new Error('Invalid config provided. API key must be provided');
-
-    return {
-      apiKey: String(raw.apiKey),
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'aiml',
-      name: 'AI/ML API',
-    };
-  }
-}
-
-export default AimlProvider;
diff --git a/src/lib/models/providers/anthropic.ts b/src/lib/models/providers/anthropic.ts
deleted file mode 100644
index e071159..0000000
--- a/src/lib/models/providers/anthropic.ts
+++ /dev/null
@@ -1,115 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatAnthropic } from '@langchain/anthropic';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface AnthropicConfig {
-  apiKey: string;
-}
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'password',
-    name: 'API Key',
-    key: 'apiKey',
-    description: 'Your Anthropic API key',
-    required: true,
-    placeholder: 'Anthropic API Key',
-    env: 'ANTHROPIC_API_KEY',
-    scope: 'server',
-  },
-];
-
-class AnthropicProvider extends BaseModelProvider<AnthropicConfig> {
-  constructor(id: string, name: string, config: AnthropicConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    const res = await fetch('https://api.anthropic.com/v1/models?limit=999', {
-      method: 'GET',
-      headers: {
-        'x-api-key': this.config.apiKey,
-        'anthropic-version': '2023-06-01',
-        'Content-type': 'application/json',
-      },
-    });
-
-    if (!res.ok) {
-      throw new Error(`Failed to fetch Anthropic models: ${res.statusText}`);
-    }
-
-    const data = (await res.json()).data;
-
-    const models: Model[] = data.map((m: any) => {
-      return {
-        key: m.id,
-        name: m.display_name,
-      };
-    });
-
-    return {
-      embedding: [],
-      chat: models,
-    };
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading Anthropic Chat Model. Invalid Model Selected',
-      );
-    }
-
-    return new ChatAnthropic({
-      apiKey: this.config.apiKey,
-      temperature: 0.7,
-      model: key,
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    throw new Error('Anthropic provider does not support embedding models.');
-  }
-
-  static parseAndValidate(raw: any): AnthropicConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.apiKey)
-      throw new Error('Invalid config provided. API key must be provided');
-
-    return {
-      apiKey: String(raw.apiKey),
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'anthropic',
-      name: 'Anthropic',
-    };
-  }
-}
-
-export default AnthropicProvider;
diff --git a/src/lib/models/providers/deepseek.ts b/src/lib/models/providers/deepseek.ts
deleted file mode 100644
index 9b29d83..0000000
--- a/src/lib/models/providers/deepseek.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatOpenAI } from '@langchain/openai';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface DeepSeekConfig {
-  apiKey: string;
-}
-
-const defaultChatModels: Model[] = [
-  {
-    name: 'Deepseek Chat / DeepSeek V3.2 Exp',
-    key: 'deepseek-chat',
-  },
-  {
-    name: 'Deepseek Reasoner / DeepSeek V3.2 Exp',
-    key: 'deepseek-reasoner',
-  },
-];
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'password',
-    name: 'API Key',
-    key: 'apiKey',
-    description: 'Your DeepSeek API key',
-    required: true,
-    placeholder: 'DeepSeek API Key',
-    env: 'DEEPSEEK_API_KEY',
-    scope: 'server',
-  },
-];
-
-class DeepSeekProvider extends BaseModelProvider<DeepSeekConfig> {
-  constructor(id: string, name: string, config: DeepSeekConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    return {
-      embedding: [],
-      chat: defaultChatModels,
-    };
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading DeepSeek Chat Model. Invalid Model Selected',
-      );
-    }
-
-    return new ChatOpenAI({
-      apiKey: this.config.apiKey,
-      temperature: 0.7,
-      model: key,
-      configuration: {
-        baseURL: 'https://api.deepseek.com',
-      },
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    throw new Error('DeepSeek provider does not support embedding models.');
-  }
-
-  static parseAndValidate(raw: any): DeepSeekConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.apiKey)
-      throw new Error('Invalid config provided. API key must be provided');
-
-    return {
-      apiKey: String(raw.apiKey),
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'deepseek',
-      name: 'Deepseek AI',
-    };
-  }
-}
-
-export default DeepSeekProvider;
diff --git a/src/lib/models/providers/gemini.ts b/src/lib/models/providers/gemini.ts
deleted file mode 100644
index 6cfd913..0000000
--- a/src/lib/models/providers/gemini.ts
+++ /dev/null
@@ -1,145 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import {
-  ChatGoogleGenerativeAI,
-  GoogleGenerativeAIEmbeddings,
-} from '@langchain/google-genai';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface GeminiConfig {
-  apiKey: string;
-}
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'password',
-    name: 'API Key',
-    key: 'apiKey',
-    description: 'Your Google Gemini API key',
-    required: true,
-    placeholder: 'Google Gemini API Key',
-    env: 'GEMINI_API_KEY',
-    scope: 'server',
-  },
-];
-
-class GeminiProvider extends BaseModelProvider<GeminiConfig> {
-  constructor(id: string, name: string, config: GeminiConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    const res = await fetch(
-      `https://generativelanguage.googleapis.com/v1beta/models?key=${this.config.apiKey}`,
-      {
-        method: 'GET',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-      },
-    );
-
-    const data = await res.json();
-
-    let defaultEmbeddingModels: Model[] = [];
-    let defaultChatModels: Model[] = [];
-
-    data.models.forEach((m: any) => {
-      if (
-        m.supportedGenerationMethods.some(
-          (genMethod: string) =>
-            genMethod === 'embedText' || genMethod === 'embedContent',
-        )
-      ) {
-        defaultEmbeddingModels.push({
-          key: m.name,
-          name: m.displayName,
-        });
-      } else if (m.supportedGenerationMethods.includes('generateContent')) {
-        defaultChatModels.push({
-          key: m.name,
-          name: m.displayName,
-        });
-      }
-    });
-
-    return {
-      embedding: defaultEmbeddingModels,
-      chat: defaultChatModels,
-    };
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [
-        ...defaultModels.embedding,
-        ...configProvider.embeddingModels,
-      ],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading Gemini Chat Model. Invalid Model Selected',
-      );
-    }
-
-    return new ChatGoogleGenerativeAI({
-      apiKey: this.config.apiKey,
-      temperature: 0.7,
-      model: key,
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    const modelList = await this.getModelList();
-    const exists = modelList.embedding.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading Gemini Embedding Model. Invalid Model Selected.',
-      );
-    }
-
-    return new GoogleGenerativeAIEmbeddings({
-      apiKey: this.config.apiKey,
-      model: key,
-    });
-  }
-
-  static parseAndValidate(raw: any): GeminiConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.apiKey)
-      throw new Error('Invalid config provided. API key must be provided');
-
-    return {
-      apiKey: String(raw.apiKey),
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'gemini',
-      name: 'Google Gemini',
-    };
-  }
-}
-
-export default GeminiProvider;
diff --git a/src/lib/models/providers/groq.ts b/src/lib/models/providers/groq.ts
deleted file mode 100644
index a87ea88..0000000
--- a/src/lib/models/providers/groq.ts
+++ /dev/null
@@ -1,118 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatGroq } from '@langchain/groq';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface GroqConfig {
-  apiKey: string;
-}
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'password',
-    name: 'API Key',
-    key: 'apiKey',
-    description: 'Your Groq API key',
-    required: true,
-    placeholder: 'Groq API Key',
-    env: 'GROQ_API_KEY',
-    scope: 'server',
-  },
-];
-
-class GroqProvider extends BaseModelProvider<GroqConfig> {
-  constructor(id: string, name: string, config: GroqConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    try {
-      const res = await fetch('https://api.groq.com/openai/v1/models', {
-        method: 'GET',
-        headers: {
-          'Content-Type': 'application/json',
-          Authorization: `Bearer ${this.config.apiKey}`,
-        },
-      });
-
-      const data = await res.json();
-
-      const models: Model[] = data.data.map((m: any) => {
-        return {
-          name: m.id,
-          key: m.id,
-        };
-      });
-
-      return {
-        embedding: [],
-        chat: models,
-      };
-    } catch (err) {
-      if (err instanceof TypeError) {
-        throw new Error(
-          'Error connecting to Groq API. Please ensure your API key is correct and the Groq service is available.',
-        );
-      }
-
-      throw err;
-    }
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error('Error Loading Groq Chat Model. Invalid Model Selected');
-    }
-
-    return new ChatGroq({
-      apiKey: this.config.apiKey,
-      temperature: 0.7,
-      model: key,
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    throw new Error('Groq provider does not support embedding models.');
-  }
-
-  static parseAndValidate(raw: any): GroqConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.apiKey)
-      throw new Error('Invalid config provided. API key must be provided');
-
-    return {
-      apiKey: String(raw.apiKey),
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'groq',
-      name: 'Groq',
-    };
-  }
-}
-
-export default GroqProvider;
diff --git a/src/lib/models/providers/lemonade.ts b/src/lib/models/providers/lemonade.ts
deleted file mode 100644
index 20680a8..0000000
--- a/src/lib/models/providers/lemonade.ts
+++ /dev/null
@@ -1,158 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface LemonadeConfig {
-  baseURL: string;
-  apiKey?: string;
-}
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'string',
-    name: 'Base URL',
-    key: 'baseURL',
-    description: 'The base URL for Lemonade API',
-    required: true,
-    placeholder: 'https://api.lemonade.ai/v1',
-    env: 'LEMONADE_BASE_URL',
-    scope: 'server',
-  },
-  {
-    type: 'password',
-    name: 'API Key',
-    key: 'apiKey',
-    description: 'Your Lemonade API key (optional)',
-    required: false,
-    placeholder: 'Lemonade API Key',
-    env: 'LEMONADE_API_KEY',
-    scope: 'server',
-  },
-];
-
-class LemonadeProvider extends BaseModelProvider<LemonadeConfig> {
-  constructor(id: string, name: string, config: LemonadeConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    try {
-      const headers: Record<string, string> = {
-        'Content-Type': 'application/json',
-      };
-
-      if (this.config.apiKey) {
-        headers['Authorization'] = `Bearer ${this.config.apiKey}`;
-      }
-
-      const res = await fetch(`${this.config.baseURL}/models`, {
-        method: 'GET',
-        headers,
-      });
-
-      const data = await res.json();
-
-      const models: Model[] = data.data.map((m: any) => {
-        return {
-          name: m.id,
-          key: m.id,
-        };
-      });
-
-      return {
-        embedding: models,
-        chat: models,
-      };
-    } catch (err) {
-      if (err instanceof TypeError) {
-        throw new Error(
-          'Error connecting to Lemonade API. Please ensure the base URL is correct and the service is available.',
-        );
-      }
-
-      throw err;
-    }
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [
-        ...defaultModels.embedding,
-        ...configProvider.embeddingModels,
-      ],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading Lemonade Chat Model. Invalid Model Selected',
-      );
-    }
-
-    return new ChatOpenAI({
-      apiKey: this.config.apiKey || 'not-needed',
-      temperature: 0.7,
-      model: key,
-      configuration: {
-        baseURL: this.config.baseURL,
-      },
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    const modelList = await this.getModelList();
-    const exists = modelList.embedding.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading Lemonade Embedding Model. Invalid Model Selected.',
-      );
-    }
-
-    return new OpenAIEmbeddings({
-      apiKey: this.config.apiKey || 'not-needed',
-      model: key,
-      configuration: {
-        baseURL: this.config.baseURL,
-      },
-    });
-  }
-
-  static parseAndValidate(raw: any): LemonadeConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.baseURL)
-      throw new Error('Invalid config provided. Base URL must be provided');
-
-    return {
-      baseURL: String(raw.baseURL),
-      apiKey: raw.apiKey ? String(raw.apiKey) : undefined,
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'lemonade',
-      name: 'Lemonade',
-    };
-  }
-}
-
-export default LemonadeProvider;
diff --git a/src/lib/models/providers/lmstudio.ts b/src/lib/models/providers/lmstudio.ts
deleted file mode 100644
index 3a73a34..0000000
--- a/src/lib/models/providers/lmstudio.ts
+++ /dev/null
@@ -1,148 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-
-interface LMStudioConfig {
-  baseURL: string;
-}
-
-const providerConfigFields: UIConfigField[] = [
-  {
-    type: 'string',
-    name: 'Base URL',
-    key: 'baseURL',
-    description: 'The base URL for LM Studio server',
-    required: true,
-    placeholder: 'http://localhost:1234',
-    env: 'LM_STUDIO_BASE_URL',
-    scope: 'server',
-  },
-];
-
-class LMStudioProvider extends BaseModelProvider<LMStudioConfig> {
-  constructor(id: string, name: string, config: LMStudioConfig) {
-    super(id, name, config);
-  }
-
-  private normalizeBaseURL(url: string): string {
-    const trimmed = url.trim().replace(/\/+$/, '');
-    return trimmed.endsWith('/v1') ? trimmed : `${trimmed}/v1`;
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    try {
-      const baseURL = this.normalizeBaseURL(this.config.baseURL);
-
-      const res = await fetch(`${baseURL}/models`, {
-        method: 'GET',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-      });
-
-      const data = await res.json();
-
-      const models: Model[] = data.data.map((m: any) => {
-        return {
-          name: m.id,
-          key: m.id,
-        };
-      });
-
-      return {
-        embedding: models,
-        chat: models,
-      };
-    } catch (err) {
-      if (err instanceof TypeError) {
-        throw new Error(
-          'Error connecting to LM Studio. Please ensure the base URL is correct and the LM Studio server is running.',
-        );
-      }
-
-      throw err;
-    }
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [
-        ...defaultModels.embedding,
-        ...configProvider.embeddingModels,
-      ],
-      chat: [...defaultModels.chat, ...configProvider.chatModels],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    const modelList = await this.getModelList();
-
-    const exists = modelList.chat.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading LM Studio Chat Model. Invalid Model Selected',
-      );
-    }
-
-    return new ChatOpenAI({
-      apiKey: 'lm-studio',
-      temperature: 0.7,
-      model: key,
-      streaming: true,
-      configuration: {
-        baseURL: this.normalizeBaseURL(this.config.baseURL),
-      },
-    });
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    const modelList = await this.getModelList();
-    const exists = modelList.embedding.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading LM Studio Embedding Model. Invalid Model Selected.',
-      );
-    }
-
-    return new OpenAIEmbeddings({
-      apiKey: 'lm-studio',
-      model: key,
-      configuration: {
-        baseURL: this.normalizeBaseURL(this.config.baseURL),
-      },
-    });
-  }
-
-  static parseAndValidate(raw: any): LMStudioConfig {
-    if (!raw || typeof raw !== 'object')
-      throw new Error('Invalid config provided. Expected object');
-    if (!raw.baseURL)
-      throw new Error('Invalid config provided. Base URL must be provided');
-
-    return {
-      baseURL: String(raw.baseURL),
-    };
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'lmstudio',
-      name: 'LM Studio',
-    };
-  }
-}
-
-export default LMStudioProvider;
diff --git a/src/lib/models/providers/transformers.ts b/src/lib/models/providers/transformers.ts
deleted file mode 100644
index afd6b9e..0000000
--- a/src/lib/models/providers/transformers.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { Embeddings } from '@langchain/core/embeddings';
-import { UIConfigField } from '@/lib/config/types';
-import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-import { HuggingFaceTransformersEmbeddings } from '@langchain/community/embeddings/huggingface_transformers';
-interface TransformersConfig {}
-
-const defaultEmbeddingModels: Model[] = [
-  {
-    name: 'all-MiniLM-L6-v2',
-    key: 'Xenova/all-MiniLM-L6-v2',
-  },
-  {
-    name: 'mxbai-embed-large-v1',
-    key: 'mixedbread-ai/mxbai-embed-large-v1',
-  },
-  {
-    name: 'nomic-embed-text-v1',
-    key: 'Xenova/nomic-embed-text-v1',
-  },
-];
-
-const providerConfigFields: UIConfigField[] = [];
-
-class TransformersProvider extends BaseModelProvider<TransformersConfig> {
-  constructor(id: string, name: string, config: TransformersConfig) {
-    super(id, name, config);
-  }
-
-  async getDefaultModels(): Promise<ModelList> {
-    return {
-      embedding: [...defaultEmbeddingModels],
-      chat: [],
-    };
-  }
-
-  async getModelList(): Promise<ModelList> {
-    const defaultModels = await this.getDefaultModels();
-    const configProvider = getConfiguredModelProviderById(this.id)!;
-
-    return {
-      embedding: [
-        ...defaultModels.embedding,
-        ...configProvider.embeddingModels,
-      ],
-      chat: [],
-    };
-  }
-
-  async loadChatModel(key: string): Promise<BaseChatModel> {
-    throw new Error('Transformers Provider does not support chat models.');
-  }
-
-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
-    const modelList = await this.getModelList();
-    const exists = modelList.embedding.find((m) => m.key === key);
-
-    if (!exists) {
-      throw new Error(
-        'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
-      );
-    }
-
-    return new HuggingFaceTransformersEmbeddings({
-      model: key,
-    });
-  }
-
-  static parseAndValidate(raw: any): TransformersConfig {
-    return {};
-  }
-
-  static getProviderConfigFields(): UIConfigField[] {
-    return providerConfigFields;
-  }
-
-  static getProviderMetadata(): ProviderMetadata {
-    return {
-      key: 'transformers',
-      name: 'Transformers',
-    };
-  }
-}
-
-export default TransformersProvider;