diff --git a/src/lib/models/providers/groq/groqLLM.ts b/src/lib/models/providers/groq/groqLLM.ts
new file mode 100644
index 0000000..dfcb294
--- /dev/null
+++ b/src/lib/models/providers/groq/groqLLM.ts
@@ -0,0 +1,5 @@
+import OpenAILLM from '../openai/openaiLLM';
+
+class GroqLLM extends OpenAILLM {}
+
+export default GroqLLM;
diff --git a/src/lib/models/providers/groq/index.ts b/src/lib/models/providers/groq/index.ts
new file mode 100644
index 0000000..f50323e
--- /dev/null
+++ b/src/lib/models/providers/groq/index.ts
@@ -0,0 +1,113 @@
+import { UIConfigField } from '@/lib/config/types';
+import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
+import { Model, ModelList, ProviderMetadata } from '../../types';
+import BaseEmbedding from '../../base/embedding';
+import BaseModelProvider from '../../base/provider';
+import BaseLLM from '../../base/llm';
+import GroqLLM from './groqLLM';
+
+interface GroqConfig {
+  apiKey: string;
+}
+
+const providerConfigFields: UIConfigField[] = [
+  {
+    type: 'password',
+    name: 'API Key',
+    key: 'apiKey',
+    description: 'Your Groq API key',
+    required: true,
+    placeholder: 'Groq API Key',
+    env: 'GROQ_API_KEY',
+    scope: 'server',
+  },
+];
+
+class GroqProvider extends BaseModelProvider<GroqConfig> {
+  constructor(id: string, name: string, config: GroqConfig) {
+    super(id, name, config);
+  }
+
+  async getDefaultModels(): Promise<ModelList> {
+    const res = await fetch(`https://api.groq.com/openai/v1/models`, {
+      method: 'GET',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${this.config.apiKey}`,
+      },
+    });
+
+    const data = await res.json();
+
+    const defaultChatModels: Model[] = [];
+
+    data.data.forEach((m: any) => {
+      defaultChatModels.push({
+        key: m.id,
+        name: m.id,
+      });
+    });
+
+    return {
+      embedding: [],
+      chat: defaultChatModels,
+    };
+  }
+
+  async getModelList(): Promise<ModelList> {
+    const defaultModels = await this.getDefaultModels();
+    const configProvider = getConfiguredModelProviderById(this.id)!;
+
+    return {
+      embedding: [
+        ...defaultModels.embedding,
+        ...configProvider.embeddingModels,
+      ],
+      chat: [...defaultModels.chat, ...configProvider.chatModels],
+    };
+  }
+
+  async loadChatModel(key: string): Promise<BaseLLM<any>> {
+    const modelList = await this.getModelList();
+
+    const exists = modelList.chat.find((m) => m.key === key);
+
+    if (!exists) {
+      throw new Error('Error Loading Groq Chat Model. Invalid Model Selected');
+    }
+
+    return new GroqLLM({
+      apiKey: this.config.apiKey,
+      model: key,
+      baseURL: 'https://api.groq.com/openai/v1',
+    });
+  }
+
+  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
+    throw new Error('Groq Provider does not support embedding models.');
+  }
+
+  static parseAndValidate(raw: any): GroqConfig {
+    if (!raw || typeof raw !== 'object')
+      throw new Error('Invalid config provided. Expected object');
+    if (!raw.apiKey)
+      throw new Error('Invalid config provided. API key must be provided');
+
+    return {
+      apiKey: String(raw.apiKey),
+    };
+  }
+
+  static getProviderConfigFields(): UIConfigField[] {
+    return providerConfigFields;
+  }
+
+  static getProviderMetadata(): ProviderMetadata {
+    return {
+      key: 'groq',
+      name: 'Groq',
+    };
+  }
+}
+
+export default GroqProvider;
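
Not part of the patch, but a minimal sketch of how the new provider could be exercised end to end, assuming the surrounding registry supplies the id/name pair and that GROQ_API_KEY is set in the environment; the smokeTestGroqProvider helper and the 'groq'/'Groq' constructor arguments below are illustrative only:

    import GroqProvider from '@/lib/models/providers/groq';

    // Illustrative smoke test; not part of the diff above.
    async function smokeTestGroqProvider() {
      // parseAndValidate throws unless an apiKey is present, so config is a valid GroqConfig.
      const config = GroqProvider.parseAndValidate({
        apiKey: process.env.GROQ_API_KEY,
      });

      // The id/name values here are assumptions; the real ones come from the provider registry.
      const provider = new GroqProvider('groq', 'Groq', config);

      // getModelList() merges the live /openai/v1/models response with any manually configured models.
      const models = await provider.getModelList();

      // loadChatModel() returns a GroqLLM, i.e. the OpenAI-compatible client
      // pointed at https://api.groq.com/openai/v1.
      return provider.loadChatModel(models.chat[0].key);
    }

Since GroqLLM is an empty subclass of OpenAILLM (the Groq base URL is injected in loadChatModel), any review feedback on the OpenAI provider's chat handling applies here as well.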