feat(openAiProvider): load models from config as well

ItzCrazyKns
2025-10-14 13:05:19 +05:30
parent 87226957f1
commit a375de73cc
2 changed files with 11 additions and 5 deletions


@@ -1,3 +1,5 @@
+import { Model } from '../models/types';
+
 type BaseUIConfigField = {
   name: string;
   key: string;
@@ -40,8 +42,8 @@ type ConfigModelProvider = {
   id: string;
   name: string;
   type: string;
-  chatModels: string[];
-  embeddingModels: string[];
+  chatModels: Model[];
+  embeddingModels: Model[];
   config: { [key: string]: any };
   hash: string;
 };
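For context, a hedged sketch of what a provider entry in the config might look like after this change, with chatModels and embeddingModels now holding structured Model objects instead of plain strings. The actual Model type lives in '../models/types' and is not shown in this diff; the name and key fields below are assumptions for illustration only.

// Sketch only: local stand-ins for the project types touched by this diff.
// The real Model type's fields are assumed here, not confirmed by the source.
type Model = { name: string; key: string };

type ConfigModelProvider = {
  id: string;
  name: string;
  type: string;
  chatModels: Model[];      // previously string[]
  embeddingModels: Model[]; // previously string[]
  config: { [key: string]: any };
  hash: string;
};

// A hypothetical config entry declaring extra models for an OpenAI provider.
const exampleProvider: ConfigModelProvider = {
  id: 'openai',
  name: 'OpenAI',
  type: 'openai',
  chatModels: [{ name: 'GPT-4 Omni', key: 'gpt-4o' }],
  embeddingModels: [
    { name: 'Text Embedding 3 Small', key: 'text-embedding-3-small' },
  ],
  config: { apiKey: 'sk-...' },
  hash: 'example-hash',
};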


@@ -4,6 +4,7 @@ import BaseModelProvider from './baseProvider';
 import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
 import { Embeddings } from '@langchain/core/embeddings';
 import { UIConfigField } from '@/lib/config/types';
+import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
 
 interface OpenAIConfig {
   apiKey: string;
@@ -132,12 +133,15 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
   }
 
   async getModelList(): Promise<ModelList> {
-    /* Todo: IMPLEMENT MODEL READING FROM CONFIG FILE */
     const defaultModels = await this.getDefaultModels();
+    const configProvider = getConfiguredModelProviderById(this.id)!;
 
     return {
-      embedding: [...defaultModels.embedding],
-      chat: [...defaultModels.chat],
+      embedding: [
+        ...defaultModels.embedding,
+        ...configProvider.embeddingModels,
+      ],
+      chat: [...defaultModels.chat, ...configProvider.chatModels],
     };
   }
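A minimal standalone sketch of the merge behaviour introduced above: the provider's built-in defaults come first, and any models declared for this provider in the config file are appended after them. The shapes of Model and ModelList are assumptions here (not confirmed by the diff), as are the example model names and keys.

// Sketch of the spread-based merge used in getModelList above, with assumed
// shapes for Model and ModelList.
type Model = { name: string; key: string };
type ModelList = { chat: Model[]; embedding: Model[] };

const defaultModels: ModelList = {
  chat: [{ name: 'GPT-4o Mini', key: 'gpt-4o-mini' }],
  embedding: [
    { name: 'Text Embedding 3 Small', key: 'text-embedding-3-small' },
  ],
};

// Models a user might have declared for this provider in the config file.
const configProvider = {
  chatModels: [{ name: 'My fine-tune', key: 'ft:gpt-4o-mini:custom' }] as Model[],
  embeddingModels: [] as Model[],
};

// Defaults first, config-declared models appended, mirroring the new return value.
const modelList: ModelList = {
  embedding: [...defaultModels.embedding, ...configProvider.embeddingModels],
  chat: [...defaultModels.chat, ...configProvider.chatModels],
};

console.log(modelList.chat.map((m) => m.key));
// ['gpt-4o-mini', 'ft:gpt-4o-mini:custom']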