feat(providers): add gemini

ItzCrazyKns
2025-12-24 13:55:32 +05:30
parent d4c276ab93
commit c4349f3d5c
3 changed files with 154 additions and 0 deletions

@@ -0,0 +1,5 @@
import OpenAIEmbedding from '../openai/openaiEmbedding';

class GeminiEmbedding extends OpenAIEmbedding {}

export default GeminiEmbedding;

@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

class GeminiLLM extends OpenAILLM {}

export default GeminiLLM;
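
Both classes are thin subclasses that reuse the existing OpenAI client code; the provider in the next file points them at Gemini's OpenAI-compatible endpoint. A rough sketch (not part of the commit) of the construction the provider performs, with option names mirroring loadChatModel() below; the model key and env-var lookup are illustrative:

import GeminiLLM from './geminiLLM';

const llm = new GeminiLLM({
  apiKey: process.env.GEMINI_API_KEY!, // illustrative; normally taken from GeminiConfig
  model: 'models/gemini-1.5-flash', // illustrative key; real keys come from getDefaultModels()
  baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai',
});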

@@ -0,0 +1,144 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import GeminiEmbedding from './geminiEmbedding';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import GeminiLLM from './geminiLLM';

interface GeminiConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Gemini API key',
    required: true,
    placeholder: 'Gemini API Key',
    env: 'GEMINI_API_KEY',
    scope: 'server',
  },
];
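
// The `env` entry above suggests the key can also be supplied via the GEMINI_API_KEY
// environment variable (that resolution happens in the config layer, not in this file);
// parseAndValidate() below only needs the resolved { apiKey } object.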

class GeminiProvider extends BaseModelProvider<GeminiConfig> {
  constructor(id: string, name: string, config: GeminiConfig) {
    super(id, name, config);
  }
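
  // getDefaultModels() below consumes Google's ListModels response, assumed here to
  // look like:
  //   { models: [{ name, displayName, supportedGenerationMethods, ... }, ...] }
  // Models whose supportedGenerationMethods include 'embedText'/'embedContent' are
  // treated as embedding models; those exposing 'generateContent' as chat models.
  // Note: if the request fails (e.g. invalid API key), data.models is undefined and
  // the forEach below will throw.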
  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch(
      `https://generativelanguage.googleapis.com/v1beta/models?key=${this.config.apiKey}`,
      {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
        },
      },
    );

    const data = await res.json();

    let defaultEmbeddingModels: Model[] = [];
    let defaultChatModels: Model[] = [];

    data.models.forEach((m: any) => {
      if (
        m.supportedGenerationMethods.some(
          (genMethod: string) =>
            genMethod === 'embedText' || genMethod === 'embedContent',
        )
      ) {
        defaultEmbeddingModels.push({
          key: m.name,
          name: m.displayName,
        });
      } else if (m.supportedGenerationMethods.includes('generateContent')) {
        defaultChatModels.push({
          key: m.name,
          name: m.displayName,
        });
      }
    });

    return {
      embedding: defaultEmbeddingModels,
      chat: defaultChatModels,
    };
  }
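
  // getModelList() merges the models discovered from the API with any user-configured
  // chat/embedding models stored for this provider. The non-null assertion on
  // getConfiguredModelProviderById(this.id) assumes the provider id has already been
  // registered in the server config registry.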
  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Gemini Chat Model. Invalid Model Selected',
      );
    }

    return new GeminiLLM({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai',
    });
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Gemini Embedding Model. Invalid Model Selected.',
      );
    }

    return new GeminiEmbedding({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai',
    });
  }

  static parseAndValidate(raw: any): GeminiConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'gemini',
      name: 'Gemini',
    };
  }
}

export default GeminiProvider;
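
For reference, a minimal sketch (not part of the commit) of exercising the new provider on its own. The import path, the 'gemini' id/name values, and the env-var lookup are illustrative; getModelList() and loadChatModel() additionally require the id to be registered in the server config registry.

import GeminiProvider from '@/lib/models/providers/gemini'; // illustrative import path

const config = GeminiProvider.parseAndValidate({
  apiKey: process.env.GEMINI_API_KEY,
});
const provider = new GeminiProvider('gemini', 'Gemini', config);

// getDefaultModels() calls the ListModels endpoint directly, so it works without
// any registry state:
const { chat, embedding } = await provider.getDefaultModels();
console.log(chat.length, embedding.length);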