Compare commits

...

3 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Naman Bansal | 3509f84cc6 | Merge 7288c97326 into a8d410bc2f | 2025-04-02 12:30:53 +05:30 |
| namanb | 7288c97326 | feat(providers): changed readme as well | 2025-04-02 12:26:38 +05:30 |
| namanb | 3545137bc0 | feat(providers): added openrouter support | 2025-04-02 12:24:27 +05:30 |
8 changed files with 98 additions and 1 deletion

View File

@@ -89,6 +89,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
- `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**.
- `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
- `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**.
- `OPENROUTER`: Your OpenRouter API key. **You only need to fill this if you wish to use models via OpenRouter**.
- `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**.
**Note**: You can change these after starting Perplexica from the settings dialog.
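The README only says where the key goes, not how to check it. A quick hedged way to verify an OpenRouter key (not part of this PR; the endpoint and model id come from OpenRouter's documented OpenAI-compatible API, and `OPENROUTER_API_KEY` is a placeholder for wherever you keep the key):

```ts
// Hedged sketch: verify an OpenRouter key with a minimal completion request.
const res = await fetch('https://openrouter.ai/api/v1/chat/completions', {
  method: 'POST',
  headers: {
    Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}`,
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    model: 'openai/gpt-4o-mini', // any model id from /api/v1/models works
    messages: [{ role: 'user', content: 'ping' }],
  }),
});
console.log(res.status); // 200 means the key was accepted
```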

View File

@@ -11,6 +11,9 @@ API_KEY = ""
[MODELS.ANTHROPIC]
API_KEY = ""
[MODELS.OPENROUTER]
API_KEY = ""
[MODELS.GEMINI]
API_KEY = ""

View File

@@ -5,6 +5,7 @@ import {
getCustomOpenaiModelName,
getGeminiApiKey,
getGroqApiKey,
getOpenrouterApiKey,
getOllamaApiEndpoint,
getOpenaiApiKey,
updateConfig,
@@ -52,6 +53,7 @@ export const GET = async (req: Request) => {
config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
config['openrouterApiKey'] = getOpenrouterApiKey();
config['geminiApiKey'] = getGeminiApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
@@ -79,6 +81,9 @@ export const POST = async (req: Request) => {
GROQ: {
API_KEY: config.groqApiKey,
},
OPENROUTER: {
API_KEY: config.openrouterApiKey,
},
ANTHROPIC: {
API_KEY: config.anthropicApiKey,
},
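Taken together, the GET handler exposes the stored key to the settings UI and the POST handler writes it back under `MODELS.OPENROUTER.API_KEY`. A sketch of the round trip from the client side, assuming the route is served at `/api/config` (the path is not shown in this diff) and abbreviated to the new field (the real settings page sends the full config object):

```ts
// Hedged sketch: persist the key, then read it back.
await fetch('/api/config', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ openrouterApiKey: 'sk-or-v1-...' }),
});
const config = await (await fetch('/api/config')).json();
console.log(config.openrouterApiKey);
```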

View File

@@ -125,7 +125,7 @@ export const POST = async (req: Request) => {
embeddings,
body.optimizationMode,
[],
"",
'',
);
if (!body.stream) {

View File

@@ -17,6 +17,7 @@ interface SettingsType {
};
openaiApiKey: string;
groqApiKey: string;
openrouterApiKey: string;
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
@@ -801,6 +802,25 @@ const Page = () => {
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
OpenRouter API Key
</p>
<Input
type="text"
placeholder="OpenRouter API Key"
value={config.openrouterApiKey}
isSaving={savingStates['openrouterApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
openrouterApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('openrouterApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Anthropic API Key

View File

@@ -25,6 +25,9 @@ interface Config {
OLLAMA: {
API_URL: string;
};
OPENROUTER: {
API_KEY: string;
};
CUSTOM_OPENAI: {
API_URL: string;
API_KEY: string;
@@ -54,6 +57,8 @@ export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
export const getOpenrouterApiKey = () => loadConfig().MODELS.OPENROUTER.API_KEY;
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
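The new `Config` entry and one-line getter mirror the existing providers, so the key resolves exactly the way Groq's or Anthropic's does:

```ts
// Hedged sketch: the getter simply walks the parsed TOML.
// With the sample config above, this returns '' until the user saves a key.
const key = getOpenrouterApiKey(); // loadConfig().MODELS.OPENROUTER.API_KEY
```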

View File

@@ -12,6 +12,7 @@ import { loadGroqChatModels } from './groq';
import { loadAnthropicChatModels } from './anthropic';
import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
import { loadTransformersEmbeddingsModels } from './transformers';
import { loadOpenrouterChatModels } from './openrouter';
export interface ChatModel {
displayName: string;
@@ -32,6 +33,7 @@ export const chatModelProviders: Record<
groq: loadGroqChatModels,
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
openrouter: loadOpenrouterChatModels,
};
export const embeddingModelProviders: Record<
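With the loader registered under the `openrouter` key, anything that resolves providers by name picks the new models up automatically. A minimal consumer sketch (assumed; this diff does not show the call site):

```ts
// Hedged sketch: map a provider name from user settings to its loaded models.
async function getChatModelsFor(provider: string) {
  const loader = chatModelProviders[provider];
  if (!loader) throw new Error(`Unknown provider: ${provider}`);
  return loader(); // e.g. getChatModelsFor('openrouter')
}
```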

View File

@@ -0,0 +1,61 @@
import { ChatOpenAI } from '@langchain/openai';
import { getOpenrouterApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
// Model catalog fetched from OpenRouter, cached at module level.
let openrouterChatModels: Record<string, string>[] = [];

// OpenRouter's /models endpoint is public, so listing the catalog needs no API key.
async function fetchModelList(): Promise<void> {
try {
const response = await fetch('https://openrouter.ai/api/v1/models', {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (!response.ok) {
throw new Error(`API request failed with status: ${response.status}`);
}
const data = await response.json();
openrouterChatModels = data.data.map((model: any) => ({
displayName: model.name,
key: model.id,
}));
} catch (error) {
console.error('Error fetching models:', error);
}
}
export const loadOpenrouterChatModels = async () => {
  const openrouterApiKey = getOpenrouterApiKey();
  if (!openrouterApiKey) return {};

  // Only hit the network once a key is actually configured.
  await fetchModelList();

  try {
    const chatModels: Record<string, ChatModel> = {};
    openrouterChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        // OpenRouter exposes an OpenAI-compatible API, so ChatOpenAI works
        // once its base URL points at openrouter.ai.
        model: new ChatOpenAI({
          openAIApiKey: openrouterApiKey,
          modelName: model.key,
          temperature: 0.7,
          configuration: {
            baseURL: 'https://openrouter.ai/api/v1',
          },
        }) as unknown as BaseChatModel,
      };
    });
    return chatModels;
  } catch (err) {
    console.error(`Error loading OpenRouter models: ${err}`);
    return {};
  }
};
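Usage follows the same shape as the other loaders: the result is keyed by OpenRouter model id, so callers can enumerate or pick a model directly. A hedged example:

```ts
// Returns {} until an OpenRouter key is configured.
const models = await loadOpenrouterChatModels();
console.log(Object.keys(models)); // e.g. ['openai/gpt-4o', 'anthropic/claude-3.5-sonnet', ...]
```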