mirror of https://github.com/ItzCrazyKns/Perplexica.git

Branch: feat/model
Commit: 45a02477a0

@@ -8,7 +8,6 @@ import {
   getOllamaApiEndpoint,
   getOpenaiApiKey,
   getDeepseekApiKey,
-  getLMStudioApiEndpoint,
   updateConfig,
 } from '@/lib/config';
 import {

@@ -52,7 +51,6 @@ export const GET = async (req: Request) => {
 
   config['openaiApiKey'] = getOpenaiApiKey();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
-  config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
   config['anthropicApiKey'] = getAnthropicApiKey();
   config['groqApiKey'] = getGroqApiKey();
   config['geminiApiKey'] = getGeminiApiKey();

@@ -95,9 +93,6 @@ export const POST = async (req: Request) => {
     DEEPSEEK: {
       API_KEY: config.deepseekApiKey,
     },
-    LM_STUDIO: {
-      API_URL: config.lmStudioApiUrl,
-    },
     CUSTOM_OPENAI: {
       API_URL: config.customOpenaiApiUrl,
       API_KEY: config.customOpenaiApiKey,

@@ -21,7 +21,6 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
-  lmStudioApiUrl: string;
   deepseekApiKey: string;
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;

@@ -549,10 +548,8 @@ const Page = () => {
 options={Object.keys(config.chatModelProviders).map(
   (provider) => ({
     value: provider,
-    label:
-      (PROVIDER_METADATA as any)[provider]?.displayName ||
-      provider.charAt(0).toUpperCase() +
-        provider.slice(1),
+    label: (PROVIDER_METADATA as any)[provider]?.displayName ||
+      provider.charAt(0).toUpperCase() + provider.slice(1),
   }),
 )}
 />
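
Note: the old and new forms compute the same label; only the line wrapping changed. For reference, the fallback reads like this (a minimal sketch; provider keys are whatever PROVIDER_METADATA exposes):

  // 'openai' -> 'OpenAI' from metadata; unknown keys fall back to simple capitalization
  const label =
    (PROVIDER_METADATA as any)[provider]?.displayName ||
    provider.charAt(0).toUpperCase() + provider.slice(1);
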
@@ -692,10 +689,8 @@ const Page = () => {
 options={Object.keys(config.embeddingModelProviders).map(
   (provider) => ({
     value: provider,
-    label:
-      (PROVIDER_METADATA as any)[provider]?.displayName ||
-      provider.charAt(0).toUpperCase() +
-        provider.slice(1),
+    label: (PROVIDER_METADATA as any)[provider]?.displayName ||
+      provider.charAt(0).toUpperCase() + provider.slice(1),
   }),
 )}
 />

@@ -862,25 +857,6 @@ const Page = () => {
     onSave={(value) => saveConfig('deepseekApiKey', value)}
   />
 </div>
-
-<div className="flex flex-col space-y-1">
-  <p className="text-black/70 dark:text-white/70 text-sm">
-    LM Studio API URL
-  </p>
-  <Input
-    type="text"
-    placeholder="LM Studio API URL"
-    value={config.lmStudioApiUrl}
-    isSaving={savingStates['lmStudioApiUrl']}
-    onChange={(e) => {
-      setConfig((prev) => ({
-        ...prev!,
-        lmStudioApiUrl: e.target.value,
-      }));
-    }}
-    onSave={(value) => saveConfig('lmStudioApiUrl', value)}
-  />
-</div>
 </div>
 </SettingsSection>
 </div>

@@ -60,7 +60,7 @@ const loadConfig = () => {
     fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
   ) as any as Config;
 }
 
 // Client-side fallback - settings will be loaded via API
 return {} as Config;
 };

@@ -94,8 +94,7 @@ export const getCustomOpenaiApiUrl = () =>
 export const getCustomOpenaiModelName = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
 
-export const getLMStudioApiEndpoint = () =>
-  loadConfig().MODELS.LM_STUDIO.API_URL;
+export const getLMStudioApiEndpoint = () => loadConfig().MODELS.LM_STUDIO.API_URL;
 
 const mergeConfigs = (current: any, update: any): any => {
   if (update === null || update === undefined) {
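
Note: mergeConfigs (whose opening guard appears above) deep-merges an update payload into the current config. Only its first two lines are visible in this diff; a minimal sketch of the rest, assuming a plain recursive merge:

  const mergeConfigs = (current: any, update: any): any => {
    if (update === null || update === undefined) {
      return current; // a missing field keeps its current value
    }
    if (typeof current !== 'object' || current === null || typeof update !== 'object') {
      return update; // leaf values from the update win
    }
    const merged: any = { ...current };
    for (const key of Object.keys(update)) {
      merged[key] = mergeConfigs(current[key], update[key]); // recurse per key
    }
    return merged;
  };
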
@@ -4,7 +4,7 @@ import { getAnthropicApiKey } from '../config';
 
 export const PROVIDER_INFO = {
   key: 'anthropic',
-  displayName: 'Anthropic',
+  displayName: 'Anthropic'
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 

@@ -5,7 +5,7 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 export const PROVIDER_INFO = {
   key: 'deepseek',
-  displayName: 'Deepseek AI',
+  displayName: 'Deepseek AI'
 };
 
 const deepseekChatModels: Record<string, string>[] = [

@@ -7,7 +7,7 @@ import { ChatModel, EmbeddingModel } from '.';
 
 export const PROVIDER_INFO = {
   key: 'gemini',
-  displayName: 'Google Gemini',
+  displayName: 'Google Gemini'
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';

@@ -4,7 +4,7 @@ import { ChatModel } from '.';
 
 export const PROVIDER_INFO = {
   key: 'groq',
-  displayName: 'Groq',
+  displayName: 'Groq'
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 

@@ -1,45 +1,19 @@
 import { Embeddings } from '@langchain/core/embeddings';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import {
-  loadOpenAIChatModels,
-  loadOpenAIEmbeddingModels,
-  PROVIDER_INFO as OpenAIInfo,
-  PROVIDER_INFO,
-} from './openai';
+import { loadOpenAIChatModels, loadOpenAIEmbeddingModels, PROVIDER_INFO as OpenAIInfo, PROVIDER_INFO } from './openai';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
 import { ChatOpenAI } from '@langchain/openai';
-import {
-  loadOllamaChatModels,
-  loadOllamaEmbeddingModels,
-  PROVIDER_INFO as OllamaInfo,
-} from './ollama';
+import { loadOllamaChatModels, loadOllamaEmbeddingModels, PROVIDER_INFO as OllamaInfo } from './ollama';
 import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
-import {
-  loadAnthropicChatModels,
-  PROVIDER_INFO as AnthropicInfo,
-} from './anthropic';
-import {
-  loadGeminiChatModels,
-  loadGeminiEmbeddingModels,
-  PROVIDER_INFO as GeminiInfo,
-} from './gemini';
-import {
-  loadTransformersEmbeddingsModels,
-  PROVIDER_INFO as TransformersInfo,
-} from './transformers';
-import {
-  loadDeepseekChatModels,
-  PROVIDER_INFO as DeepseekInfo,
-} from './deepseek';
-import {
-  loadLMStudioChatModels,
-  loadLMStudioEmbeddingsModels,
-  PROVIDER_INFO as LMStudioInfo,
-} from './lmstudio';
+import { loadAnthropicChatModels, PROVIDER_INFO as AnthropicInfo } from './anthropic';
+import { loadGeminiChatModels, loadGeminiEmbeddingModels, PROVIDER_INFO as GeminiInfo } from './gemini';
+import { loadTransformersEmbeddingsModels, PROVIDER_INFO as TransformersInfo } from './transformers';
+import { loadDeepseekChatModels, PROVIDER_INFO as DeepseekInfo } from './deepseek';
+import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels, PROVIDER_INFO as LMStudioInfo } from './lmstudio';
 
 export const PROVIDER_METADATA = {
   openai: OpenAIInfo,
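
Note: every provider module touched in this diff exports a PROVIDER_INFO of the same shape, which these imports re-export under per-provider aliases. The shape, as assumed from the provider hunks elsewhere in this diff:

  interface ProviderInfo {
    key: string;         // e.g. 'anthropic'
    displayName: string; // e.g. 'Anthropic'
  }
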
@@ -52,8 +26,8 @@ export const PROVIDER_METADATA = {
   lmstudio: LMStudioInfo,
   custom_openai: {
     key: 'custom_openai',
-    displayName: 'Custom OpenAI',
-  },
+    displayName: 'Custom OpenAI'
+  }
 };
 
 export interface ChatModel {

@@ -4,7 +4,7 @@ import { ChatModel, EmbeddingModel } from '.';
 
 export const PROVIDER_INFO = {
   key: 'lmstudio',
-  displayName: 'LM Studio',
+  displayName: 'LM Studio'
 };
 import { ChatOpenAI } from '@langchain/openai';
 import { OpenAIEmbeddings } from '@langchain/openai';

@@ -16,12 +16,14 @@ interface LMStudioModel {
   name?: string;
 }
 
 const ensureV1Endpoint = (endpoint: string): string =>
   endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;
 
 const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
   try {
+    const keepAlive = getKeepAlive();
     await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
+      timeout: parseInt(keepAlive) * 1000 || 5000,
       headers: { 'Content-Type': 'application/json' },
     });
     return true;
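
Note: ensureV1Endpoint normalizes the user-supplied base URL, and the added timeout falls back to 5000 ms whenever the keep-alive setting is missing or non-numeric, because NaN * 1000 is falsy. Illustrative values:

  ensureV1Endpoint('http://localhost:1234');    // 'http://localhost:1234/v1'
  ensureV1Endpoint('http://localhost:1234/v1'); // unchanged
  parseInt('30') * 1000 || 5000;                // 30000
  parseInt('') * 1000 || 5000;                  // 5000 (NaN is falsy)
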
@@ -32,12 +34,14 @@ const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
 
 export const loadLMStudioChatModels = async () => {
   const endpoint = getLMStudioApiEndpoint();
+  const keepAlive = getKeepAlive();
 
   if (!endpoint) return {};
-  if (!(await checkServerAvailability(endpoint))) return {};
+  if (!await checkServerAvailability(endpoint)) return {};
 
   try {
     const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
+      timeout: parseInt(keepAlive) * 1000 || 5000,
       headers: { 'Content-Type': 'application/json' },
     });
 

@@ -54,7 +58,7 @@ export const loadLMStudioChatModels = async () => {
           modelName: model.id,
           temperature: 0.7,
           streaming: true,
-          maxRetries: 3,
+          maxRetries: 3
         }) as unknown as BaseChatModel,
       };
     });
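
Note: LM Studio serves an OpenAI-compatible API under /v1, which is why a ChatOpenAI instance carries these options. A sketch of the instantiation around this hunk (the baseURL wiring is an assumption, not shown in the diff):

  // inside the map over models returned by `${endpoint}/v1/models`
  const chat = new ChatOpenAI({
    openAIApiKey: 'lm-studio', // placeholder; a local LM Studio server does not validate it (assumption)
    configuration: { baseURL: ensureV1Endpoint(endpoint) },
    modelName: model.id,
    temperature: 0.7,
    streaming: true,
    maxRetries: 3
  }) as unknown as BaseChatModel;
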
@@ -68,12 +72,14 @@ export const loadLMStudioChatModels = async () => {
 
 export const loadLMStudioEmbeddingsModels = async () => {
   const endpoint = getLMStudioApiEndpoint();
+  const keepAlive = getKeepAlive();
 
   if (!endpoint) return {};
-  if (!(await checkServerAvailability(endpoint))) return {};
+  if (!await checkServerAvailability(endpoint)) return {};
 
   try {
     const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
+      timeout: parseInt(keepAlive) * 1000 || 5000,
       headers: { 'Content-Type': 'application/json' },
     });
 

@@ -4,7 +4,7 @@ import { ChatModel, EmbeddingModel } from '.';
 
 export const PROVIDER_INFO = {
   key: 'ollama',
-  displayName: 'Ollama',
+  displayName: 'Ollama'
 };
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';

@@ -4,7 +4,7 @@ import { ChatModel, EmbeddingModel } from '.';
 
 export const PROVIDER_INFO = {
   key: 'openai',
-  displayName: 'OpenAI',
+  displayName: 'OpenAI'
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';

@@ -30,18 +30,6 @@ const openaiChatModels: Record<string, string>[] = [
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
   },
-  {
-    displayName: 'GPT 4.1 nano',
-    key: 'gpt-4.1-nano',
-  },
-  {
-    displayName: 'GPT 4.1 mini',
-    key: 'gpt-4.1-mini',
-  },
-  {
-    displayName: 'GPT 4.1',
-    key: 'gpt-4.1',
-  },
 ];
 
 const openaiEmbeddingModels: Record<string, string>[] = [

@@ -2,7 +2,7 @@ import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
 
 export const PROVIDER_INFO = {
   key: 'transformers',
-  displayName: 'Hugging Face',
+  displayName: 'Hugging Face'
 };
 
 export const loadTransformersEmbeddingsModels = async () => {

@@ -64,7 +64,7 @@ export const getDocumentsFromLinks = async ({ links }: { links: string[] }) => {
     const splittedText = await splitter.splitText(parsedText);
     const title = res.data
       .toString('utf8')
-      .match(/<title.*>(.*?)<\/title>/)?.[1];
+      .match(/<title>(.*?)<\/title>/)?.[1];
 
     const linkDocs = splittedText.map((text) => {
       return new Document({
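
Note: the two regexes differ on real-world pages. The old /<title.*>/ form also matches a <title> tag carrying attributes, while the new attribute-free form misses it and yields undefined:

  const html = '<title data-rh="true">Hello</title>';
  html.match(/<title.*>(.*?)<\/title>/)?.[1]; // 'Hello'
  html.match(/<title>(.*?)<\/title>/)?.[1];   // undefined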