Compare commits

..

2 Commits

Author SHA1 Message Date
eb1837abfb Merge 2427caf352 into a8d410bc2f 2025-04-01 16:22:21 +05:30
2427caf352 fix typo in prompt 2025-04-01 11:36:33 +02:00
12 changed files with 9 additions and 127 deletions

View File

@@ -33,7 +33,6 @@ The API accepts a JSON object in the request body, where you define the focus mo
["human", "Hi, how are you?"],
["assistant", "I am doing well, how can I help you today?"]
],
"systemInstructions": "Focus on providing technical details about Perplexica's architecture.",
"stream": false
}
```
@@ -64,8 +63,6 @@ The API accepts a JSON object in the request body, where you define the focus mo
- **`query`** (string, required): The search query or question.
- **`systemInstructions`** (string, optional): Custom instructions provided by the user to guide the AI's response. These instructions are treated as user preferences and have lower priority than the system's core instructions. For example, you can specify a particular writing style, format, or focus area.
- **`history`** (array, optional): An array of message pairs representing the conversation history. Each pair consists of a role (either 'human' or 'assistant') and the message content. This allows the system to use the context of the conversation to refine results. Example:
```json

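For reference, the fields documented above (`query`, `history`, `systemInstructions`, `stream`) combine into a single request body. A minimal sketch of such a call, assuming the search endpoint is exposed at `/api/search` on a local instance; the URL and port are assumptions, not taken from this compare:

```ts
// Minimal sketch of the documented request body; the endpoint URL is an assumption.
const res = await fetch('http://localhost:3000/api/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'How does Perplexica work?',
    history: [
      ['human', 'Hi, how are you?'],
      ['assistant', 'I am doing well, how can I help you today?'],
    ],
    // Optional user-level instructions; this field is what the compare removes from the docs.
    systemInstructions:
      "Focus on providing technical details about Perplexica's architecture.",
    stream: false,
  }),
});
console.log(await res.json());
```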
View File

@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
"version": "1.10.2",
"version": "1.10.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {

View File

@@ -22,8 +22,5 @@ MODEL_NAME = ""
[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434
[MODELS.DEEPSEEK]
API_KEY = ""
[API_ENDPOINTS]
SEARXNG = "" # SearxNG API URL - http://localhost:32768

View File

@@ -7,7 +7,6 @@ import {
getGroqApiKey,
getOllamaApiEndpoint,
getOpenaiApiKey,
getDeepseekApiKey,
updateConfig,
} from '@/lib/config';
import {
@@ -54,7 +53,6 @@ export const GET = async (req: Request) => {
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey();
config['deepseekApiKey'] = getDeepseekApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -90,9 +88,6 @@ export const POST = async (req: Request) => {
OLLAMA: {
API_URL: config.ollamaApiUrl,
},
DEEPSEEK: {
API_KEY: config.deepseekApiKey,
},
CUSTOM_OPENAI: {
API_URL: config.customOpenaiApiUrl,
API_KEY: config.customOpenaiApiKey,

View File

@@ -34,7 +34,6 @@ interface ChatRequestBody {
query: string;
history: Array<[string, string]>;
stream?: boolean;
systemInstructions?: string;
}
export const POST = async (req: Request) => {
@@ -126,7 +125,7 @@ export const POST = async (req: Request) => {
embeddings,
body.optimizationMode,
[],
body.systemInstructions || '',
"",
);
if (!body.stream) {

View File

@@ -20,7 +20,6 @@ interface SettingsType {
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
deepseekApiKey: string;
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
@@ -839,25 +838,6 @@ const Page = () => {
onSave={(value) => saveConfig('geminiApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Deepseek API Key
</p>
<Input
type="text"
placeholder="Deepseek API Key"
value={config.deepseekApiKey}
isSaving={savingStates['deepseekApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
deepseekApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('deepseekApiKey', value)}
/>
</div>
</div>
</SettingsSection>
</div>

View File

@@ -48,7 +48,6 @@ const MessageBox = ({
const [speechMessage, setSpeechMessage] = useState(message.content);
useEffect(() => {
const citationRegex = /\[([^\]]+)\]/g;
const regex = /\[(\d+)\]/g;
let processedMessage = message.content;
@@ -68,33 +67,11 @@
) {
setParsedMessage(
processedMessage.replace(
citationRegex,
(_, capturedContent: string) => {
const numbers = capturedContent
.split(',')
.map((numStr) => numStr.trim());
const linksHtml = numbers
.map((numStr) => {
const number = parseInt(numStr);
if (isNaN(number) || number <= 0) {
return `[${numStr}]`;
}
const source = message.sources?.[number - 1];
const url = source?.metadata?.url;
if (url) {
return `<a href="${url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${numStr}</a>`;
} else {
return `[${numStr}]`;
}
})
.join('');
return linksHtml;
},
regex,
(_, number) =>
`<a href="${
message.sources?.[number - 1]?.metadata?.url
}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${number}</a>`,
),
);
return;

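The block removed in the hunk above expands comma-separated citations such as `[1,2]` into one link per source, while the retained single-number regex only handles `[n]`. A self-contained sketch of that removed behaviour, using a made-up message and source list and trimmed-down anchor markup:

```ts
// Stand-alone sketch of the comma-separated citation handling removed above;
// the message text and sources are invented for illustration.
type Source = { metadata: { url?: string } };

const sources: Source[] = [
  { metadata: { url: 'https://example.com/a' } },
  { metadata: {} }, // no URL, so this citation stays as plain text
];

const citationRegex = /\[([^\]]+)\]/g;
const content = 'Perplexica answers with cited sources [1,2].';

const processed = content.replace(citationRegex, (_, captured: string) =>
  captured
    .split(',')
    .map((numStr) => numStr.trim())
    .map((numStr) => {
      const number = parseInt(numStr);
      if (isNaN(number) || number <= 0) return `[${numStr}]`;
      const url = sources[number - 1]?.metadata?.url;
      return url ? `<a href="${url}" target="_blank">${numStr}</a>` : `[${numStr}]`;
    })
    .join(''),
);

console.log(processed);
// Perplexica answers with cited sources <a href="https://example.com/a" target="_blank">1</a>[2].
```

The single-number regex that survives this compare would leave `[1,2]` untouched, since `\[(\d+)\]` does not match a comma inside the brackets.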
View File

@@ -25,9 +25,6 @@ interface Config {
OLLAMA: {
API_URL: string;
};
DEEPSEEK: {
API_KEY: string;
};
CUSTOM_OPENAI: {
API_URL: string;
API_KEY: string;
@@ -66,8 +63,6 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getCustomOpenaiApiKey = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

View File

@@ -1,44 +0,0 @@
import { ChatOpenAI } from '@langchain/openai';
import { getDeepseekApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const deepseekChatModels: Record<string, string>[] = [
{
displayName: 'Deepseek Chat (Deepseek V3)',
key: 'deepseek-chat',
},
{
displayName: 'Deepseek Reasoner (Deepseek R1)',
key: 'deepseek-reasoner',
},
];
export const loadDeepseekChatModels = async () => {
const deepseekApiKey = getDeepseekApiKey();
if (!deepseekApiKey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
deepseekChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
openAIApiKey: deepseekApiKey,
modelName: model.key,
temperature: 0.7,
configuration: {
baseURL: 'https://api.deepseek.com',
},
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Deepseek models: ${err}`);
return {};
}
};

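The deleted module above is one of the provider loaders wired into `chatModelProviders` in the providers index (shown at the end of this compare). A rough sketch of how a loader like this could be consumed, assuming the registry shape from that file; the prompt string and direct `invoke` call are illustrative only:

```ts
// Hypothetical consumption of a provider loader such as loadDeepseekChatModels;
// mirrors the chatModelProviders registry shown later in this compare.
import { loadDeepseekChatModels } from './deepseek';

const chatModelProviders = {
  deepseek: loadDeepseekChatModels,
  // openai, groq, anthropic, gemini are registered the same way
};

const models = await chatModelProviders.deepseek();
const chat = models['deepseek-chat'];

if (chat) {
  // Each entry carries a displayName plus a LangChain chat model instance.
  const reply = await chat.model.invoke('Say hello in one sentence.');
  console.log(chat.displayName, reply.content);
}
```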
View File

@@ -40,12 +40,8 @@ const geminiChatModels: Record<string, string>[] = [
const geminiEmbeddingModels: Record<string, string>[] = [
{
displayName: 'Text Embedding 004',
key: 'models/text-embedding-004',
},
{
displayName: 'Embedding 001',
key: 'models/embedding-001',
displayName: 'Gemini Embedding',
key: 'gemini-embedding-exp',
},
];

View File

@@ -72,14 +72,6 @@ const groqChatModels: Record<string, string>[] = [
displayName: 'Llama 3.2 90B Vision Preview (Preview)',
key: 'llama-3.2-90b-vision-preview',
},
/* {
displayName: 'Llama 4 Maverick 17B 128E Instruct (Preview)',
key: 'meta-llama/llama-4-maverick-17b-128e-instruct',
}, */
{
displayName: 'Llama 4 Scout 17B 16E Instruct (Preview)',
key: 'meta-llama/llama-4-scout-17b-16e-instruct',
},
];
export const loadGroqChatModels = async () => {

View File

@@ -12,7 +12,6 @@ import { loadGroqChatModels } from './groq';
import { loadAnthropicChatModels } from './anthropic';
import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
import { loadTransformersEmbeddingsModels } from './transformers';
import { loadDeepseekChatModels } from './deepseek';
export interface ChatModel {
displayName: string;
@@ -33,7 +32,6 @@ export const chatModelProviders: Record<
groq: loadGroqChatModels,
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels,
};
export const embeddingModelProviders: Record<