Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-10-13 19:18:14 +00:00)

Commit: feat(app): lint & beautify
@@ -131,7 +131,7 @@ If Perplexica tells you that you haven't configured any chat model providers, en

 1. Your server is running on `0.0.0.0` (not `127.0.0.1`) and on the same port you put in the API URL.
 2. You have specified the correct model name loaded by your local LLM server.
-3. You have specified the correct API key, or if one is not defined, you have put *something* in the API key field and not left it empty.
+3. You have specified the correct API key, or if one is not defined, you have put _something_ in the API key field and not left it empty.

 #### Ollama Connection Errors
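For the local-server case covered by this checklist, the checks map directly onto how an OpenAI-compatible client is constructed. The snippet below is a minimal, hypothetical sketch using LangChain's `ChatOpenAI` (the same class the provider code later in this commit uses); the base URL, port, model name, and API key are placeholders, not values from this repository.

```ts
import { ChatOpenAI } from '@langchain/openai';

// Hypothetical local setup: replace the URL, model name, and key with your own.
const localModel = new ChatOpenAI({
  modelName: 'llama-3.1-8b-instruct', // must match the model your local server actually loaded
  apiKey: 'sk-anything-but-empty', // some servers ignore it, but the field cannot be blank
  configuration: {
    // the server should listen on 0.0.0.0 and this port must match the API URL you configured
    baseURL: 'http://localhost:8080/v1',
  },
});

// Quick connectivity check (throws if the URL, port, or model name is wrong):
// await localModel.invoke('Say hi');
```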
@@ -197,7 +197,6 @@ Perplexica runs on Next.js and handles all API requests. It works right away on

 [](https://template.run.claw.cloud/?referralCode=U11MRQ8U9RM4&openapp=system-fastdeploy%3FtemplateName%3Dperplexica)
 [](https://www.hostinger.com/vps/docker-hosting?compose_url=https://raw.githubusercontent.com/ItzCrazyKns/Perplexica/refs/heads/master/docker-compose.yaml)
-

 ## Upcoming Features

 - [x] Add settings page
@@ -122,4 +122,4 @@
   "internal": {
     "indexes": {}
   }
-}
+}
@@ -17,4 +17,4 @@
       "breakpoints": true
     }
   ]
-}
+}
@@ -108,7 +108,10 @@ const AttachSmall = () => {
         className="flex flex-row items-center justify-start w-full space-x-3 p-3"
       >
         <div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
-          <File size={16} className="text-black/70 dark:text-white/70" />
+          <File
+            size={16}
+            className="text-black/70 dark:text-white/70"
+          />
         </div>
         <p className="text-black/70 dark:text-white/70 text-sm">
           {file.fileName.length > 25
@@ -15,33 +15,41 @@ db.exec(`
 `);

 function sanitizeSql(content: string) {
   return content
     .split(/\r?\n/)
-    .filter((l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'))
+    .filter(
+      (l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'),
+    )
     .join('\n');
 }

 fs.readdirSync(migrationsFolder)
   .filter((f) => f.endsWith('.sql'))
   .sort()
   .forEach((file) => {
     const filePath = path.join(migrationsFolder, file);
     let content = fs.readFileSync(filePath, 'utf-8');
     content = sanitizeSql(content);

     const migrationName = file.split('_')[0] || file;

-    const already = db.prepare('SELECT 1 FROM ran_migrations WHERE name = ?').get(migrationName);
+    const already = db
+      .prepare('SELECT 1 FROM ran_migrations WHERE name = ?')
+      .get(migrationName);
     if (already) {
       console.log(`Skipping already-applied migration: ${file}`);
       return;
     }

     try {
       if (migrationName === '0001') {
-        const messages = db.prepare('SELECT id, type, metadata, content, chatId, messageId FROM messages').all();
+        const messages = db
+          .prepare(
+            'SELECT id, type, metadata, content, chatId, messageId FROM messages',
+          )
+          .all();

         db.exec(`
           CREATE TABLE IF NOT EXISTS messages_with_sources (
             id INTEGER PRIMARY KEY,
             type TEXT NOT NULL,
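For context on `sanitizeSql` above: drizzle-kit separates statements in its generated `.sql` migrations with `--> statement-breakpoint` marker lines, and the script strips those lines before handing the file to `db.exec()`. A self-contained sketch of the same filter on a made-up migration file (the sample SQL is hypothetical; the filter is the one from the diff):

```ts
const sample = [
  'CREATE TABLE `chats` (`id` text PRIMARY KEY NOT NULL);',
  '--> statement-breakpoint',
  'CREATE TABLE `messages` (`id` integer PRIMARY KEY NOT NULL);',
].join('\n');

const sanitize = (content: string) =>
  content
    .split(/\r?\n/)
    .filter(
      (l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'),
    )
    .join('\n');

console.log(sanitize(sample));
// CREATE TABLE `chats` (`id` text PRIMARY KEY NOT NULL);
// CREATE TABLE `messages` (`id` integer PRIMARY KEY NOT NULL);
```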
@@ -53,37 +61,58 @@ fs.readdirSync(migrationsFolder)
           );
         `);

         const insertMessage = db.prepare(`
           INSERT INTO messages_with_sources (type, chatId, createdAt, messageId, content, sources)
           VALUES (?, ?, ?, ?, ?, ?)
         `);

         messages.forEach((msg: any) => {
           if (msg.type === 'user') {
             msg.metadata = JSON.parse(msg.metadata || '{}');
-            insertMessage.run('user', msg.chatId, msg.metadata['createdAt'], msg.messageId, msg.content, '[]');
+            insertMessage.run(
+              'user',
+              msg.chatId,
+              msg.metadata['createdAt'],
+              msg.messageId,
+              msg.content,
+              '[]',
+            );
           } else if (msg.type === 'assistant') {
             msg.metadata = JSON.parse(msg.metadata || '{}');
-            insertMessage.run('assistant', msg.chatId, msg.metadata['createdAt'], msg.messageId, msg.content, '[]');
-            const sources = msg.metadata['sources'] || '[]'
+            insertMessage.run(
+              'assistant',
+              msg.chatId,
+              msg.metadata['createdAt'],
+              msg.messageId,
+              msg.content,
+              '[]',
+            );
+            const sources = msg.metadata['sources'] || '[]';
             if (sources && sources.length > 0) {
-              insertMessage.run('source', msg.chatId, msg.metadata['createdAt'], `${msg.messageId}-source`, '', JSON.stringify(sources));
+              insertMessage.run(
+                'source',
+                msg.chatId,
+                msg.metadata['createdAt'],
+                `${msg.messageId}-source`,
+                '',
+                JSON.stringify(sources),
+              );
             }
           }
         });

         db.exec('DROP TABLE messages;');
         db.exec('ALTER TABLE messages_with_sources RENAME TO messages;');
-
       } else {
         db.exec(content);
       }

-      db.prepare('INSERT OR IGNORE INTO ran_migrations (name) VALUES (?)').run(migrationName);
+      db.prepare('INSERT OR IGNORE INTO ran_migrations (name) VALUES (?)').run(
+        migrationName,
+      );
       console.log(`Applied migration: ${file}`);
     } catch (err) {
       console.error(`Failed to apply migration ${file}:`, err);
       throw err;
     }
   });
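What the `0001` branch above does, stripped of the SQLite plumbing: each old row's `metadata` JSON supplies `createdAt`, and assistant rows that carried `sources` in that JSON get an extra row of type `source` whose messageId gains a `-source` suffix. The sketch below restates that mapping as a pure function over in-memory rows; the row shapes are simplified and the function name is made up, not code from the migration script. It also defaults missing sources to an empty array, whereas the script defaults to the string `'[]'`.

```ts
type OldMessage = {
  type: 'user' | 'assistant';
  chatId: string;
  messageId: string;
  content: string;
  metadata: string; // JSON: { createdAt: string, sources?: unknown[] }
};

type NewRow = {
  type: 'user' | 'assistant' | 'source';
  chatId: string;
  createdAt: string;
  messageId: string;
  content: string;
  sources: string; // JSON-encoded array
};

const splitSourcesOut = (messages: OldMessage[]): NewRow[] =>
  messages.flatMap((msg) => {
    const meta = JSON.parse(msg.metadata || '{}');
    const base: NewRow = {
      type: msg.type,
      chatId: msg.chatId,
      createdAt: meta['createdAt'],
      messageId: msg.messageId,
      content: msg.content,
      sources: '[]',
    };
    const sources = meta['sources'] || [];
    // Assistant messages with sources also produce a dedicated 'source' row.
    return msg.type === 'assistant' && sources.length > 0
      ? [
          base,
          {
            ...base,
            type: 'source',
            messageId: `${msg.messageId}-source`,
            content: '',
            sources: JSON.stringify(sources),
          },
        ]
      : [base];
  });
```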
@@ -128,11 +128,22 @@ export const getAvailableChatModelProviders = async () => {
         model: new ChatOpenAI({
           apiKey: customOpenAiApiKey,
           modelName: customOpenAiModelName,
-          ...((() => {
-            const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
-            const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => customOpenAiModelName.includes(restrictedModel));
+          ...(() => {
+            const temperatureRestrictedModels = [
+              'gpt-5-nano',
+              'gpt-5',
+              'gpt-5-mini',
+              'o1',
+              'o3',
+              'o3-mini',
+              'o4-mini',
+            ];
+            const isTemperatureRestricted =
+              temperatureRestrictedModels.some((restrictedModel) =>
+                customOpenAiModelName.includes(restrictedModel),
+              );
             return isTemperatureRestricted ? {} : { temperature: 0.7 };
-          })()),
+          })(),
           configuration: {
             baseURL: customOpenAiApiUrl,
           },
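The reformatted block above keeps the original behavior: an immediately-invoked function decides whether to spread `{ temperature: 0.7 }` into the `ChatOpenAI` options, and spreads an empty object for models that reject custom temperatures, so the field is omitted entirely. A condensed restatement of the same pattern as a standalone helper (the helper name is hypothetical, not code from this repository):

```ts
const TEMPERATURE_RESTRICTED_MODELS = [
  'gpt-5-nano',
  'gpt-5',
  'gpt-5-mini',
  'o1',
  'o3',
  'o3-mini',
  'o4-mini',
];

// Returns either {} or { temperature: 0.7 }; spreading the result into the options
// object leaves restricted models without a temperature field at all.
const temperatureFor = (modelName: string): { temperature?: number } =>
  TEMPERATURE_RESTRICTED_MODELS.some((m) => modelName.includes(m))
    ? {}
    : { temperature: 0.7 };

// Usage sketch: new ChatOpenAI({ apiKey, modelName, ...temperatureFor(modelName), ... })
```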
@@ -91,4 +91,4 @@ export const loadLemonadeEmbeddingModels = async () => {
     console.error(`Error loading Lemonade embedding models: ${err}`);
     return {};
   }
-};
+};
@@ -97,8 +97,18 @@ export const loadOpenAIChatModels = async () => {

   openaiChatModels.forEach((model) => {
     // Models that only support temperature = 1
-    const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
-    const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => model.key.includes(restrictedModel));
+    const temperatureRestrictedModels = [
+      'gpt-5-nano',
+      'gpt-5',
+      'gpt-5-mini',
+      'o1',
+      'o3',
+      'o3-mini',
+      'o4-mini',
+    ];
+    const isTemperatureRestricted = temperatureRestrictedModels.some(
+      (restrictedModel) => model.key.includes(restrictedModel),
+    );

     const modelConfig: any = {
       apiKey: openaiApiKey,
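One property of the check above worth noting: `includes` is a substring test, so any model key containing one of the listed fragments is treated as temperature-restricted, including dated or suffixed variants. A quick sketch with hypothetical keys:

```ts
const restricted = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];
const isRestricted = (key: string) =>
  restricted.some((restrictedModel) => key.includes(restrictedModel));

console.log(isRestricted('gpt-4o-mini')); // false
console.log(isRestricted('gpt-5-mini-2025-01-01')); // true (matches 'gpt-5' and 'gpt-5-mini')
console.log(isRestricted('o3-mini-high')); // true (matches 'o3' and 'o3-mini')
```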