feat(app): lint & beautify

ItzCrazyKns
2025-09-26 13:54:08 +05:30
parent dde6c8d719
commit 23dde9fa59
8 changed files with 111 additions and 59 deletions

View File

@@ -131,7 +131,7 @@ If Perplexica tells you that you haven't configured any chat model providers, en
 1. Your server is running on `0.0.0.0` (not `127.0.0.1`) and on the same port you put in the API URL.
 2. You have specified the correct model name loaded by your local LLM server.
-3. You have specified the correct API key, or if one is not defined, you have put *something* in the API key field and not left it empty.
+3. You have specified the correct API key, or if one is not defined, you have put _something_ in the API key field and not left it empty.

 #### Ollama Connection Errors
@@ -197,7 +197,6 @@ Perplexica runs on Next.js and handles all API requests. It works right away on
 [![Run on ClawCloud](https://raw.githubusercontent.com/ClawCloud/Run-Template/refs/heads/main/Run-on-ClawCloud.svg)](https://template.run.claw.cloud/?referralCode=U11MRQ8U9RM4&openapp=system-fastdeploy%3FtemplateName%3Dperplexica)
 [![Deploy on Hostinger](https://assets.hostinger.com/vps/deploy.svg)](https://www.hostinger.com/vps/docker-hosting?compose_url=https://raw.githubusercontent.com/ItzCrazyKns/Perplexica/refs/heads/master/docker-compose.yaml)

 ## Upcoming Features

 - [x] Add settings page

View File

@@ -108,7 +108,10 @@ const AttachSmall = () => {
className="flex flex-row items-center justify-start w-full space-x-3 p-3" className="flex flex-row items-center justify-start w-full space-x-3 p-3"
> >
<div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md"> <div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
<File size={16} className="text-black/70 dark:text-white/70" /> <File
size={16}
className="text-black/70 dark:text-white/70"
/>
</div> </div>
<p className="text-black/70 dark:text-white/70 text-sm"> <p className="text-black/70 dark:text-white/70 text-sm">
{file.fileName.length > 25 {file.fileName.length > 25

View File

@@ -15,33 +15,41 @@ db.exec(`
 `);

 function sanitizeSql(content: string) {
   return content
     .split(/\r?\n/)
-    .filter((l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'))
+    .filter(
+      (l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'),
+    )
     .join('\n');
 }

 fs.readdirSync(migrationsFolder)
   .filter((f) => f.endsWith('.sql'))
   .sort()
   .forEach((file) => {
     const filePath = path.join(migrationsFolder, file);
     let content = fs.readFileSync(filePath, 'utf-8');

     content = sanitizeSql(content);

     const migrationName = file.split('_')[0] || file;

-    const already = db.prepare('SELECT 1 FROM ran_migrations WHERE name = ?').get(migrationName);
+    const already = db
+      .prepare('SELECT 1 FROM ran_migrations WHERE name = ?')
+      .get(migrationName);
     if (already) {
       console.log(`Skipping already-applied migration: ${file}`);
       return;
     }

     try {
       if (migrationName === '0001') {
-        const messages = db.prepare('SELECT id, type, metadata, content, chatId, messageId FROM messages').all();
+        const messages = db
+          .prepare(
+            'SELECT id, type, metadata, content, chatId, messageId FROM messages',
+          )
+          .all();

         db.exec(`
           CREATE TABLE IF NOT EXISTS messages_with_sources (
             id INTEGER PRIMARY KEY,
             type TEXT NOT NULL,
@@ -53,37 +61,58 @@ fs.readdirSync(migrationsFolder)
           );
         `);

         const insertMessage = db.prepare(`
           INSERT INTO messages_with_sources (type, chatId, createdAt, messageId, content, sources)
           VALUES (?, ?, ?, ?, ?, ?)
         `);

         messages.forEach((msg: any) => {
           if (msg.type === 'user') {
             msg.metadata = JSON.parse(msg.metadata || '{}');
-            insertMessage.run('user', msg.chatId, msg.metadata['createdAt'], msg.messageId, msg.content, '[]');
+            insertMessage.run(
+              'user',
+              msg.chatId,
+              msg.metadata['createdAt'],
+              msg.messageId,
+              msg.content,
+              '[]',
+            );
           } else if (msg.type === 'assistant') {
             msg.metadata = JSON.parse(msg.metadata || '{}');
-            insertMessage.run('assistant', msg.chatId, msg.metadata['createdAt'], msg.messageId, msg.content, '[]');
-            const sources = msg.metadata['sources'] || '[]'
+            insertMessage.run(
+              'assistant',
+              msg.chatId,
+              msg.metadata['createdAt'],
+              msg.messageId,
+              msg.content,
+              '[]',
+            );
+            const sources = msg.metadata['sources'] || '[]';
             if (sources && sources.length > 0) {
-              insertMessage.run('source', msg.chatId, msg.metadata['createdAt'], `${msg.messageId}-source`, '', JSON.stringify(sources));
+              insertMessage.run(
+                'source',
+                msg.chatId,
+                msg.metadata['createdAt'],
+                `${msg.messageId}-source`,
+                '',
+                JSON.stringify(sources),
+              );
             }
           }
         });

         db.exec('DROP TABLE messages;');
         db.exec('ALTER TABLE messages_with_sources RENAME TO messages;');
       } else {
         db.exec(content);
       }
-      db.prepare('INSERT OR IGNORE INTO ran_migrations (name) VALUES (?)').run(migrationName);
+      db.prepare('INSERT OR IGNORE INTO ran_migrations (name) VALUES (?)').run(
+        migrationName,
+      );
       console.log(`Applied migration: ${file}`);
     } catch (err) {
       console.error(`Failed to apply migration ${file}:`, err);
       throw err;
     }
   });
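
For readers unfamiliar with drizzle-kit output, here is a small usage sketch of the `sanitizeSql` helper shown above: it drops the `--> statement-breakpoint` marker lines that drizzle-kit places between statements in generated migration files. The sample SQL string below is illustrative, not taken from the repository.

```ts
// Sketch only: sanitizeSql copied from the migration runner above,
// applied to an illustrative drizzle-kit-style migration string.
function sanitizeSql(content: string) {
  return content
    .split(/\r?\n/)
    .filter(
      (l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'),
    )
    .join('\n');
}

// Hypothetical migration content for demonstration purposes.
const sample = [
  'CREATE TABLE chats (id INTEGER PRIMARY KEY);',
  '--> statement-breakpoint',
  'CREATE TABLE messages (id INTEGER PRIMARY KEY);',
].join('\n');

console.log(sanitizeSql(sample));
// CREATE TABLE chats (id INTEGER PRIMARY KEY);
// CREATE TABLE messages (id INTEGER PRIMARY KEY);
```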

View File

@@ -128,11 +128,22 @@ export const getAvailableChatModelProviders = async () => {
                 model: new ChatOpenAI({
                   apiKey: customOpenAiApiKey,
                   modelName: customOpenAiModelName,
-                  ...((() => {
-                    const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
-                    const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => customOpenAiModelName.includes(restrictedModel));
+                  ...(() => {
+                    const temperatureRestrictedModels = [
+                      'gpt-5-nano',
+                      'gpt-5',
+                      'gpt-5-mini',
+                      'o1',
+                      'o3',
+                      'o3-mini',
+                      'o4-mini',
+                    ];
+                    const isTemperatureRestricted =
+                      temperatureRestrictedModels.some((restrictedModel) =>
+                        customOpenAiModelName.includes(restrictedModel),
+                      );
                     return isTemperatureRestricted ? {} : { temperature: 0.7 };
-                  })()),
+                  })(),
                   configuration: {
                     baseURL: customOpenAiApiUrl,
                   },
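
The `...(() => { ... })()` construct above is an immediately-invoked function whose return value is spread into the constructor options, so `temperature: 0.7` is only included for models that accept a custom temperature. A minimal standalone sketch of the same pattern follows; the model name and option shape are illustrative only.

```ts
// Sketch of the conditional-spread pattern: the IIFE returns either {}
// or { temperature: 0.7 }, and the spread merges that result into the
// surrounding options object.
const modelName = 'o3-mini'; // illustrative value

const options = {
  modelName,
  ...(() => {
    const temperatureRestrictedModels = ['gpt-5', 'o1', 'o3', 'o3-mini'];
    const isTemperatureRestricted = temperatureRestrictedModels.some((m) =>
      modelName.includes(m),
    );
    return isTemperatureRestricted ? {} : { temperature: 0.7 };
  })(),
};

console.log(options); // { modelName: 'o3-mini' } — no temperature key added
```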

View File

@@ -97,8 +97,18 @@ export const loadOpenAIChatModels = async () => {
   openaiChatModels.forEach((model) => {
     // Models that only support temperature = 1
-    const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
-    const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => model.key.includes(restrictedModel));
+    const temperatureRestrictedModels = [
+      'gpt-5-nano',
+      'gpt-5',
+      'gpt-5-mini',
+      'o1',
+      'o3',
+      'o3-mini',
+      'o4-mini',
+    ];
+    const isTemperatureRestricted = temperatureRestrictedModels.some(
+      (restrictedModel) => model.key.includes(restrictedModel),
+    );

     const modelConfig: any = {
       apiKey: openaiApiKey,