Compare commits

...

13 Commits

Author SHA1 Message Date
ItzCrazyKns
57407112fb feat(package): bump version 2025-07-16 10:39:50 +05:30
ItzCrazyKns
b280cc2e01 Merge pull request #787 from chriswritescode-dev/IOS
Fix: IOS Input Zoom / Support PWA Home Screen App, closes #458
2025-07-15 22:10:01 +05:30
ItzCrazyKns
e6ebf892c5 feat(styles): update globals.css 2025-07-15 21:47:20 +05:30
ItzCrazyKns
b754641058 feat(gitignore): add certificates 2025-07-15 21:45:44 +05:30
ItzCrazyKns
722f4f760e feat(manifest): update icons & screenshots 2025-07-15 21:45:37 +05:30
ItzCrazyKns
01e04a209f feat(public): add screenshots & update icons 2025-07-15 21:45:24 +05:30
ItzCrazyKns
0299fd1ea0 Merge pull request #817 from kittrydge/patch-1
Update Linux ollama instructions in README.md
2025-07-15 20:23:02 +05:30
ItzCrazyKns
cf8dec53ca feat(chat-window): select provider if model's present, closes #803 2025-07-07 16:09:36 +05:30
ItzCrazyKns
d5c012d748 Revert "Update ChatWindow.tsx"
This reverts commit 2ccbd9a44c.
2025-07-07 15:52:39 +05:30
ItzCrazyKns
2ccbd9a44c Update ChatWindow.tsx 2025-07-05 22:00:06 +05:30
kittrydge
ccd89d48d9 Update Linux ollama instructions in README.md
When setting the OLLAMA_HOST environment variable, the port number must be specified ( see https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux )

Also, 'systemctl daemon-reload' needs to be called after changing a systemd unit file, and before the relevant systemd service is reloaded.
2025-07-01 18:00:26 -06:00
ItzCrazyKns
87d788ddef Update README.md 2025-06-30 19:55:23 +05:30
Chris Scott
68c43ea372 Fix: IOS Input Zoom
config for theme consistency and iOS standalone mode
- Modified manifest.ts to ensure proper metadata

- Added display: standalone for iOS PWA behavior
2025-06-02 21:52:41 -04:00
14 changed files with 100 additions and 9 deletions

0
.assets/manifest.json Normal file

2
.gitignore vendored

@@ -37,3 +37,5 @@ Thumbs.db
# Db
db.sqlite
/searxng
certificates

README.md

@@ -16,7 +16,7 @@
<hr/>
- [![Discord](https://dcbadge.vercel.app/api/server/26aArMy8tT?style=flat&compact=true)](https://discord.gg/26aArMy8tT)
+ [![Discord](https://dcbadge.limes.pink/api/server/26aArMy8tT?style=flat)](https://discord.gg/26aArMy8tT)
![preview](.assets/perplexica-screenshot.png?)
@@ -135,7 +135,7 @@ If you're encountering an Ollama connection error, it is likely due to the backe
3. **Linux Users - Expose Ollama to Network:**
-   - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0"`. Then restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux)
+   - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0:11434"`. (Change the port number if you are using a different one.) Then reload the systemd manager configuration with `systemctl daemon-reload`, and restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux)
- Ensure that the port (default is 11434) is not blocked by your firewall.
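
The updated instruction above assumes Ollama is actually reachable on the exposed host and port once the unit file is edited and reloaded. As a quick sanity check from the machine running Perplexica, Ollama answers with plain text on its root URL; the snippet below is a hypothetical helper, not part of this diff, and the address is a placeholder you would replace with your own server.

```ts
// Hypothetical connectivity check (not from the repo): confirm that Ollama is
// reachable on the host/port exposed via OLLAMA_HOST before pointing Perplexica at it.
const OLLAMA_URL = 'http://192.168.1.10:11434'; // replace with your server's address/port

async function checkOllama(url: string): Promise<void> {
  try {
    const res = await fetch(url); // global fetch is available in Node 18+ / Bun
    const body = (await res.text()).trim(); // Ollama's root URL replies "Ollama is running"
    console.log(`${url} -> HTTP ${res.status}: ${body}`);
  } catch (err) {
    console.error(`Could not reach ${url}:`, err);
    console.error('Check OLLAMA_HOST, run `systemctl daemon-reload`, and verify the firewall.');
  }
}

checkOllama(OLLAMA_URL);
```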

package.json

@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
"version": "1.11.0-rc1",
"version": "1.11.0-rc2",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {

BIN  public/icon-100.png  (new file, 916 B)

BIN  public/icon-50.png  (new file, 515 B)

BIN  public/icon.png  (new file, 30 KiB)

BIN  public/screenshots/p1.png  (new file, 183 KiB)

BIN  (new file, 130 KiB)

BIN  public/screenshots/p2.png  (new file, 627 KiB)

BIN  (new file, 202 KiB)

src/app/globals.css

@@ -11,3 +11,11 @@
    display: none;
  }
}

@media screen and (-webkit-min-device-pixel-ratio: 0) {
  select,
  textarea,
  input {
    font-size: 16px !important;
  }
}

54
src/app/manifest.ts Normal file

@@ -0,0 +1,54 @@
import type { MetadataRoute } from 'next';

export default function manifest(): MetadataRoute.Manifest {
  return {
    name: 'Perplexica - Chat with the internet',
    short_name: 'Perplexica',
    description:
      'Perplexica is an AI powered chatbot that is connected to the internet.',
    start_url: '/',
    display: 'standalone',
    background_color: '#0a0a0a',
    theme_color: '#0a0a0a',
    screenshots: [
      {
        src: '/screenshots/p1.png',
        form_factor: 'wide',
        sizes: '2560x1600',
      },
      {
        src: '/screenshots/p2.png',
        form_factor: 'wide',
        sizes: '2560x1600',
      },
      {
        src: '/screenshots/p1_small.png',
        form_factor: 'narrow',
        sizes: '828x1792',
      },
      {
        src: '/screenshots/p2_small.png',
        form_factor: 'narrow',
        sizes: '828x1792',
      },
    ],
    icons: [
      {
        src: '/icon-50.png',
        sizes: '50x50',
        type: 'image/png' as const,
      },
      {
        src: '/icon-100.png',
        sizes: '100x100',
        type: 'image/png',
      },
      {
        src: '/icon.png',
        sizes: '440x440',
        type: 'image/png',
        purpose: 'any',
      },
    ],
  };
}

ChatWindow.tsx

@@ -82,14 +82,29 @@ const checkConfig = async (
) {
  if (!chatModel || !chatModelProvider) {
    const chatModelProviders = providers.chatModelProviders;
+     const chatModelProvidersKeys = Object.keys(chatModelProviders);
-     chatModelProvider =
-       chatModelProvider || Object.keys(chatModelProviders)[0];
+     if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
+       return toast.error('No chat models available');
+     } else {
+       chatModelProvider =
+         chatModelProvidersKeys.find(
+           (provider) =>
+             Object.keys(chatModelProviders[provider]).length > 0,
+         ) || chatModelProvidersKeys[0];
+     }
+     if (
+       chatModelProvider === 'custom_openai' &&
+       Object.keys(chatModelProviders[chatModelProvider]).length === 0
+     ) {
+       toast.error(
+         "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
+       );
+       return setHasError(true);
+     }
    chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
-     if (!chatModelProviders || Object.keys(chatModelProviders).length === 0)
-       return toast.error('No chat models available');
  }
  if (!embeddingModel || !embeddingModelProvider) {
@@ -117,7 +132,8 @@ const checkConfig = async (
if (
  Object.keys(chatModelProviders).length > 0 &&
-   !chatModelProviders[chatModelProvider]
+   (!chatModelProviders[chatModelProvider] ||
+     Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) {
  const chatModelProvidersKeys = Object.keys(chatModelProviders);
  chatModelProvider =
@@ -132,6 +148,16 @@ const checkConfig = async (
  chatModelProvider &&
  !chatModelProviders[chatModelProvider][chatModel]
) {
+   if (
+     chatModelProvider === 'custom_openai' &&
+     Object.keys(chatModelProviders[chatModelProvider]).length === 0
+   ) {
+     toast.error(
+       "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
+     );
+     return setHasError(true);
+   }
  chatModel = Object.keys(
    chatModelProviders[
      Object.keys(chatModelProviders[chatModelProvider]).length > 0
@@ -139,6 +165,7 @@ const checkConfig = async (
        : Object.keys(chatModelProviders)[0]
    ],
  )[0];
+   localStorage.setItem('chatModel', chatModel);
}
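
For readers scanning the ChatWindow.tsx hunks above, the heart of the change is a provider-selection fallback: keep the stored chat model provider only if it still exposes at least one model, otherwise fall back to the first provider that does, and surface an error when `custom_openai` is selected but unconfigured. The sketch below restates that logic in isolation; it is not the repository's code, and the helper name `pickChatModel`, the `ProviderMap` type, and the example model names are illustrative assumptions.

```ts
// Hypothetical, self-contained restatement of the fallback in the diff above.
// Assumed shape: provider name -> (model key -> model metadata).
type ProviderMap = Record<string, Record<string, unknown>>;

function pickChatModel(
  chatModelProviders: ProviderMap,
  preferredProvider?: string,
): { provider: string; model: string } | null {
  const keys = Object.keys(chatModelProviders);
  if (keys.length === 0) return null; // nothing configured at all

  // Keep the preferred provider only if it still has at least one model;
  // otherwise take the first provider that does (or the first key as a last resort).
  const provider =
    preferredProvider &&
    Object.keys(chatModelProviders[preferredProvider] ?? {}).length > 0
      ? preferredProvider
      : keys.find((p) => Object.keys(chatModelProviders[p]).length > 0) ?? keys[0];

  const models = Object.keys(chatModelProviders[provider] ?? {});
  if (models.length === 0) return null; // e.g. custom_openai left unconfigured

  return { provider, model: models[0] };
}

// Example: the stored provider has no models, so the fallback picks another one.
const providers: ProviderMap = {
  custom_openai: {},
  ollama: { 'llama3.1': {}, 'qwen2.5': {} },
};
console.log(pickChatModel(providers, 'custom_openai'));
// -> { provider: 'ollama', model: 'llama3.1' }
```

In the actual diff the same checks are inlined in `checkConfig`, which also persists the chosen fallback via `localStorage.setItem('chatModel', chatModel)`.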