Mirror of https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-05-02 17:22:32 +00:00
Compare commits
14 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f88f179920 | |
| | 4cb0aeeee3 | |
| | e8fe74ae7c | |
| | ed47191d9b | |
| | b4d787d333 | |
| | 38b1995677 | |
| | f28257b480 | |
| | 9b088cd161 | |
| | 94ea6c372a | |
| | 6e61c88c9e | |
| | ba7b92ffde | |
| | f8fd2a6fb0 | |
| | 0440a810f5 | |
| | e3fef3a1be | |
README.md (11 changes)
```diff
@@ -10,6 +10,7 @@
 - [Installation](#installation)
 - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
 - [Non-Docker Installation](#non-docker-installation)
+- [Ollama connection errors](#ollama-connection-errors)
 - [One-Click Deployment](#one-click-deployment)
 - [Upcoming Features](#upcoming-features)
 - [Support Us](#support-us)
@@ -90,6 +91,16 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 
 **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.
 
+#### Ollama connection errors
+
+If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following:
+
+On Windows: `http://host.docker.internal:11434`<br>
+On Mac: `http://host.docker.internal:11434`<br>
+On Linux: `http://private_ip_of_computer_hosting_ollama:11434`
+
+You need to edit the ports accordingly.
+
 ## One-Click Deployment
 
 [](https://repocloud.io/details/?app_id=267)
```
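The new README section tells users which Ollama API URL to enter in the settings menu. As a quick sanity check that the chosen URL is actually reachable, a minimal probe against Ollama's `/api/tags` endpoint (the same endpoint the backend queries in the hunks below) could look like the following sketch; the URL constant is a placeholder for whichever value applies to your platform:

```ts
// Hypothetical connectivity check for the Ollama API URL entered in the settings menu.
// Replace OLLAMA_API_URL with the value for your platform, e.g.
// http://host.docker.internal:11434 on Windows/Mac.
const OLLAMA_API_URL = 'http://host.docker.internal:11434';

const checkOllamaConnection = async (): Promise<void> => {
  try {
    const res = await fetch(`${OLLAMA_API_URL}/api/tags`);
    if (!res.ok) {
      console.error(`Ollama responded with HTTP ${res.status}`);
      return;
    }
    const { models } = (await res.json()) as { models: { name: string }[] };
    console.log(`Ollama reachable, ${models.length} model(s) installed`);
  } catch (err) {
    console.error(`Could not reach Ollama at ${OLLAMA_API_URL}:`, err);
  }
};

checkOllamaConnection();
```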
```diff
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.3.0",
+  "version": "1.3.4",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
```
```diff
@@ -90,7 +90,11 @@ export const getAvailableChatModelProviders = async () => {
 
   if (ollamaEndpoint) {
     try {
-      const response = await fetch(`${ollamaEndpoint}/api/tags`);
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
 
       const { models: ollamaModels } = (await response.json()) as any;
 
@@ -137,7 +141,11 @@ export const getAvailableEmbeddingModelProviders = async () => {
 
   if (ollamaEndpoint) {
     try {
-      const response = await fetch(`${ollamaEndpoint}/api/tags`);
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
 
       const { models: ollamaModels } = (await response.json()) as any;
 
```
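Both hunks make the same change: the request to Ollama's `/api/tags` endpoint now sends an explicit `Content-Type: application/json` header. Lifted out of the diff context, the request pattern looks roughly like the sketch below; the standalone function and its fallback to an empty list are illustrative assumptions, not code from the repository:

```ts
// Sketch of the request pattern shown in the two hunks above: list the models
// installed in Ollama via GET /api/tags, sending an explicit JSON Content-Type
// header, and fall back to an empty list if the endpoint cannot be reached.
const listOllamaModels = async (ollamaEndpoint: string): Promise<string[]> => {
  try {
    const response = await fetch(`${ollamaEndpoint}/api/tags`, {
      headers: {
        'Content-Type': 'application/json',
      },
    });

    const { models } = (await response.json()) as { models: { name: string }[] };
    return models.map((model) => model.name);
  } catch (err) {
    console.error(`Failed to load Ollama models: ${err}`);
    return [];
  }
};
```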
```diff
@@ -14,6 +14,7 @@ export const handleConnection = async (
   ws: WebSocket,
   request: IncomingMessage,
 ) => {
+  try {
     const searchParams = new URL(request.url, `http://${request.headers.host}`)
       .searchParams;
 
@@ -23,7 +24,8 @@ export const handleConnection = async (
     ]);
 
     const chatModelProvider =
-      searchParams.get('chatModelProvider') || Object.keys(chatModelProviders)[0];
+      searchParams.get('chatModelProvider') ||
+      Object.keys(chatModelProviders)[0];
     const chatModel =
       searchParams.get('chatModel') ||
       Object.keys(chatModelProviders[chatModelProvider])[0];
@@ -70,7 +72,8 @@ export const handleConnection = async (
       ws.send(
         JSON.stringify({
           type: 'error',
-          data: 'Invalid LLM or embeddings model selected',
+          data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
+          key: 'INVALID_MODEL_SELECTED',
         }),
       );
       ws.close();
@@ -83,4 +86,15 @@ export const handleConnection = async (
     );
 
     ws.on('close', () => logger.debug('Connection closed'));
+  } catch (err) {
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: 'Internal server error.',
+        key: 'INTERNAL_SERVER_ERROR',
+      }),
+    );
+    ws.close();
+    logger.error(err);
+  }
 };
```
```diff
@@ -57,7 +57,13 @@ const handleEmitterEvents = (
   });
   emitter.on('error', (data) => {
     const parsedData = JSON.parse(data);
-    ws.send(JSON.stringify({ type: 'error', data: parsedData.data }));
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: parsedData.data,
+        key: 'CHAIN_ERROR',
+      }),
+    );
   });
 };
 
@@ -73,7 +79,11 @@ export const handleMessage = async (
 
     if (!parsedMessage.content)
       return ws.send(
-        JSON.stringify({ type: 'error', data: 'Invalid message format' }),
+        JSON.stringify({
+          type: 'error',
+          data: 'Invalid message format',
+          key: 'INVALID_FORMAT',
+        }),
       );
 
     const history: BaseMessage[] = parsedMessage.history.map((msg) => {
@@ -99,11 +109,23 @@ export const handleMessage = async (
         );
         handleEmitterEvents(emitter, ws, id);
       } else {
-        ws.send(JSON.stringify({ type: 'error', data: 'Invalid focus mode' }));
+        ws.send(
+          JSON.stringify({
+            type: 'error',
+            data: 'Invalid focus mode',
+            key: 'INVALID_FOCUS_MODE',
+          }),
+        );
       }
     }
   } catch (err) {
-    ws.send(JSON.stringify({ type: 'error', data: 'Invalid message format' }));
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: 'Invalid message format',
+        key: 'INVALID_FORMAT',
+      }),
+    );
     logger.error(`Failed to handle message: ${err}`);
   }
 };
```
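Together with the connection-manager hunks above, this change means every error frame sent over the WebSocket now carries a machine-readable `key` next to the human-readable `data` string. The type and helper below are not part of the codebase; they are an illustrative consolidation of the payload shape and the key names visible in the hunks:

```ts
import { WebSocket } from 'ws';

// Illustrative only: the error keys introduced in these changes, gathered into a
// single payload type, plus a helper that serializes it the same way the handlers
// do with ws.send(JSON.stringify({ ... })).
type WsErrorKey =
  | 'INVALID_MODEL_SELECTED'
  | 'INTERNAL_SERVER_ERROR'
  | 'CHAIN_ERROR'
  | 'INVALID_FORMAT'
  | 'INVALID_FOCUS_MODE';

interface WsErrorMessage {
  type: 'error';
  data: string; // human-readable text, surfaced to the user as a toast
  key: WsErrorKey; // machine-readable key the frontend can branch on
}

const sendError = (ws: WebSocket, key: WsErrorKey, data: string): void => {
  const payload: WsErrorMessage = { type: 'error', data, key };
  ws.send(JSON.stringify(payload));
};
```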
```diff
@@ -3,6 +3,7 @@ import { Montserrat } from 'next/font/google';
 import './globals.css';
 import { cn } from '@/lib/utils';
 import Sidebar from '@/components/Sidebar';
+import { Toaster } from 'sonner';
 
 const montserrat = Montserrat({
   weight: ['300', '400', '500', '700'],
@@ -26,6 +27,15 @@ export default function RootLayout({
     <html className="h-full" lang="en">
       <body className={cn('h-full', montserrat.className)}>
         <Sidebar>{children}</Sidebar>
+        <Toaster
+          toastOptions={{
+            unstyled: true,
+            classNames: {
+              toast:
+                'bg-[#111111] text-white rounded-lg p-4 flex flex-row items-center space-x-2',
+            },
+          }}
+        />
       </body>
     </html>
   );
```
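With `<Toaster />` mounted once in the root layout, any client component can raise a notification through sonner's `toast` API; that is how the frontend changes below surface the new error keys. A minimal, hypothetical usage example:

```tsx
'use client';

import { toast } from 'sonner';

// Hypothetical component: once <Toaster /> is mounted in the root layout,
// calling toast.error anywhere in the client tree shows a styled notification.
const ToastDemo = () => (
  <button onClick={() => toast.error('Something went wrong, please try again.')}>
    Show error toast
  </button>
);

export default ToastDemo;
```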
```diff
@@ -5,6 +5,7 @@ import { Document } from '@langchain/core/documents';
 import Navbar from './Navbar';
 import Chat from './Chat';
 import EmptyChat from './EmptyChat';
+import { toast } from 'sonner';
 
 export type Message = {
   id: string;
@@ -35,6 +36,11 @@ const useSocket = (url: string) => {
       ) {
         const providers = await fetch(
           `${process.env.NEXT_PUBLIC_API_URL}/models`,
+          {
+            headers: {
+              'Content-Type': 'application/json',
+            },
+          },
         ).then(async (res) => await res.json());
 
         const chatModelProviders = providers.chatModelProviders;
@@ -44,13 +50,13 @@ const useSocket = (url: string) => {
           !chatModelProviders ||
           Object.keys(chatModelProviders).length === 0
         )
-          return console.error('No chat models available');
+          return toast.error('No chat models available');
 
         if (
           !embeddingModelProviders ||
           Object.keys(embeddingModelProviders).length === 0
         )
-          return console.error('No embedding models available');
+          return toast.error('No embedding models available');
 
         chatModelProvider = Object.keys(chatModelProviders)[0];
         chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
@@ -92,17 +98,27 @@ const useSocket = (url: string) => {
       wsURL.search = searchParams.toString();
 
       const ws = new WebSocket(wsURL.toString());
 
       ws.onopen = () => {
         console.log('[DEBUG] open');
         setWs(ws);
       };
+
+      ws.onmessage = (e) => {
+        const parsedData = JSON.parse(e.data);
+        if (parsedData.type === 'error') {
+          toast.error(parsedData.data);
+          if (parsedData.key === 'INVALID_MODEL_SELECTED') {
+            localStorage.clear();
+          }
+        }
+      };
     };
 
     connectWs();
   }
 
   return () => {
     ws?.close();
     console.log('[DEBUG] closed');
   };
@@ -150,6 +166,12 @@ const ChatWindow = () => {
     const messageHandler = (e: MessageEvent) => {
       const data = JSON.parse(e.data);
 
+      if (data.type === 'error') {
+        toast.error(data.data);
+        setLoading(false);
+        return;
+      }
+
       if (data.type === 'sources') {
         sources = data.data;
         if (!added) {
```
```diff
@@ -33,12 +33,8 @@ const SettingsDialog = ({
   const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
     string | null
   >(null);
-  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string | null>(
-    null,
-  );
-  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string | null>(
-    null,
-  );
+  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>('');
+  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>('');
   const [isLoading, setIsLoading] = useState(false);
   const [isUpdating, setIsUpdating] = useState(false);
 
@@ -46,9 +42,54 @@ const SettingsDialog = ({
     if (isOpen) {
       const fetchConfig = async () => {
         setIsLoading(true);
-        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`);
-        const data = await res.json();
+        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+          headers: {
+            'Content-Type': 'application/json',
+          },
+        });
+
+        const data = (await res.json()) as SettingsType;
         setConfig(data);
+
+        const chatModelProvidersKeys = Object.keys(
+          data.chatModelProviders || {},
+        );
+        const embeddingModelProvidersKeys = Object.keys(
+          data.embeddingModelProviders || {},
+        );
+
+        const defaultChatModelProvider =
+          chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
+        const defaultEmbeddingModelProvider =
+          embeddingModelProvidersKeys.length > 0
+            ? embeddingModelProvidersKeys[0]
+            : '';
+
+        const chatModelProvider =
+          localStorage.getItem('chatModelProvider') ||
+          defaultChatModelProvider ||
+          '';
+        const chatModel =
+          localStorage.getItem('chatModel') ||
+          (data.chatModelProviders &&
+            data.chatModelProviders[chatModelProvider]?.[0]) ||
+          '';
+        const embeddingModelProvider =
+          localStorage.getItem('embeddingModelProvider') ||
+          defaultEmbeddingModelProvider ||
+          '';
+        const embeddingModel =
+          localStorage.getItem('embeddingModel') ||
+          (data.embeddingModelProviders &&
+            data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
+          '';
+
+        setSelectedChatModelProvider(chatModelProvider);
+        setSelectedChatModel(chatModel);
+        setSelectedEmbeddingModelProvider(embeddingModelProvider);
+        setSelectedEmbeddingModel(embeddingModel);
+        setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
+        setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl') || '');
         setIsLoading(false);
       };
 
@@ -57,17 +98,6 @@ const SettingsDialog = ({
       // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [isOpen]);
 
-  useEffect(() => {
-    setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
-    setSelectedChatModel(localStorage.getItem('chatModel'));
-    setSelectedEmbeddingModelProvider(
-      localStorage.getItem('embeddingModelProvider'),
-    );
-    setSelectedEmbeddingModel(localStorage.getItem('embeddingModel'));
-    setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey'));
-    setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl'));
-  }, []);
-
   const handleSubmit = async () => {
     setIsUpdating(true);
 
@@ -222,7 +252,7 @@ const SettingsDialog = ({
                 </div>
                 <div className="flex flex-col space-y-1">
                   <p className="text-white/70 text-sm">
-                    Custom OpenAI API Key (optional)
+                    Custom OpenAI API Key
                   </p>
                   <input
                     type="text"
@@ -251,7 +281,7 @@ const SettingsDialog = ({
               </>
             )}
             {/* Embedding models */}
-            {config.chatModelProviders && (
+            {config.embeddingModelProviders && (
              <div className="flex flex-col space-y-1">
                 <p className="text-white/70 text-sm">
                   Embedding model Provider
```
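The rewritten `fetchConfig` applies the same precedence four times: prefer the value saved in localStorage, fall back to the first provider or model returned by the config endpoint, and finally fall back to an empty string. A compact restatement of that precedence as a hypothetical helper (it does not exist in the codebase):

```ts
// Hypothetical helper capturing the precedence used in fetchConfig above:
// saved localStorage value -> first available option -> empty string.
const pickDefault = (storageKey: string, available: string[]): string => {
  const saved =
    typeof window !== 'undefined' ? localStorage.getItem(storageKey) : null;
  return saved || available[0] || '';
};

// Example (keys and config shape as used above):
// const chatModelProvider = pickDefault(
//   'chatModelProvider',
//   Object.keys(data.chatModelProviders || {}),
// );
```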
```diff
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.3.0",
+  "version": "1.3.4",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -24,6 +24,7 @@
     "react-dom": "^18",
     "react-text-to-speech": "^0.14.5",
     "react-textarea-autosize": "^8.5.3",
+    "sonner": "^1.4.41",
     "tailwind-merge": "^2.2.2",
     "yet-another-react-lightbox": "^3.17.2",
     "zod": "^3.22.4"
```
```diff
@@ -2839,6 +2839,11 @@ slash@^3.0.0:
   resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
   integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
 
+sonner@^1.4.41:
+  version "1.4.41"
+  resolved "https://registry.yarnpkg.com/sonner/-/sonner-1.4.41.tgz#ff085ae4f4244713daf294959beaa3e90f842d2c"
+  integrity sha512-uG511ggnnsw6gcn/X+YKkWPo5ep9il9wYi3QJxHsYe7yTZ4+cOd1wuodOUmOpFuXL+/RE3R04LczdNCDygTDgQ==
+
 source-map-js@^1.0.2, source-map-js@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
```