Compare commits

17 Commits

Author SHA1 Message Date
ItzCrazyKns  97e64aa65e  Merge branch 'pr/703'  2025-03-30 21:12:27 +05:30
ItzCrazyKns  90e303f737  feat(search): lint & beautify, update content type  2025-03-30 21:12:04 +05:30
ItzCrazyKns  7955d8e408  Merge pull request #705 from ottsch/add-gemini-2.5  2025-03-29 21:53:02 +05:30
             feat(models): Update Gemini chat models
ottsch       b285cb4323  Update Gemini chat models  2025-03-28 17:07:11 +01:00
OTYAK        5d60ab1139  feat(api): Switch to newline-delimited JSON streaming instead of SSE  2025-03-27 13:04:09 +01:00
OTYAK        9095996356  Merge branch 'ItzCrazyKns:master' into master  2025-03-27 13:01:09 +01:00
ItzCrazyKns  310c8a75fd  feat(routes): fix typo, closes #692  2025-03-27 11:36:58 +05:30
OTYAK        191d1dc25f  refactor(api): clean up comments and improve abort handling in search route  2025-03-26 11:32:46 +01:00
OTYAK        d3b2f8983d  feat(api): add streaming support to search route  2025-03-26 11:28:05 +01:00
ItzCrazyKns  27286465a3  feat(package): bump version  2025-03-26 13:34:09 +05:30
ItzCrazyKns  defc677932  feat(providers): update gemini & anthropic provider  2025-03-25 22:01:24 +05:30
ItzCrazyKns  45df9dc5bf  feat(readme): update networking guide  2025-03-21 11:27:12 +05:30
ItzCrazyKns  06db95d7c0  feat(dockerfile): fix onnx issues  2025-03-21 11:25:28 +05:30
ItzCrazyKns  74f7eaed6e  feat(workflow): fix build errors  2025-03-20 13:43:29 +05:30
ItzCrazyKns  dddd944a18  feat(workflow): update docker build  2025-03-20 13:22:43 +05:30
ItzCrazyKns  7eccd4d75b  Merge pull request #679 from ItzCrazyKns/feat/remove-backend  2025-03-20 12:48:27 +05:30
             feat(app): fix build errors
ItzCrazyKns  04a0342b52  Merge pull request #678 from ItzCrazyKns/feat/remove-backend  2025-03-20 12:42:18 +05:30
             Feat/remove backend
16 changed files with 324 additions and 74 deletions

View File

@@ -8,15 +8,12 @@ on:
types: [published]
jobs:
build-and-push:
build-amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
@@ -33,28 +30,104 @@ jobs:
id: version
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Build and push Docker image
- name: Build and push AMD64 Docker image
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: |
docker buildx create --use
DOCKERFILE=app.dockerfile; \
IMAGE_NAME=perplexica; \
docker buildx build --platform linux/amd64,linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:main \
DOCKERFILE=app.dockerfile
IMAGE_NAME=perplexica
docker buildx build --platform linux/amd64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:amd64 \
--cache-to=type=inline \
--provenance false \
-f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:main \
-t itzcrazykns1337/${IMAGE_NAME}:amd64 \
--push .
- name: Build and push release Docker image
- name: Build and push AMD64 release Docker image
if: github.event_name == 'release'
run: |
docker buildx create --use
DOCKERFILE=app.dockerfile; \
IMAGE_NAME=perplexica; \
docker buildx build --platform linux/amd64,linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
DOCKERFILE=app.dockerfile
IMAGE_NAME=perplexica
docker buildx build --platform linux/amd64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-amd64 \
--cache-to=type=inline \
--provenance false \
-f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
-t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-amd64 \
--push .
build-arm64:
runs-on: ubuntu-24.04-arm
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
install: true
- name: Log in to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract version from release tag
if: github.event_name == 'release'
id: version
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Build and push ARM64 Docker image
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: |
DOCKERFILE=app.dockerfile
IMAGE_NAME=perplexica
docker buildx build --platform linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:arm64 \
--cache-to=type=inline \
--provenance false \
-f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:arm64 \
--push .
- name: Build and push ARM64 release Docker image
if: github.event_name == 'release'
run: |
DOCKERFILE=app.dockerfile
IMAGE_NAME=perplexica
docker buildx build --platform linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-arm64 \
--cache-to=type=inline \
--provenance false \
-f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-arm64 \
--push .
manifest:
needs: [build-amd64, build-arm64]
runs-on: ubuntu-latest
steps:
- name: Log in to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Create and push multi-arch manifest for main
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: |
IMAGE_NAME=perplexica
docker manifest create itzcrazykns1337/${IMAGE_NAME}:main \
--amend itzcrazykns1337/${IMAGE_NAME}:amd64 \
--amend itzcrazykns1337/${IMAGE_NAME}:arm64
docker manifest push itzcrazykns1337/${IMAGE_NAME}:main
- name: Create and push multi-arch manifest for releases
if: github.event_name == 'release'
run: |
IMAGE_NAME=perplexica
docker manifest create itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
--amend itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-amd64 \
--amend itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-arm64
docker manifest push itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}

View File

@@ -153,7 +153,7 @@ For more details, check out the full documentation [here](https://github.com/Itz
## Expose Perplexica to network
You can access Perplexica over your home network by following our networking guide [here](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md).
Perplexica runs on Next.js and handles all API requests. It works right away on the same network and stays accessible even with port forwarding.
## One-Click Deployment

View File

@@ -1,4 +1,4 @@
FROM node:20.18.0-alpine AS builder
FROM node:20.18.0-slim AS builder
WORKDIR /home/perplexica
@@ -12,7 +12,7 @@ COPY public ./public
RUN mkdir -p /home/perplexica/data
RUN yarn build
FROM node:20.18.0-alpine
FROM node:20.18.0-slim
WORKDIR /home/perplexica

View File

@@ -32,7 +32,8 @@ The API accepts a JSON object in the request body, where you define the focus mo
"history": [
["human", "Hi, how are you?"],
["assistant", "I am doing well, how can I help you today?"]
]
],
"stream": false
}
```
@@ -71,11 +72,13 @@ The API accepts a JSON object in the request body, where you define the focus mo
]
```
- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`.
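For illustration only (this sketch is not part of the diff), a minimal non-streaming call might look like the following; the `http://localhost:3000/api/search` endpoint path and the exact body fields are assumptions based on the surrounding documentation.

```typescript
// Hypothetical example: a single JSON response when `stream` is false (or omitted).
// Assumes Node 18+ (built-in fetch) and the search endpoint path documented here.
async function searchOnce() {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      focusMode: 'webSearch',
      query: 'What is Perplexica?',
      history: [
        ['human', 'Hi, how are you?'],
        ['assistant', 'I am doing well, how can I help you today?'],
      ],
      stream: false, // default; the whole answer arrives in one JSON object
    }),
  });

  const { message, sources } = await res.json();
  console.log(message, `(${sources.length} sources)`);
}

searchOnce().catch(console.error);
```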
### Response
The response from the API includes both the final message and the sources used to generate that message.
#### Example Response
#### Standard Response (stream: false)
```json
{
@@ -100,6 +103,28 @@ The response from the API includes both the final message and the sources used t
}
```
#### Streaming Response (stream: true)
When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json.
Example of streamed response objects:
```
{"type":"init","data":"Stream connected"}
{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
{"type":"response","data":"Perplexica is an "}
{"type":"response","data":"innovative, open-source "}
{"type":"response","data":"AI-powered search engine..."}
{"type":"done"}
```
Clients should process each line as a separate JSON object. The different message types include:
- **`init`**: Initial connection message
- **`sources`**: All sources used for the response
- **`response`**: Chunks of the generated answer text
- **`done`**: Indicates the stream is complete
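As a rough sketch (again, not part of the diff itself), a client can buffer the response body and parse each completed line as one of the objects listed above; the endpoint path is assumed, and `process.stdout` implies a Node 18+ runtime.

```typescript
// Hypothetical streaming client: reads newline-delimited JSON objects from the body.
async function searchStreaming(query: string) {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ focusMode: 'webSearch', query, history: [], stream: true }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Every complete line is a standalone JSON object; keep any partial line buffered.
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';

    for (const line of lines) {
      if (!line.trim()) continue;
      const event = JSON.parse(line);
      if (event.type === 'response') process.stdout.write(event.data);
      else if (event.type === 'sources') console.log(`\n[received ${event.data.length} sources]`);
      else if (event.type === 'done') console.log('\n[stream complete]');
    }
  }
}

searchStreaming('What is Perplexica?').catch(console.error);
```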
### Fields in the Response
- **`message`** (string): The search result, generated based on the query and focus mode.

View File

@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
"version": "1.10.0",
"version": "1.10.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {
@@ -15,8 +15,10 @@
"@headlessui/react": "^2.2.0",
"@iarna/toml": "^2.2.5",
"@icons-pack/react-simple-icons": "^12.3.0",
"@langchain/anthropic": "^0.3.15",
"@langchain/community": "^0.3.36",
"@langchain/core": "^0.3.42",
"@langchain/google-genai": "^0.1.12",
"@langchain/openai": "^0.0.25",
"@langchain/textsplitters": "^0.1.0",
"@tailwindcss/typography": "^0.5.12",

View File

@@ -295,9 +295,9 @@ export const POST = async (req: Request) => {
},
});
} catch (err) {
console.error('An error ocurred while processing chat request:', err);
console.error('An error occurred while processing chat request:', err);
return Response.json(
{ message: 'An error ocurred while processing chat request' },
{ message: 'An error occurred while processing chat request' },
{ status: 500 },
);
}

View File

@@ -59,9 +59,9 @@ export const GET = async (req: Request) => {
return Response.json({ ...config }, { status: 200 });
} catch (err) {
console.error('An error ocurred while getting config:', err);
console.error('An error occurred while getting config:', err);
return Response.json(
{ message: 'An error ocurred while getting config' },
{ message: 'An error occurred while getting config' },
{ status: 500 },
);
}
@@ -100,9 +100,9 @@ export const POST = async (req: Request) => {
return Response.json({ message: 'Config updated' }, { status: 200 });
} catch (err) {
console.error('An error ocurred while updating config:', err);
console.error('An error occurred while updating config:', err);
return Response.json(
{ message: 'An error ocurred while updating config' },
{ message: 'An error occurred while updating config' },
{ status: 500 },
);
}

View File

@@ -48,7 +48,7 @@ export const GET = async (req: Request) => {
},
);
} catch (err) {
console.error(`An error ocurred in discover route: ${err}`);
console.error(`An error occurred in discover route: ${err}`);
return Response.json(
{
message: 'An error has occurred',

View File

@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {
return Response.json({ images }, { status: 200 });
} catch (err) {
console.error(`An error ocurred while searching images: ${err}`);
console.error(`An error occurred while searching images: ${err}`);
return Response.json(
{ message: 'An error ocurred while searching images' },
{ message: 'An error occurred while searching images' },
{ status: 500 },
);
}

View File

@@ -34,7 +34,7 @@ export const GET = async (req: Request) => {
},
);
} catch (err) {
console.error('An error ocurred while fetching models', err);
console.error('An error occurred while fetching models', err);
return Response.json(
{
message: 'An error has occurred.',

View File

@@ -33,6 +33,7 @@ interface ChatRequestBody {
embeddingModel?: embeddingModel;
query: string;
history: Array<[string, string]>;
stream?: boolean;
}
export const POST = async (req: Request) => {
@@ -48,6 +49,7 @@ export const POST = async (req: Request) => {
body.history = body.history || [];
body.optimizationMode = body.optimizationMode || 'balanced';
body.stream = body.stream || false;
const history: BaseMessage[] = body.history.map((msg) => {
return msg[0] === 'human'
@@ -125,40 +127,137 @@ export const POST = async (req: Request) => {
[],
);
return new Promise(
(
resolve: (value: Response) => void,
reject: (value: Response) => void,
) => {
let message = '';
if (!body.stream) {
return new Promise(
(
resolve: (value: Response) => void,
reject: (value: Response) => void,
) => {
let message = '';
let sources: any[] = [];
emitter.on('data', (data: string) => {
try {
const parsedData = JSON.parse(data);
if (parsedData.type === 'response') {
message += parsedData.data;
} else if (parsedData.type === 'sources') {
sources = parsedData.data;
}
} catch (error) {
reject(
Response.json(
{ message: 'Error parsing data' },
{ status: 500 },
),
);
}
});
emitter.on('end', () => {
resolve(Response.json({ message, sources }, { status: 200 }));
});
emitter.on('error', (error: any) => {
reject(
Response.json(
{ message: 'Search error', error },
{ status: 500 },
),
);
});
},
);
}
const encoder = new TextEncoder();
const abortController = new AbortController();
const { signal } = abortController;
const stream = new ReadableStream({
start(controller) {
let sources: any[] = [];
emitter.on('data', (data) => {
controller.enqueue(
encoder.encode(
JSON.stringify({
type: 'init',
data: 'Stream connected',
}) + '\n',
),
);
signal.addEventListener('abort', () => {
emitter.removeAllListeners();
try {
controller.close();
} catch (error) {}
});
emitter.on('data', (data: string) => {
if (signal.aborted) return;
try {
const parsedData = JSON.parse(data);
if (parsedData.type === 'response') {
message += parsedData.data;
controller.enqueue(
encoder.encode(
JSON.stringify({
type: 'response',
data: parsedData.data,
}) + '\n',
),
);
} else if (parsedData.type === 'sources') {
sources = parsedData.data;
controller.enqueue(
encoder.encode(
JSON.stringify({
type: 'sources',
data: sources,
}) + '\n',
),
);
}
} catch (error) {
reject(
Response.json({ message: 'Error parsing data' }, { status: 500 }),
);
controller.error(error);
}
});
emitter.on('end', () => {
resolve(Response.json({ message, sources }, { status: 200 }));
if (signal.aborted) return;
controller.enqueue(
encoder.encode(
JSON.stringify({
type: 'done',
}) + '\n',
),
);
controller.close();
});
emitter.on('error', (error) => {
reject(
Response.json({ message: 'Search error', error }, { status: 500 }),
);
emitter.on('error', (error: any) => {
if (signal.aborted) return;
controller.error(error);
});
},
);
cancel() {
abortController.abort();
},
});
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache, no-transform',
Connection: 'keep-alive',
},
});
} catch (err: any) {
console.error(`Error in getting search results: ${err.message}`);
return Response.json(

View File

@@ -72,9 +72,9 @@ export const POST = async (req: Request) => {
return Response.json({ suggestions }, { status: 200 });
} catch (err) {
console.error(`An error ocurred while generating suggestions: ${err}`);
console.error(`An error occurred while generating suggestions: ${err}`);
return Response.json(
{ message: 'An error ocurred while generating suggestions' },
{ message: 'An error occurred while generating suggestions' },
{ status: 500 },
);
}

View File

@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {
return Response.json({ videos }, { status: 200 });
} catch (err) {
console.error(`An error ocurred while searching videos: ${err}`);
console.error(`An error occurred while searching videos: ${err}`);
return Response.json(
{ message: 'An error ocurred while searching videos' },
{ message: 'An error occurred while searching videos' },
{ status: 500 },
);
}

View File

@@ -1,4 +1,4 @@
import { ChatOpenAI } from '@langchain/openai';
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatModel } from '.';
import { getAnthropicApiKey } from '../config';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
@@ -45,13 +45,10 @@ export const loadAnthropicChatModels = async () => {
anthropicChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
openAIApiKey: anthropicApiKey,
model: new ChatAnthropic({
apiKey: anthropicApiKey,
modelName: model.key,
temperature: 0.7,
configuration: {
baseURL: 'https://api.anthropic.com/v1/',
},
}) as unknown as BaseChatModel,
};
});

View File

@@ -1,10 +1,17 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import {
ChatGoogleGenerativeAI,
GoogleGenerativeAIEmbeddings,
} from '@langchain/google-genai';
import { getGeminiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
const geminiChatModels: Record<string, string>[] = [
{
displayName: 'Gemini 2.5 Pro Experimental',
key: 'gemini-2.5-pro-exp-03-25',
},
{
displayName: 'Gemini 2.0 Flash',
key: 'gemini-2.0-flash',
@@ -14,8 +21,8 @@ const geminiChatModels: Record<string, string>[] = [
key: 'gemini-2.0-flash-lite',
},
{
displayName: 'Gemini 2.0 Pro Experimental',
key: 'gemini-2.0-pro-exp-02-05',
displayName: 'Gemini 2.0 Flash Thinking Experimental',
key: 'gemini-2.0-flash-thinking-exp-01-21',
},
{
displayName: 'Gemini 1.5 Flash',
@@ -49,13 +56,10 @@ export const loadGeminiChatModels = async () => {
geminiChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
openAIApiKey: geminiApiKey,
model: new ChatGoogleGenerativeAI({
apiKey: geminiApiKey,
modelName: model.key,
temperature: 0.7,
configuration: {
baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
},
}) as unknown as BaseChatModel,
};
});
@@ -78,12 +82,9 @@ export const loadGeminiEmbeddingModels = async () => {
geminiEmbeddingModels.forEach((model) => {
embeddingModels[model.key] = {
displayName: model.displayName,
model: new OpenAIEmbeddings({
openAIApiKey: geminiApiKey,
model: new GoogleGenerativeAIEmbeddings({
apiKey: geminiApiKey,
modelName: model.key,
configuration: {
baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
},
}) as unknown as Embeddings,
};
});

View File

@@ -12,6 +12,19 @@
resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30"
integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==
"@anthropic-ai/sdk@^0.37.0":
version "0.37.0"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.37.0.tgz#0018127404ecb9b8a12968068e0c4b3e8bbd6386"
integrity sha512-tHjX2YbkUBwEgg0JZU3EFSSAQPoK4qQR/NFYa8Vtzd5UAyXzZksCw2In69Rml4R/TyHPBfRYaLK35XiOe33pjw==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
"@anthropic-ai/sdk@^0.9.1":
version "0.9.1"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.9.1.tgz#b2d2b7bf05c90dce502c9a2e869066870f69ba88"
@@ -374,6 +387,11 @@
resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.8.tgz#21a907684723bbbaa5f0974cf7730bd797eb8e62"
integrity sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==
"@google/generative-ai@^0.24.0":
version "0.24.0"
resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.24.0.tgz#4d27af7d944c924a27a593c17ad1336535d53846"
integrity sha512-fnEITCGEB7NdX0BhoYZ/cq/7WPZ1QS5IzJJfC3Tg/OwkvBetMiVJciyaan297OvE4B9Jg1xvo0zIazX/9sGu1Q==
"@headlessui/react@^2.2.0":
version "2.2.0"
resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-2.2.0.tgz#a8e32f0899862849a1ce1615fa280e7891431ab7"
@@ -575,6 +593,16 @@
"@jridgewell/resolve-uri" "^3.1.0"
"@jridgewell/sourcemap-codec" "^1.4.14"
"@langchain/anthropic@^0.3.15":
version "0.3.15"
resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-0.3.15.tgz#0244cdb345cb492eb40aedd681881ebadfbb73f2"
integrity sha512-Ar2viYcZ64idgV7EtCBCb36tIkNtPAhQRxSaMTWPHGspFgMfvwRoleVri9e90sCpjpS9xhlHsIQ0LlUS/Atsrw==
dependencies:
"@anthropic-ai/sdk" "^0.37.0"
fast-xml-parser "^4.4.1"
zod "^3.22.4"
zod-to-json-schema "^3.22.4"
"@langchain/community@^0.3.36":
version "0.3.36"
resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.3.36.tgz#e4c13b8f928b17e0f9257395f43be2246dfada40"
@@ -640,6 +668,14 @@
zod "^3.22.4"
zod-to-json-schema "^3.22.3"
"@langchain/google-genai@^0.1.12":
version "0.1.12"
resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-0.1.12.tgz#6727253bda6f0d87cd74cf0bb6b1e0f398f60f32"
integrity sha512-0Ea0E2g63ejCuormVxbuoyJQ5BYN53i2/fb6WP8bMKzyh+y43R13V8JqOtr3e/GmgNyv3ou/VeaZjx7KAvu/0g==
dependencies:
"@google/generative-ai" "^0.24.0"
zod-to-json-schema "^3.22.4"
"@langchain/openai@>=0.1.0 <0.5.0", "@langchain/openai@>=0.2.0 <0.5.0":
version "0.4.5"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.4.5.tgz#d18e207c3ec3f2ecaa4698a5a5888092f643da52"
@@ -2369,6 +2405,13 @@ fast-levenshtein@^2.0.6:
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
fast-xml-parser@^4.4.1:
version "4.5.3"
resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz#c54d6b35aa0f23dc1ea60b6c884340c006dc6efb"
integrity sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==
dependencies:
strnum "^1.1.1"
fastq@^1.6.0:
version "1.17.1"
resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.17.1.tgz#2a523f07a4e7b1e81a42b91b8bf2254107753b47"
@@ -4458,6 +4501,11 @@ strip-json-comments@~2.0.1:
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==
strnum@^1.1.1:
version "1.1.2"
resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.1.2.tgz#57bca4fbaa6f271081715dbc9ed7cee5493e28e4"
integrity sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==
styled-jsx@5.1.6:
version "5.1.6"
resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.6.tgz#83b90c077e6c6a80f7f5e8781d0f311b2fe41499"
@@ -4955,6 +5003,11 @@ zod-to-json-schema@^3.22.3, zod-to-json-schema@^3.22.5:
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.22.5.tgz#3646e81cfc318dbad2a22519e5ce661615418673"
integrity sha512-+akaPo6a0zpVCCseDed504KBJUQpEW5QZw7RMneNmKw+fGaML1Z9tUNLnHHAC8x6dzVRO1eB2oEMyZRnuBZg7Q==
zod-to-json-schema@^3.22.4:
version "3.24.5"
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz#d1095440b147fb7c2093812a53c54df8d5df50a3"
integrity sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==
zod@^3.22.3, zod@^3.22.4:
version "3.22.4"
resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"