Mirror of https://github.com/ItzCrazyKns/Perplexica.git
Synced 2025-06-18 07:48:35 +00:00

Compare commits: 0fcd598ff7 ... b5ee8386e7 (13 commits)

Commits:
b5ee8386e7
4b2a7916fd
97e64aa65e
90e303f737
7955d8e408
b285cb4323
5d60ab1139
9095996356
310c8a75fd
191d1dc25f
d3b2f8983d
27286465a3
defc677932

.github/workflows/docker-build.yaml (vendored): 5 lines changed

@@ -114,6 +114,11 @@ jobs:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
 
+      - name: Extract version from release tag
+        if: github.event_name == 'release'
+        id: version
+        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
+
       - name: Create and push multi-arch manifest for main
         if: github.ref == 'refs/heads/master' && github.event_name == 'push'
         run: |

Search API documentation:

@@ -32,7 +32,8 @@ The API accepts a JSON object in the request body, where you define the focus mo
     "history": [
       ["human", "Hi, how are you?"],
       ["assistant", "I am doing well, how can I help you today?"]
-    ]
+    ],
+    "stream": false
   }
   ```
 
@@ -71,11 +72,13 @@ The API accepts a JSON object in the request body, where you define the focus mo
     ]
     ```
 
+- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`.
+
 ### Response
 
 The response from the API includes both the final message and the sources used to generate that message.
 
-#### Example Response
+#### Standard Response (stream: false)
 
 ```json
 {
@@ -100,6 +103,28 @@ The response from the API includes both the final message and the sources used t
 }
 ```
 
+#### Streaming Response (stream: true)
+
+When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json.
+
+Example of streamed response objects:
+
+```
+{"type":"init","data":"Stream connected"}
+{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
+{"type":"response","data":"Perplexica is an "}
+{"type":"response","data":"innovative, open-source "}
+{"type":"response","data":"AI-powered search engine..."}
+{"type":"done"}
+```
+
+Clients should process each line as a separate JSON object. The different message types include:
+
+- **`init`**: Initial connection message
+- **`sources`**: All sources used for the response
+- **`response`**: Chunks of the generated answer text
+- **`done`**: Indicates the stream is complete
+
 ### Fields in the Response
 
 - **`message`** (string): The search result, generated based on the query and focus mode.

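As a reading aid for the documentation change above, here is a minimal client-side sketch of consuming the newline-delimited stream. It is not part of the compared commits; the endpoint path `/api/search` and the focus mode value `'webSearch'` are assumptions, while the message types (`init`, `sources`, `response`, `done`) come from the docs above.

```ts
// Minimal sketch (assumed endpoint path and focus mode): consume the
// newline-delimited JSON stream produced when "stream": true is sent.
async function streamSearch(query: string): Promise<string> {
  const res = await fetch('/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      focusMode: 'webSearch', // assumption: one of the documented focus modes
      query,
      history: [],
      stream: true,
    }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let answer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Every complete line is one JSON object: init, sources, response or done.
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      if (!line.trim()) continue;
      const event = JSON.parse(line);
      if (event.type === 'response') answer += event.data;
      if (event.type === 'done') return answer;
    }
  }
  return answer;
}
```
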
package.json:

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.10.0",
+  "version": "1.10.1",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -15,8 +15,10 @@
     "@headlessui/react": "^2.2.0",
     "@iarna/toml": "^2.2.5",
     "@icons-pack/react-simple-icons": "^12.3.0",
+    "@langchain/anthropic": "^0.3.15",
     "@langchain/community": "^0.3.36",
     "@langchain/core": "^0.3.42",
+    "@langchain/google-genai": "^0.1.12",
     "@langchain/openai": "^0.0.25",
     "@langchain/textsplitters": "^0.1.0",
     "@tailwindcss/typography": "^0.5.12",

Chat API route handler:

@@ -295,9 +295,9 @@ export const POST = async (req: Request) => {
       },
     });
   } catch (err) {
-    console.error('An error ocurred while processing chat request:', err);
+    console.error('An error occurred while processing chat request:', err);
     return Response.json(
-      { message: 'An error ocurred while processing chat request' },
+      { message: 'An error occurred while processing chat request' },
       { status: 500 },
     );
   }

Config API route handler:

@@ -59,9 +59,9 @@ export const GET = async (req: Request) => {
 
     return Response.json({ ...config }, { status: 200 });
   } catch (err) {
-    console.error('An error ocurred while getting config:', err);
+    console.error('An error occurred while getting config:', err);
     return Response.json(
-      { message: 'An error ocurred while getting config' },
+      { message: 'An error occurred while getting config' },
       { status: 500 },
     );
   }
@@ -100,9 +100,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ message: 'Config updated' }, { status: 200 });
   } catch (err) {
-    console.error('An error ocurred while updating config:', err);
+    console.error('An error occurred while updating config:', err);
     return Response.json(
-      { message: 'An error ocurred while updating config' },
+      { message: 'An error occurred while updating config' },
       { status: 500 },
     );
   }

Discover API route handler:

@@ -48,7 +48,7 @@ export const GET = async (req: Request) => {
       },
     );
   } catch (err) {
-    console.error(`An error ocurred in discover route: ${err}`);
+    console.error(`An error occurred in discover route: ${err}`);
     return Response.json(
       {
         message: 'An error has occurred',

Image search API route handler:

@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ images }, { status: 200 });
   } catch (err) {
-    console.error(`An error ocurred while searching images: ${err}`);
+    console.error(`An error occurred while searching images: ${err}`);
     return Response.json(
-      { message: 'An error ocurred while searching images' },
+      { message: 'An error occurred while searching images' },
       { status: 500 },
     );
   }

Models API route handler:

@@ -34,7 +34,7 @@ export const GET = async (req: Request) => {
       },
     );
   } catch (err) {
-    console.error('An error ocurred while fetching models', err);
+    console.error('An error occurred while fetching models', err);
     return Response.json(
       {
         message: 'An error has occurred.',

Search API route handler:

@@ -33,6 +33,7 @@ interface ChatRequestBody {
   embeddingModel?: embeddingModel;
   query: string;
   history: Array<[string, string]>;
+  stream?: boolean;
 }
 
 export const POST = async (req: Request) => {
@@ -48,6 +49,7 @@ export const POST = async (req: Request) => {
 
   body.history = body.history || [];
   body.optimizationMode = body.optimizationMode || 'balanced';
+  body.stream = body.stream || false;
 
   const history: BaseMessage[] = body.history.map((msg) => {
     return msg[0] === 'human'
@@ -125,40 +127,137 @@ export const POST = async (req: Request) => {
       [],
     );
 
-    return new Promise(
-      (
-        resolve: (value: Response) => void,
-        reject: (value: Response) => void,
-      ) => {
-        let message = '';
-        let sources: any[] = [];
-
-        emitter.on('data', (data) => {
-          try {
-            const parsedData = JSON.parse(data);
-            if (parsedData.type === 'response') {
-              message += parsedData.data;
-            } else if (parsedData.type === 'sources') {
-              sources = parsedData.data;
-            }
-          } catch (error) {
-            reject(
-              Response.json({ message: 'Error parsing data' }, { status: 500 }),
-            );
-          }
-        });
-
-        emitter.on('end', () => {
-          resolve(Response.json({ message, sources }, { status: 200 }));
-        });
-
-        emitter.on('error', (error) => {
-          reject(
-            Response.json({ message: 'Search error', error }, { status: 500 }),
-          );
-        });
-      },
-    );
+    if (!body.stream) {
+      return new Promise(
+        (
+          resolve: (value: Response) => void,
+          reject: (value: Response) => void,
+        ) => {
+          let message = '';
+          let sources: any[] = [];
+
+          emitter.on('data', (data: string) => {
+            try {
+              const parsedData = JSON.parse(data);
+              if (parsedData.type === 'response') {
+                message += parsedData.data;
+              } else if (parsedData.type === 'sources') {
+                sources = parsedData.data;
+              }
+            } catch (error) {
+              reject(
+                Response.json(
+                  { message: 'Error parsing data' },
+                  { status: 500 },
+                ),
+              );
+            }
+          });
+
+          emitter.on('end', () => {
+            resolve(Response.json({ message, sources }, { status: 200 }));
+          });
+
+          emitter.on('error', (error: any) => {
+            reject(
+              Response.json(
+                { message: 'Search error', error },
+                { status: 500 },
+              ),
+            );
+          });
+        },
+      );
+    }
+
+    const encoder = new TextEncoder();
+
+    const abortController = new AbortController();
+    const { signal } = abortController;
+
+    const stream = new ReadableStream({
+      start(controller) {
+        let sources: any[] = [];
+
+        controller.enqueue(
+          encoder.encode(
+            JSON.stringify({
+              type: 'init',
+              data: 'Stream connected',
+            }) + '\n',
+          ),
+        );
+
+        signal.addEventListener('abort', () => {
+          emitter.removeAllListeners();
+
+          try {
+            controller.close();
+          } catch (error) {}
+        });
+
+        emitter.on('data', (data: string) => {
+          if (signal.aborted) return;
+
+          try {
+            const parsedData = JSON.parse(data);
+
+            if (parsedData.type === 'response') {
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'response',
+                    data: parsedData.data,
+                  }) + '\n',
+                ),
+              );
+            } else if (parsedData.type === 'sources') {
+              sources = parsedData.data;
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'sources',
+                    data: sources,
+                  }) + '\n',
+                ),
+              );
+            }
+          } catch (error) {
+            controller.error(error);
+          }
+        });
+
+        emitter.on('end', () => {
+          if (signal.aborted) return;
+
+          controller.enqueue(
+            encoder.encode(
+              JSON.stringify({
+                type: 'done',
+              }) + '\n',
+            ),
+          );
+          controller.close();
+        });
+
+        emitter.on('error', (error: any) => {
+          if (signal.aborted) return;
+
+          controller.error(error);
+        });
+      },
+      cancel() {
+        abortController.abort();
+      },
+    });
+
+    return new Response(stream, {
+      headers: {
+        'Content-Type': 'text/event-stream',
+        'Cache-Control': 'no-cache, no-transform',
+        Connection: 'keep-alive',
+      },
+    });
   } catch (err: any) {
     console.error(`Error in getting search results: ${err.message}`);
     return Response.json(

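A hedged companion sketch, again not part of the compared commits: the new `cancel()` handler above aborts the internal AbortController when the stream is cancelled, which is what a client-side abort like the one below is intended to trigger. The endpoint path and request fields mirror the assumptions in the earlier sketch.

```ts
// Illustrative only: abort a streaming search from the client after a timeout.
// Cancelling the fetch should cancel the server-side ReadableStream, whose
// cancel() callback (added in this diff) aborts the internal AbortController
// and removes the emitter listeners.
async function searchWithTimeout(query: string, ms: number): Promise<void> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), ms);

  try {
    const res = await fetch('/api/search', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        focusMode: 'webSearch', // assumption, as in the sketch above
        query,
        history: [],
        stream: true,
      }),
      signal: controller.signal,
    });

    const reader = res.body!.getReader();
    // Drain the stream; a real client would parse each NDJSON line here.
    while (!(await reader.read()).done) {
      // keep reading until the stream ends or the abort fires
    }
  } catch (err) {
    console.error('search aborted or failed:', err); // AbortError if the timer fired
  } finally {
    clearTimeout(timer);
  }
}
```
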
Suggestions API route handler:

@@ -72,9 +72,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ suggestions }, { status: 200 });
   } catch (err) {
-    console.error(`An error ocurred while generating suggestions: ${err}`);
+    console.error(`An error occurred while generating suggestions: ${err}`);
     return Response.json(
-      { message: 'An error ocurred while generating suggestions' },
+      { message: 'An error occurred while generating suggestions' },
       { status: 500 },
     );
   }

Video search API route handler:

@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ videos }, { status: 200 });
   } catch (err) {
-    console.error(`An error ocurred while searching videos: ${err}`);
+    console.error(`An error occurred while searching videos: ${err}`);
     return Response.json(
-      { message: 'An error ocurred while searching videos' },
+      { message: 'An error occurred while searching videos' },
       { status: 500 },
     );
   }

Anthropic provider:

@@ -1,4 +1,4 @@
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatModel } from '.';
 import { getAnthropicApiKey } from '../config';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
@@ -45,13 +45,10 @@ export const loadAnthropicChatModels = async () => {
     anthropicChatModels.forEach((model) => {
       chatModels[model.key] = {
         displayName: model.displayName,
-        model: new ChatOpenAI({
-          openAIApiKey: anthropicApiKey,
+        model: new ChatAnthropic({
+          apiKey: anthropicApiKey,
           modelName: model.key,
           temperature: 0.7,
-          configuration: {
-            baseURL: 'https://api.anthropic.com/v1/',
-          },
         }) as unknown as BaseChatModel,
       };
     });

Gemini provider:

@@ -1,10 +1,17 @@
-import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
+import {
+  ChatGoogleGenerativeAI,
+  GoogleGenerativeAIEmbeddings,
+} from '@langchain/google-genai';
 import { getGeminiApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';
 
 const geminiChatModels: Record<string, string>[] = [
+  {
+    displayName: 'Gemini 2.5 Pro Experimental',
+    key: 'gemini-2.5-pro-exp-03-25',
+  },
   {
     displayName: 'Gemini 2.0 Flash',
     key: 'gemini-2.0-flash',
@@ -14,8 +21,8 @@ const geminiChatModels: Record<string, string>[] = [
     key: 'gemini-2.0-flash-lite',
   },
   {
-    displayName: 'Gemini 2.0 Pro Experimental',
-    key: 'gemini-2.0-pro-exp-02-05',
+    displayName: 'Gemini 2.0 Flash Thinking Experimental',
+    key: 'gemini-2.0-flash-thinking-exp-01-21',
   },
   {
     displayName: 'Gemini 1.5 Flash',
@@ -49,13 +56,10 @@ export const loadGeminiChatModels = async () => {
     geminiChatModels.forEach((model) => {
       chatModels[model.key] = {
         displayName: model.displayName,
-        model: new ChatOpenAI({
-          openAIApiKey: geminiApiKey,
+        model: new ChatGoogleGenerativeAI({
+          apiKey: geminiApiKey,
           modelName: model.key,
           temperature: 0.7,
-          configuration: {
-            baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
-          },
         }) as unknown as BaseChatModel,
       };
     });
@@ -78,12 +82,9 @@ export const loadGeminiEmbeddingModels = async () => {
     geminiEmbeddingModels.forEach((model) => {
       embeddingModels[model.key] = {
         displayName: model.displayName,
-        model: new OpenAIEmbeddings({
-          openAIApiKey: geminiApiKey,
+        model: new GoogleGenerativeAIEmbeddings({
+          apiKey: geminiApiKey,
           modelName: model.key,
-          configuration: {
-            baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
-          },
         }) as unknown as Embeddings,
       };
     });

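For context on the two provider diffs above, here is a small standalone sketch (not from the repository) of the native SDK classes they switch to; the constructor fields mirror those in the diff, while the API-key environment variables and the Claude model name are placeholders.

```ts
// Illustrative sketch: after this change, Anthropic and Gemini models are
// constructed with their native LangChain integrations rather than ChatOpenAI
// pointed at a custom baseURL.
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';

const claude = new ChatAnthropic({
  apiKey: process.env.ANTHROPIC_API_KEY, // the repo reads this from its own config instead
  modelName: 'claude-3-haiku-20240307', // placeholder; Anthropic model keys are not shown in the diff
  temperature: 0.7,
});

const gemini = new ChatGoogleGenerativeAI({
  apiKey: process.env.GEMINI_API_KEY, // placeholder env var
  modelName: 'gemini-2.0-flash', // model key taken from the diff
  temperature: 0.7,
});

// Both classes satisfy the BaseChatModel interface, so either can be invoked
// the same way:
async function demo() {
  const reply = await gemini.invoke('Summarize what Perplexica does in one line.');
  console.log(reply.content);
}
```
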
yarn.lock: 53 lines changed

@@ -12,6 +12,19 @@
   resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30"
   integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==
 
+"@anthropic-ai/sdk@^0.37.0":
+  version "0.37.0"
+  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.37.0.tgz#0018127404ecb9b8a12968068e0c4b3e8bbd6386"
+  integrity sha512-tHjX2YbkUBwEgg0JZU3EFSSAQPoK4qQR/NFYa8Vtzd5UAyXzZksCw2In69Rml4R/TyHPBfRYaLK35XiOe33pjw==
+  dependencies:
+    "@types/node" "^18.11.18"
+    "@types/node-fetch" "^2.6.4"
+    abort-controller "^3.0.0"
+    agentkeepalive "^4.2.1"
+    form-data-encoder "1.7.2"
+    formdata-node "^4.3.2"
+    node-fetch "^2.6.7"
+
 "@anthropic-ai/sdk@^0.9.1":
   version "0.9.1"
   resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.9.1.tgz#b2d2b7bf05c90dce502c9a2e869066870f69ba88"
@@ -374,6 +387,11 @@
   resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.8.tgz#21a907684723bbbaa5f0974cf7730bd797eb8e62"
   integrity sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==
 
+"@google/generative-ai@^0.24.0":
+  version "0.24.0"
+  resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.24.0.tgz#4d27af7d944c924a27a593c17ad1336535d53846"
+  integrity sha512-fnEITCGEB7NdX0BhoYZ/cq/7WPZ1QS5IzJJfC3Tg/OwkvBetMiVJciyaan297OvE4B9Jg1xvo0zIazX/9sGu1Q==
+
 "@headlessui/react@^2.2.0":
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-2.2.0.tgz#a8e32f0899862849a1ce1615fa280e7891431ab7"
@@ -575,6 +593,16 @@
     "@jridgewell/resolve-uri" "^3.1.0"
     "@jridgewell/sourcemap-codec" "^1.4.14"
 
+"@langchain/anthropic@^0.3.15":
+  version "0.3.15"
+  resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-0.3.15.tgz#0244cdb345cb492eb40aedd681881ebadfbb73f2"
+  integrity sha512-Ar2viYcZ64idgV7EtCBCb36tIkNtPAhQRxSaMTWPHGspFgMfvwRoleVri9e90sCpjpS9xhlHsIQ0LlUS/Atsrw==
+  dependencies:
+    "@anthropic-ai/sdk" "^0.37.0"
+    fast-xml-parser "^4.4.1"
+    zod "^3.22.4"
+    zod-to-json-schema "^3.22.4"
+
 "@langchain/community@^0.3.36":
   version "0.3.36"
   resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.3.36.tgz#e4c13b8f928b17e0f9257395f43be2246dfada40"
@@ -640,6 +668,14 @@
     zod "^3.22.4"
     zod-to-json-schema "^3.22.3"
 
+"@langchain/google-genai@^0.1.12":
+  version "0.1.12"
+  resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-0.1.12.tgz#6727253bda6f0d87cd74cf0bb6b1e0f398f60f32"
+  integrity sha512-0Ea0E2g63ejCuormVxbuoyJQ5BYN53i2/fb6WP8bMKzyh+y43R13V8JqOtr3e/GmgNyv3ou/VeaZjx7KAvu/0g==
+  dependencies:
+    "@google/generative-ai" "^0.24.0"
+    zod-to-json-schema "^3.22.4"
+
 "@langchain/openai@>=0.1.0 <0.5.0", "@langchain/openai@>=0.2.0 <0.5.0":
   version "0.4.5"
   resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.4.5.tgz#d18e207c3ec3f2ecaa4698a5a5888092f643da52"
@@ -2369,6 +2405,13 @@ fast-levenshtein@^2.0.6:
   resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
   integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
 
+fast-xml-parser@^4.4.1:
+  version "4.5.3"
+  resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz#c54d6b35aa0f23dc1ea60b6c884340c006dc6efb"
+  integrity sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==
+  dependencies:
+    strnum "^1.1.1"
+
 fastq@^1.6.0:
   version "1.17.1"
   resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.17.1.tgz#2a523f07a4e7b1e81a42b91b8bf2254107753b47"
@@ -4458,6 +4501,11 @@ strip-json-comments@~2.0.1:
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
   integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==
 
+strnum@^1.1.1:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.1.2.tgz#57bca4fbaa6f271081715dbc9ed7cee5493e28e4"
+  integrity sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==
+
 styled-jsx@5.1.6:
   version "5.1.6"
   resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.6.tgz#83b90c077e6c6a80f7f5e8781d0f311b2fe41499"
@@ -4955,6 +5003,11 @@ zod-to-json-schema@^3.22.3, zod-to-json-schema@^3.22.5:
   resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.22.5.tgz#3646e81cfc318dbad2a22519e5ce661615418673"
   integrity sha512-+akaPo6a0zpVCCseDed504KBJUQpEW5QZw7RMneNmKw+fGaML1Z9tUNLnHHAC8x6dzVRO1eB2oEMyZRnuBZg7Q==
 
+zod-to-json-schema@^3.22.4:
+  version "3.24.5"
+  resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz#d1095440b147fb7c2093812a53c54df8d5df50a3"
+  integrity sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==
+
 zod@^3.22.3, zod@^3.22.4:
   version "3.22.4"
   resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"