Mirror of https://github.com/ItzCrazyKns/Perplexica.git, synced 2025-12-14 15:48:15 +00:00
feat(openai-llm): implement function calling
@@ -7,8 +7,16 @@ import {
   GenerateTextInput,
   GenerateTextOutput,
   StreamTextOutput,
+  ToolCall,
 } from '../../types';
 import { parse } from 'partial-json';
+import z from 'zod';
+import {
+  ChatCompletionMessageParam,
+  ChatCompletionTool,
+  ChatCompletionToolMessageParam,
+} from 'openai/resources/index.mjs';
+import { Message } from '@/lib/types';
 
 type OpenAIConfig = {
   apiKey: string;
@@ -29,10 +37,38 @@ class OpenAILLM extends BaseLLM<OpenAIConfig> {
     });
   }
 
+  convertToOpenAIMessages(messages: Message[]): ChatCompletionMessageParam[] {
+    return messages.map((msg) => {
+      if (msg.role === 'tool') {
+        return {
+          role: 'tool',
+          tool_call_id: msg.id,
+          content: msg.content,
+        } as ChatCompletionToolMessageParam;
+      }
+
+      return msg;
+    });
+  }
+
   async generateText(input: GenerateTextInput): Promise<GenerateTextOutput> {
+    const openaiTools: ChatCompletionTool[] = [];
+
+    input.tools?.forEach((tool) => {
+      openaiTools.push({
+        type: 'function',
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: z.toJSONSchema(tool.schema),
+        },
+      });
+    });
+
     const response = await this.openAIClient.chat.completions.create({
       model: this.config.model,
-      messages: input.messages,
+      tools: openaiTools.length > 0 ? openaiTools : undefined,
+      messages: this.convertToOpenAIMessages(input.messages),
       temperature:
         input.options?.temperature ?? this.config.options?.temperature ?? 1.0,
       top_p: input.options?.topP ?? this.config.options?.topP,
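
For context on the tool-conversion loop above: z.toJSONSchema() (a Zod v4 API, which the commit itself calls) turns a Zod schema into the plain JSON Schema object the Chat Completions tools[].function.parameters field expects. A minimal sketch with an invented get_weather tool; the name, description, and schema are illustrative, not from the commit:

    import z from 'zod';

    // Hypothetical tool in the shape generateText reads from input.tools.
    const weatherTool = {
      name: 'get_weather',
      description: 'Fetch the current weather for a city',
      schema: z.object({
        city: z.string(),
        unit: z.enum(['celsius', 'fahrenheit']).optional(),
      }),
    };

    // Produces a plain JSON Schema object, roughly:
    // { type: 'object', properties: { city: { type: 'string' }, ... }, required: ['city'] }
    const parameters = z.toJSONSchema(weatherTool.schema);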
@@ -49,6 +85,18 @@ class OpenAILLM extends BaseLLM<OpenAIConfig> {
     if (response.choices && response.choices.length > 0) {
       return {
         content: response.choices[0].message.content!,
+        toolCalls:
+          response.choices[0].message.tool_calls
+            ?.map((tc) => {
+              if (tc.type === 'function') {
+                return {
+                  name: tc.function.name,
+                  id: tc.id,
+                  arguments: JSON.parse(tc.function.arguments),
+                };
+              }
+            })
+            .filter((tc) => tc !== undefined) || [],
         additionalInfo: {
           finishReason: response.choices[0].finish_reason,
         },
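
The 'tool' branch in convertToOpenAIMessages pairs with the toolCalls array returned here: a caller runs each requested function and pushes the result back as a 'tool' message whose id becomes tool_call_id on the next request. A sketch of that round trip, assuming a hypothetical llm instance, messages array, and runTool dispatcher (none of which are part of the commit):

    const out = await llm.generateText({ messages, tools: [weatherTool] });

    for (const call of out.toolCalls) {
      // call.arguments is already a parsed object (JSON.parse above).
      const result = await runTool(call.name, call.arguments);
      messages.push({
        role: 'tool',
        id: call.id, // mapped to tool_call_id by convertToOpenAIMessages
        content: JSON.stringify(result),
      });
    }

    // Calling generateText again with the updated messages lets the model
    // compose its final answer from the tool output.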
@@ -61,9 +109,23 @@ class OpenAILLM extends BaseLLM<OpenAIConfig> {
   async *streamText(
     input: GenerateTextInput,
   ): AsyncGenerator<StreamTextOutput> {
+    const openaiTools: ChatCompletionTool[] = [];
+
+    input.tools?.forEach((tool) => {
+      openaiTools.push({
+        type: 'function',
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: z.toJSONSchema(tool.schema),
+        },
+      });
+    });
+
     const stream = await this.openAIClient.chat.completions.create({
       model: this.config.model,
-      messages: input.messages,
+      messages: this.convertToOpenAIMessages(input.messages),
+      tools: openaiTools.length > 0 ? openaiTools : undefined,
       temperature:
         input.options?.temperature ?? this.config.options?.temperature ?? 1.0,
       top_p: input.options?.topP ?? this.config.options?.topP,
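
The tool-building loop here is identical to the one in generateText. A hypothetical private helper (not part of the commit) could deduplicate it:

    // Sketch only; assumes input.tools items expose name/description/schema.
    private buildOpenAITools(tools: GenerateTextInput['tools']): ChatCompletionTool[] {
      return (tools ?? []).map(
        (tool): ChatCompletionTool => ({
          type: 'function',
          function: {
            name: tool.name,
            description: tool.description,
            parameters: z.toJSONSchema(tool.schema),
          },
        }),
      );
    }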
@@ -78,10 +140,33 @@ class OpenAILLM extends BaseLLM<OpenAIConfig> {
       stream: true,
     });
 
+    let recievedToolCalls: { name: string; id: string; arguments: string }[] =
+      [];
+
     for await (const chunk of stream) {
       if (chunk.choices && chunk.choices.length > 0) {
+        const toolCalls = chunk.choices[0].delta.tool_calls;
         yield {
           contentChunk: chunk.choices[0].delta.content || '',
+          toolCallChunk:
+            toolCalls?.map((tc) => {
+              if (tc.type === 'function') {
+                const call = {
+                  name: tc.function?.name!,
+                  id: tc.id!,
+                  arguments: tc.function?.arguments || '',
+                };
+                recievedToolCalls.push(call);
+                return { ...call, arguments: parse(call.arguments || '{}') };
+              } else {
+                const existingCall = recievedToolCalls[tc.index];
+                existingCall.arguments += tc.function?.arguments || '';
+                return {
+                  ...existingCall,
+                  arguments: parse(existingCall.arguments),
+                };
+              }
+            }) || [],
           done: chunk.choices[0].finish_reason !== null,
           additionalInfo: {
             finishReason: chunk.choices[0].finish_reason,
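
The streaming branch works because the first delta for a tool call carries type: 'function' plus the name and id, while follow-up deltas carry only tc.index and an arguments fragment; the recievedToolCalls array accumulates those fragments by index. parse from partial-json makes each intermediate state usable by accepting an incomplete JSON document and returning a best-effort object. Illustrative behavior under its default allowances (exact partial-string handling can vary by version):

    import { parse } from 'partial-json';

    parse('{"city": "Os');          // ≈ { city: 'Os' }   (partial string kept)
    parse('{"city": "Oslo", "un');  // ≈ { city: 'Oslo' } (incomplete key dropped)
    parse('{"city": "Oslo"}');      // { city: 'Oslo' }   (complete JSON parses as usual)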