Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-09-17 14:51:32 +00:00)
fix(Library): Returns metadata back to original format so sources continue to work.
@@ -112,7 +112,7 @@ const handleEmitterEvents = async (
     stream.on('end', () => {
       const endTime = Date.now();
       const duration = endTime - startTime;

       modelStats = {
         ...modelStats,
         responseTime: duration,
@@ -135,9 +135,11 @@ const handleEmitterEvents = async (
         chatId: chatId,
         messageId: aiMessageId,
         role: 'assistant',
-        metadata: {
+        metadata: JSON.stringify({
           createdAt: new Date(),
           ...(sources && sources.length > 0 && { sources }),
           modelStats: modelStats,
-        },
+        }),
       })
       .execute();
   });
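The hunk above is the write path of the fix: everything beyond the core columns is serialized into a single `metadata` JSON string before the row is inserted. A minimal sketch of that shape, assuming a plain row object instead of the query-builder call in the diff (the `SourceDocument` and `ModelStats` types here are illustrative stand-ins):

```typescript
// Illustrative stand-ins for the app's actual types.
type SourceDocument = { pageContent: string; metadata: Record<string, unknown> };

type ModelStats = {
  modelName?: string;
  responseTime?: number;
};

// Build the row for an assistant message; the extra fields are packed into
// one `metadata` text column, as in the hunk above.
const buildAssistantRow = (
  chatId: string,
  messageId: string,
  content: string,
  sources: SourceDocument[] | undefined,
  modelStats: ModelStats,
) => ({
  chatId,
  messageId,
  content,
  role: 'assistant' as const,
  metadata: JSON.stringify({
    createdAt: new Date(),
    ...(sources && sources.length > 0 && { sources }),
    modelStats: modelStats,
  }),
});
```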
@@ -319,7 +321,14 @@ export const POST = async (req: Request) => {
     const writer = responseStream.writable.getWriter();
     const encoder = new TextEncoder();

-    handleEmitterEvents(stream, writer, encoder, aiMessageId, message.chatId, startTime);
+    handleEmitterEvents(
+      stream,
+      writer,
+      encoder,
+      aiMessageId,
+      message.chatId,
+      startTime,
+    );
     handleHistorySave(message, humanMessageId, body.focusMode, body.files);

     return new Response(responseStream.readable, {
@@ -546,8 +546,7 @@ const Page = () => {
               Automatic Suggestions
             </p>
             <p className="text-xs text-black/60 dark:text-white/60 mt-0.5">
-              Automatically show related suggestions after
-              responses
+              Automatically show related suggestions after responses
             </p>
           </div>
         </div>
@@ -18,10 +18,6 @@ export type ModelStats = {
   responseTime?: number;
 };

-export type MessageMetadata = {
-  modelStats?: ModelStats;
-};
-
 export type Message = {
   messageId: string;
   chatId: string;
@@ -30,7 +26,7 @@ export type Message = {
   role: 'user' | 'assistant';
   suggestions?: string[];
   sources?: Document[];
-  metadata?: MessageMetadata;
+  modelStats?: ModelStats;
 };

 export interface File {
@@ -217,6 +213,7 @@ const loadMessages = async (
       const messages = data.messages.map((msg: any) => {
         return {
           ...msg,
+          ...JSON.parse(msg.metadata),
         };
       }) as Message[];

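This is the read half of the fix: because the column is a JSON string again, loading a chat parses it and spreads the result back onto the message, which restores `sources` (and `modelStats`) to where the UI reads them. A minimal sketch under the same assumption of a raw row with a `metadata` text column (the row and message types are illustrative):

```typescript
// Illustrative shapes for the stored row and the in-memory message.
type RawMessageRow = {
  messageId: string;
  chatId: string;
  content: string;
  role: 'user' | 'assistant';
  metadata: string; // JSON text column written on insert
};

type Message = {
  messageId: string;
  chatId: string;
  content: string;
  role: 'user' | 'assistant';
  createdAt?: string;
  sources?: unknown[];
  modelStats?: { modelName?: string; responseTime?: number };
};

// Parse the JSON column and spread it back onto the message so that
// sources and modelStats end up at the top level again.
const rehydrateMessages = (rows: RawMessageRow[]): Message[] =>
  rows.map((msg) => ({
    ...msg,
    ...JSON.parse(msg.metadata),
  })) as Message[];
```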
@@ -445,11 +442,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
         role: 'assistant',
         sources: sources,
         createdAt: new Date(),
-        metadata: {
-          // modelStats will be added when we receive messageEnd event
-          modelStats: {
-            modelName: data.modelName,
-          },
-        },
+        modelStats: {
+          modelName: data.modelName,
+        },
       },
     ]);
@@ -483,10 +477,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
           if (message.messageId === data.messageId) {
             return {
               ...message,
-              metadata: {
-                // Include model stats if available, otherwise null
-                modelStats: data.modelStats || null,
-              },
+              // Include model stats if available, otherwise null
+              modelStats: data.modelStats || null,
             };
           }
           return message;
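With the `MessageMetadata` wrapper gone, the messageEnd handler writes `modelStats` directly onto the matching message instead of nesting it under `metadata`. A minimal sketch of that update as a pure function (the type names and `applyModelStats` are illustrative; the real code runs inside a `setMessages` updater):

```typescript
// Illustrative message shape with modelStats at the top level.
type ModelStats = {
  modelName?: string;
  responseTime?: number;
};

type ChatMessage = {
  messageId: string;
  content: string;
  role: 'user' | 'assistant';
  modelStats?: ModelStats | null;
};

// On messageEnd, attach the stats to the matching message at the top level,
// falling back to null when no stats were sent.
const applyModelStats = (
  messages: ChatMessage[],
  data: { messageId: string; modelStats?: ModelStats },
): ChatMessage[] =>
  messages.map((message) =>
    message.messageId === data.messageId
      ? {
          ...message,
          modelStats: data.modelStats || null,
        }
      : message,
  );
```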
@@ -63,7 +63,9 @@ const ModelInfoButton: React.FC<ModelInfoButtonProps> = ({ modelStats }) => {
           </div>
           {modelStats?.responseTime && (
             <div className="flex justify-between">
-              <span className="text-black/70 dark:text-white/70">Response time:</span>
+              <span className="text-black/70 dark:text-white/70">
+                Response time:
+              </span>
               <span className="text-black dark:text-white font-medium">
                 {(modelStats.responseTime / 1000).toFixed(2)}s
               </span>
@@ -58,11 +58,15 @@ const MessageBox = ({
   const [speechMessage, setSpeechMessage] = useState(message.content);
   const [loadingSuggestions, setLoadingSuggestions] = useState(false);
   const [autoSuggestions, setAutoSuggestions] = useState(
-    localStorage.getItem('autoSuggestions')
+    localStorage.getItem('autoSuggestions'),
   );

   const handleLoadSuggestions = async () => {
-    if (loadingSuggestions || (message?.suggestions && message.suggestions.length > 0)) return;
+    if (
+      loadingSuggestions ||
+      (message?.suggestions && message.suggestions.length > 0)
+    )
+      return;

     setLoadingSuggestions(true);
     try {
@@ -202,8 +206,8 @@ const MessageBox = ({
               <h3 className="text-black dark:text-white font-medium text-xl">
                 Answer
               </h3>
-              {message.metadata?.modelStats && (
-                <ModelInfoButton modelStats={message.metadata.modelStats} />
+              {message.modelStats && (
+                <ModelInfoButton modelStats={message.modelStats} />
               )}
             </div>
             <Markdown
@@ -251,8 +255,9 @@ const MessageBox = ({
           <div className="flex flex-row items-center space-x-2 mt-4">
             <Layers3 />
             <h3 className="text-xl font-medium">Related</h3>{' '}
-            {(!autoSuggestions || autoSuggestions === 'false') && (!message.suggestions ||
-              message.suggestions.length === 0) ? (
+            {(!autoSuggestions || autoSuggestions === 'false') &&
+            (!message.suggestions ||
+              message.suggestions.length === 0) ? (
             <div className="bg-light-secondary dark:bg-dark-secondary">
               <button
                 onClick={handleLoadSuggestions}
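The guard above compares against the string 'false' because `localStorage` only stores strings, so the preference comes back as 'true', 'false', or null. A small sketch of that check, assuming the same 'autoSuggestions' key (the helper name is illustrative):

```typescript
// localStorage only stores strings, so the stored preference is read back as
// 'true', 'false', or null and compared against the string 'false'.
const shouldOfferManualSuggestions = (
  autoSuggestions: string | null,
  suggestions?: string[],
): boolean =>
  (!autoSuggestions || autoSuggestions === 'false') &&
  (!suggestions || suggestions.length === 0);

// e.g. shouldOfferManualSuggestions(localStorage.getItem('autoSuggestions'), message.suggestions)
```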