Mirror of https://github.com/ItzCrazyKns/Perplexica.git

Commit: feat(app): lint & beautify
@@ -187,16 +187,13 @@ const Page = () => {
         [key]: value,
       } as SettingsType;
 
-      const response = await fetch(
-        `/api/config`,
-        {
+      const response = await fetch(`/api/config`, {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
         },
         body: JSON.stringify(updatedConfig),
-        },
-      );
+      });
 
       if (!response.ok) {
         throw new Error('Failed to update config');
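The change above is pure formatter output: the multi-line fetch argument list is collapsed into the idiomatic fetch(url, options) call closed with a trailing });. As a rough sketch, assumed rather than taken from the repository, a Prettier config that would produce the style these hunks converge on (semicolons, single quotes, trailing commas, two-space indentation) could look like this:

// prettier.config.mjs, hypothetical sketch; the repo's actual lint/format
// setup may differ. All options shown are standard Prettier options.
export default {
  semi: true,           // terminate statements with semicolons
  singleQuote: true,    // prefer 'single' over "double" quotes
  trailingComma: 'all', // trailing commas in multi-line literals and calls
  tabWidth: 2,          // two-space indentation
};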
@@ -1,6 +1,6 @@
-import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'
-import { getOpenaiApiKey } from '../config'
-import { ChatModelProvider, EmbeddingModelProvider } from '.'
+import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
+import { getOpenaiApiKey } from '../config';
+import { ChatModel, EmbeddingModel } from '.';
 
 const openaiChatModels: Record<string, string>[] = [
   {
@@ -22,8 +22,8 @@ const openaiChatModels: Record<string, string>[] = [
   {
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
-  }
-]
+  },
+];
 
 const openaiEmbeddingModels: Record<string, string>[] = [
   {
@@ -33,56 +33,56 @@ const openaiEmbeddingModels: Record<string, string>[] = [
   {
     displayName: 'Text Embedding 3 Large',
     key: 'text-embedding-3-large',
-    }
-]
+  },
+];
 
 export const loadOpenAIChatModels = async () => {
-    const openaiApiKey = getOpenaiApiKey()
+  const openaiApiKey = getOpenaiApiKey();
 
-    if (!openaiApiKey) return {}
+  if (!openaiApiKey) return {};
 
   try {
-        const chatModels: Record<string, ChatModelProvider> = {}
+    const chatModels: Record<string, ChatModel> = {};
 
-        openaiChatModels.forEach(model => {
+    openaiChatModels.forEach((model) => {
       chatModels[model.key] = {
         displayName: model.displayName,
         model: new ChatOpenAI({
           openAIApiKey: openaiApiKey,
           modelName: model.key,
-                    temperature: 0.7
-                })
-            }
-        })
+          temperature: 0.7,
+        }),
+      };
+    });
 
-        return chatModels
-    } catch(err) {
-        console.error(`Error loading OpenAI models: ${err}`)
-        return {}
+    return chatModels;
+  } catch (err) {
+    console.error(`Error loading OpenAI models: ${err}`);
+    return {};
   }
-}
+};
 
 export const loadOpenAIEmbeddingModels = async () => {
-    const openaiApiKey = getOpenaiApiKey()
+  const openaiApiKey = getOpenaiApiKey();
 
-    if (!openaiApiKey) return {}
+  if (!openaiApiKey) return {};
 
   try {
-        const embeddingModels: Record<string, EmbeddingModelProvider> = {}
+    const embeddingModels: Record<string, EmbeddingModel> = {};
 
-        openaiEmbeddingModels.forEach(model => {
+    openaiEmbeddingModels.forEach((model) => {
       embeddingModels[model.key] = {
         displayName: model.displayName,
         model: new OpenAIEmbeddings({
           openAIApiKey: openaiApiKey,
           modelName: model.key,
-                })
-            }
-        })
+        }),
+      };
+    });
 
-        return embeddingModels
-    } catch(err) {
-        console.error(`Error loading OpenAI embeddings models: ${err}`)
-        return {}
+    return embeddingModels;
+  } catch (err) {
+    console.error(`Error loading OpenAI embeddings models: ${err}`);
+    return {};
   }
-}
+};
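Beyond the indentation and semicolon fixes, this hunk renames the provider value types from ChatModelProvider / EmbeddingModelProvider to ChatModel / EmbeddingModel. A minimal sketch of the shapes the loaders appear to build, inferred from the assignments in the hunk rather than copied from the providers' index module:

// Sketch under assumptions: field names come from the assignments above
// (displayName plus a LangChain model instance); the real definitions live
// in the providers' index file and may carry extra fields.
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { Embeddings } from '@langchain/core/embeddings';

export interface ChatModel {
  displayName: string;
  model: BaseChatModel; // e.g. a ChatOpenAI instance
}

export interface EmbeddingModel {
  displayName: string;
  model: Embeddings; // e.g. an OpenAIEmbeddings instance
}

Both loaders resolve to a record keyed by model id (for example 'gpt-4o-mini'), or to an empty object when no OpenAI API key is configured.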
@@ -414,7 +414,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
       return sortedDocs;
     }
 
-    return []
+    return [];
   }
 
   private processDocs(docs: Document[]) {
ui/lib/types/compute-dot.d.ts (vendored, 2 changed lines)
@@ -1,5 +1,5 @@
 declare function computeDot(vectorA: number[], vectorB: number[]): number;
 
-declare module "compute-dot" {
+declare module 'compute-dot' {
   export default computeDot;
 }
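Only the quote style changes here, but the file itself is a small ambient module declaration for the vendored compute-dot package. A hypothetical consumer, illustrative only, would lean on it like this:

// Illustrative usage of the ambient declaration above; only the module name
// and function signature come from the .d.ts, the rest is not from the repo.
import computeDot from 'compute-dot';

const a = [0.1, 0.2, 0.3];
const b = [0.3, 0.2, 0.1];
const dot: number = computeDot(a, b); // plain dot product of the two vectors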
@@ -79,7 +79,10 @@ export const getDocumentsFromLinks = async ({ links }: { links: string[] }) => {
 
         docs.push(...linkDocs);
       } catch (err) {
-        console.error("An error occurred while getting documents from links: ", err);
+        console.error(
+          'An error occurred while getting documents from links: ',
+          err,
+        );
         docs.push(
           new Document({
             pageContent: `Failed to retrieve content from the link: ${err}`,
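The multi-line console.error call is again formatter output; the surrounding catch block keeps one failed link from aborting the whole batch by pushing a placeholder document instead of rethrowing. A sketch of that fallback pattern, assuming LangChain's Document class and with hypothetical metadata fields:

// Hedged sketch: Document comes from LangChain core; the metadata shown here
// is illustrative and not copied from the repository.
import { Document } from '@langchain/core/documents';

const fallbackDoc = (link: string, err: unknown) =>
  new Document({
    pageContent: `Failed to retrieve content from the link: ${err}`,
    metadata: { url: link },
  });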
@@ -7,7 +7,7 @@ const nextConfig = {
       },
     ],
   },
-  serverExternalPackages: ['pdf-parse']
+  serverExternalPackages: ['pdf-parse'],
 };
 
 export default nextConfig;
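The trailing comma after serverExternalPackages is the lint fix; the option itself tells Next.js to leave pdf-parse out of server-side bundling and resolve it from node_modules at runtime. A minimal sketch of such a config, with the images block from the hunk's context elided:

// next.config.mjs, minimal sketch; assumes a Next.js version where
// serverExternalPackages is a top-level option. The real config also contains
// an images block, omitted here.
const nextConfig = {
  serverExternalPackages: ['pdf-parse'],
};

export default nextConfig;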