feat(app): lint & beautify

Author: ItzCrazyKns
Date: 2025-04-12 11:58:52 +05:30
parent 9a332e79e4
commit 073b5e897c

12 changed files with 66 additions and 35 deletions

View File

@@ -8,6 +8,7 @@ import {
   getOllamaApiEndpoint,
   getOpenaiApiKey,
   getDeepseekApiKey,
+  getLMStudioApiEndpoint,
   updateConfig,
 } from '@/lib/config';
 import {
@@ -51,6 +52,7 @@ export const GET = async (req: Request) => {
     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
     config['geminiApiKey'] = getGeminiApiKey();
@@ -93,6 +95,9 @@ export const POST = async (req: Request) => {
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,
       },
+      LM_STUDIO: {
+        API_URL: config.lmStudioApiUrl,
+      },
       CUSTOM_OPENAI: {
         API_URL: config.customOpenaiApiUrl,
         API_KEY: config.customOpenaiApiKey,
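
Taken together, these route changes expose the LM Studio endpoint through the same GET/POST config round trip as the other providers. A minimal client-side sketch, assuming this route is served at /api/config (the file path is not shown on this page) and that POST accepts the same field names GET returns:

// Hypothetical helper; the '/api/config' path and round-trip shape are assumptions.
const setLMStudioUrl = async (url: string): Promise<void> => {
  const current = await (await fetch('/api/config')).json();
  await fetch('/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // lmStudioApiUrl is the field wired up in this commit
    body: JSON.stringify({ ...current, lmStudioApiUrl: url }),
  });
};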

View File

@@ -21,6 +21,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  lmStudioApiUrl: string;
   deepseekApiKey: string;
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
@@ -548,8 +549,10 @@ const Page = () => {
                 options={Object.keys(config.chatModelProviders).map(
                   (provider) => ({
                     value: provider,
-                    label: (PROVIDER_METADATA as any)[provider]?.displayName ||
-                      provider.charAt(0).toUpperCase() + provider.slice(1),
+                    label:
+                      (PROVIDER_METADATA as any)[provider]?.displayName ||
+                      provider.charAt(0).toUpperCase() +
+                        provider.slice(1),
                   }),
                 )}
               />
@@ -689,8 +692,10 @@ const Page = () => {
                 options={Object.keys(config.embeddingModelProviders).map(
                   (provider) => ({
                     value: provider,
-                    label: (PROVIDER_METADATA as any)[provider]?.displayName ||
-                      provider.charAt(0).toUpperCase() + provider.slice(1),
+                    label:
+                      (PROVIDER_METADATA as any)[provider]?.displayName ||
+                      provider.charAt(0).toUpperCase() +
+                        provider.slice(1),
                   }),
                 )}
               />
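
The reflowed label expression keeps the same logic: prefer the provider's registered displayName, else capitalize the raw key. A standalone sketch of the same fallback (providerLabel is an illustrative name, not part of the commit):

// Same fallback as the JSX above, extracted for clarity.
const providerLabel = (provider: string): string =>
  (PROVIDER_METADATA as any)[provider]?.displayName ||
  provider.charAt(0).toUpperCase() + provider.slice(1);

// providerLabel('lmstudio') -> 'LM Studio' (from PROVIDER_METADATA)
// providerLabel('foo')      -> 'Foo'       (capitalized fallback)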

View File

@@ -60,7 +60,7 @@ const loadConfig = () => {
       fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
     ) as any as Config;
   }
-  // Client-side fallback - settings will be loaded via API
+  // Client-side fallback - settings will be loaded via API
   return {} as Config;
 };
@@ -94,7 +94,8 @@ export const getCustomOpenaiApiUrl = () =>
 export const getCustomOpenaiModelName = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
-export const getLMStudioApiEndpoint = () => loadConfig().MODELS.LM_STUDIO.API_URL;
+export const getLMStudioApiEndpoint = () =>
+  loadConfig().MODELS.LM_STUDIO.API_URL;

 const mergeConfigs = (current: any, update: any): any => {
   if (update === null || update === undefined) {
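
The new getter implies a MODELS.LM_STUDIO section in the config file. A sketch of the shape it reads, inferred from this getter and the POST handler in the first file (the full Config interface is not shown in this diff):

// Inferred shape only; other sections elided.
interface Config {
  MODELS: {
    LM_STUDIO: {
      API_URL: string;
    };
    // ...CUSTOM_OPENAI, DEEPSEEK, etc.
  };
}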

View File

@@ -4,7 +4,7 @@ import { getAnthropicApiKey } from '../config';
 export const PROVIDER_INFO = {
   key: 'anthropic',
-  displayName: 'Anthropic'
+  displayName: 'Anthropic',
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';

View File

@@ -5,7 +5,7 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 export const PROVIDER_INFO = {
   key: 'deepseek',
-  displayName: 'Deepseek AI'
+  displayName: 'Deepseek AI',
 };
 const deepseekChatModels: Record<string, string>[] = [

View File

@@ -7,7 +7,7 @@ import { ChatModel, EmbeddingModel } from '.';
 export const PROVIDER_INFO = {
   key: 'gemini',
-  displayName: 'Google Gemini'
+  displayName: 'Google Gemini',
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';

View File

@@ -4,7 +4,7 @@ import { ChatModel } from '.';
 export const PROVIDER_INFO = {
   key: 'groq',
-  displayName: 'Groq'
+  displayName: 'Groq',
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';

View File

@@ -1,19 +1,45 @@
 import { Embeddings } from '@langchain/core/embeddings';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { loadOpenAIChatModels, loadOpenAIEmbeddingModels, PROVIDER_INFO as OpenAIInfo, PROVIDER_INFO } from './openai';
+import {
+  loadOpenAIChatModels,
+  loadOpenAIEmbeddingModels,
+  PROVIDER_INFO as OpenAIInfo,
+  PROVIDER_INFO,
+} from './openai';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
 import { ChatOpenAI } from '@langchain/openai';
-import { loadOllamaChatModels, loadOllamaEmbeddingModels, PROVIDER_INFO as OllamaInfo } from './ollama';
+import {
+  loadOllamaChatModels,
+  loadOllamaEmbeddingModels,
+  PROVIDER_INFO as OllamaInfo,
+} from './ollama';
 import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
-import { loadAnthropicChatModels, PROVIDER_INFO as AnthropicInfo } from './anthropic';
-import { loadGeminiChatModels, loadGeminiEmbeddingModels, PROVIDER_INFO as GeminiInfo } from './gemini';
-import { loadTransformersEmbeddingsModels, PROVIDER_INFO as TransformersInfo } from './transformers';
-import { loadDeepseekChatModels, PROVIDER_INFO as DeepseekInfo } from './deepseek';
-import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels, PROVIDER_INFO as LMStudioInfo } from './lmstudio';
+import {
+  loadAnthropicChatModels,
+  PROVIDER_INFO as AnthropicInfo,
+} from './anthropic';
+import {
+  loadGeminiChatModels,
+  loadGeminiEmbeddingModels,
+  PROVIDER_INFO as GeminiInfo,
+} from './gemini';
+import {
+  loadTransformersEmbeddingsModels,
+  PROVIDER_INFO as TransformersInfo,
+} from './transformers';
+import {
+  loadDeepseekChatModels,
+  PROVIDER_INFO as DeepseekInfo,
+} from './deepseek';
+import {
+  loadLMStudioChatModels,
+  loadLMStudioEmbeddingsModels,
+  PROVIDER_INFO as LMStudioInfo,
+} from './lmstudio';

 export const PROVIDER_METADATA = {
   openai: OpenAIInfo,
@@ -26,8 +52,8 @@ export const PROVIDER_METADATA = {
   lmstudio: LMStudioInfo,
   custom_openai: {
     key: 'custom_openai',
-    displayName: 'Custom OpenAI'
-  }
+    displayName: 'Custom OpenAI',
+  },
 };

 export interface ChatModel {
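
Each provider file in this commit exports a PROVIDER_INFO of the same shape, and this index aggregates them into PROVIDER_METADATA, which the settings page reads via (PROVIDER_METADATA as any)[provider]?.displayName. A sketch of that shape (the interface name is illustrative; the commit itself uses plain object literals):

// Illustrative type for the { key, displayName } objects exported by each provider.
interface ProviderInfo {
  key: string;
  displayName: string;
}

// e.g. PROVIDER_METADATA.lmstudio.displayName === 'LM Studio'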

View File

@@ -4,7 +4,7 @@ import { ChatModel, EmbeddingModel } from '.';
 export const PROVIDER_INFO = {
   key: 'lmstudio',
-  displayName: 'LM Studio'
+  displayName: 'LM Studio',
 };
 import { ChatOpenAI } from '@langchain/openai';
 import { OpenAIEmbeddings } from '@langchain/openai';
@@ -16,14 +16,12 @@ interface LMStudioModel {
   name?: string;
 }

 const ensureV1Endpoint = (endpoint: string): string =>
   endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;

 const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
   try {
-    const keepAlive = getKeepAlive();
     await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
-      timeout: parseInt(keepAlive) * 1000 || 5000,
       headers: { 'Content-Type': 'application/json' },
     });
     return true;
@@ -34,14 +32,12 @@ const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
 export const loadLMStudioChatModels = async () => {
   const endpoint = getLMStudioApiEndpoint();
-  const keepAlive = getKeepAlive();

   if (!endpoint) return {};
-  if (!await checkServerAvailability(endpoint)) return {};
+  if (!(await checkServerAvailability(endpoint))) return {};

   try {
     const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
-      timeout: parseInt(keepAlive) * 1000 || 5000,
       headers: { 'Content-Type': 'application/json' },
     });
@@ -58,7 +54,7 @@ export const loadLMStudioChatModels = async () => {
           modelName: model.id,
           temperature: 0.7,
           streaming: true,
-          maxRetries: 3
+          maxRetries: 3,
         }) as unknown as BaseChatModel,
       };
     });
@@ -72,14 +68,12 @@ export const loadLMStudioChatModels = async () => {
 export const loadLMStudioEmbeddingsModels = async () => {
   const endpoint = getLMStudioApiEndpoint();
-  const keepAlive = getKeepAlive();

   if (!endpoint) return {};
-  if (!await checkServerAvailability(endpoint)) return {};
+  if (!(await checkServerAvailability(endpoint))) return {};

   try {
     const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
-      timeout: parseInt(keepAlive) * 1000 || 5000,
       headers: { 'Content-Type': 'application/json' },
     });
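
Beyond the formatting fixes, this file drops the getKeepAlive-based timeout from all three axios calls (leaving axios's default of no timeout) and parenthesizes the awaited guard so the negation applies to the resolved boolean. ensureV1Endpoint itself is unchanged; its behavior, with an illustrative endpoint (LM Studio's default port):

// ensureV1Endpoint appends '/v1' only when it is missing.
ensureV1Endpoint('http://localhost:1234');    // -> 'http://localhost:1234/v1'
ensureV1Endpoint('http://localhost:1234/v1'); // -> 'http://localhost:1234/v1'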

View File

@@ -4,7 +4,7 @@ import { ChatModel, EmbeddingModel } from '.';
 export const PROVIDER_INFO = {
   key: 'ollama',
-  displayName: 'Ollama'
+  displayName: 'Ollama',
 };
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';

View File

@@ -4,7 +4,7 @@ import { ChatModel, EmbeddingModel } from '.';
 export const PROVIDER_INFO = {
   key: 'openai',
-  displayName: 'OpenAI'
+  displayName: 'OpenAI',
 };
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';

View File

@@ -2,7 +2,7 @@ import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
 export const PROVIDER_INFO = {
   key: 'transformers',
-  displayName: 'Hugging Face'
+  displayName: 'Hugging Face',
 };
 export const loadTransformersEmbeddingsModels = async () => {