mirror of
https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-04-29 07:42:46 +00:00
124 lines
2.9 KiB
TypeScript
124 lines
2.9 KiB
TypeScript
import fs from 'fs';
|
|
import path from 'path';
|
|
import toml from '@iarna/toml';
|
|
|
|
// Name of the TOML configuration file, resolved against process.cwd() by the loaders below.
const configFileName = 'config.toml';
|
/**
 * Typed shape of config.toml. Keys are CONSTANT_CASE because they mirror
 * the TOML table/key names in the file verbatim ([GENERAL], [MODELS.OPENAI], ...).
 */
interface Config {
  GENERAL: {
    // Similarity measure name; exposed via getSimilarityMeasure().
    SIMILARITY_MEASURE: string;
    // Keep-alive setting; exposed via getKeepAlive().
    KEEP_ALIVE: string;
  };
  // Per-provider credentials/endpoints. Note: Ollama and LM Studio are
  // keyed by URL rather than API key (locally hosted servers).
  MODELS: {
    OPENAI: {
      API_KEY: string;
    };
    GROQ: {
      API_KEY: string;
    };
    ANTHROPIC: {
      API_KEY: string;
    };
    GEMINI: {
      API_KEY: string;
    };
    OLLAMA: {
      API_URL: string;
    };
    DEEPSEEK: {
      API_KEY: string;
    };
    LM_STUDIO: {
      API_URL: string;
    };
    // Arbitrary OpenAI-compatible endpoint: needs URL, key, and model name.
    CUSTOM_OPENAI: {
      API_URL: string;
      API_KEY: string;
      MODEL_NAME: string;
    };
  };
  API_ENDPOINTS: {
    // SearXNG search endpoint; the SEARXNG_API_URL env var takes
    // precedence over this value (see getSearxngApiEndpoint()).
    SEARXNG: string;
  };
}
|
/**
 * Like Partial<T>, but applied recursively: every property at every
 * nesting level becomes optional. Used by updateConfig() to accept
 * arbitrarily deep partial overrides of the Config shape.
 */
type RecursivePartial<T> = {
  [P in keyof T]?: RecursivePartial<T[P]>;
};
|
const loadConfig = () =>
|
|
toml.parse(
|
|
fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
|
|
) as any as Config;
|
|
|
|
export const getSimilarityMeasure = () =>
|
|
loadConfig().GENERAL.SIMILARITY_MEASURE;
|
|
|
|
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
|
|
|
|
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
|
|
|
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
|
|
|
|
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
|
|
|
|
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
|
|
|
|
export const getSearxngApiEndpoint = () =>
|
|
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
|
|
|
|
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
|
|
|
|
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
|
|
|
|
export const getCustomOpenaiApiKey = () =>
|
|
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
|
|
|
|
export const getCustomOpenaiApiUrl = () =>
|
|
loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
|
|
|
|
export const getCustomOpenaiModelName = () =>
|
|
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
|
|
|
|
export const getLMStudioApiEndpoint = () => loadConfig().MODELS.LM_STUDIO.API_URL;
|
|
|
|
const mergeConfigs = (current: any, update: any): any => {
|
|
if (update === null || update === undefined) {
|
|
return current;
|
|
}
|
|
|
|
if (typeof current !== 'object' || current === null) {
|
|
return update;
|
|
}
|
|
|
|
const result = { ...current };
|
|
|
|
for (const key in update) {
|
|
if (Object.prototype.hasOwnProperty.call(update, key)) {
|
|
const updateValue = update[key];
|
|
|
|
if (
|
|
typeof updateValue === 'object' &&
|
|
updateValue !== null &&
|
|
typeof result[key] === 'object' &&
|
|
result[key] !== null
|
|
) {
|
|
result[key] = mergeConfigs(result[key], updateValue);
|
|
} else if (updateValue !== undefined) {
|
|
result[key] = updateValue;
|
|
}
|
|
}
|
|
}
|
|
|
|
return result;
|
|
};
|
|
|
|
export const updateConfig = (config: RecursivePartial<Config>) => {
|
|
const currentConfig = loadConfig();
|
|
const mergedConfig = mergeConfigs(currentConfig, config);
|
|
fs.writeFileSync(
|
|
path.join(path.join(process.cwd(), `${configFileName}`)),
|
|
toml.stringify(mergedConfig),
|
|
);
|
|
};
|