Mirror of https://github.com/ItzCrazyKns/Perplexica.git, synced 2025-04-30 00:02:44 +00:00
feat(config): update config & custom openai
@@ -3,12 +3,25 @@ PORT = 3001 # Port to run the server on
 SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")

-[API_KEYS]
-OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
-GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
-ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
-GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef
+[MODELS]
+[MODELS.OPENAI]
+API_KEY = ""
+
+[MODELS.GROQ]
+API_KEY = ""
+
+[MODELS.ANTHROPIC]
+API_KEY = ""
+
+[MODELS.GEMINI]
+API_KEY = ""
+
+[MODELS.CUSTOM_OPENAI]
+API_KEY = ""
+API_URL = ""
+
+[MODELS.OLLAMA]
+API_URL = "" # Ollama API URL - http://host.docker.internal:11434

 [API_ENDPOINTS]
 SEARXNG = "http://localhost:32768" # SearxNG API URL
-OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
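The hunk above replaces the flat [API_KEYS] table in the TOML config template with per-provider [MODELS.*] tables and moves the Ollama endpoint out of [API_ENDPOINTS]. As an orientation aid only (not code from the repo), the sketch below shows the nested object shape this layout parses into, written in TypeScript to match the rest of the change; all values are placeholders, and fields above the hunk (e.g. PORT) are omitted.

// Hypothetical sketch (not from the repo): the object shape the new TOML
// layout above yields once parsed. Placeholder values only.
const parsedConfig = {
  GENERAL: {
    SIMILARITY_MEASURE: 'cosine',
    KEEP_ALIVE: '5m',
  },
  MODELS: {
    OPENAI: { API_KEY: '' },
    GROQ: { API_KEY: '' },
    ANTHROPIC: { API_KEY: '' },
    GEMINI: { API_KEY: '' },
    CUSTOM_OPENAI: { API_KEY: '', API_URL: '' },
    OLLAMA: { API_URL: 'http://host.docker.internal:11434' },
  },
  API_ENDPOINTS: {
    SEARXNG: 'http://localhost:32768',
  },
};

// Previously the same information was flat:
// { API_KEYS: { OPENAI: '', GROQ: '', ANTHROPIC: '', GEMINI: '' },
//   API_ENDPOINTS: { SEARXNG: '...', OLLAMA: '...' } }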
@@ -10,15 +10,30 @@ interface Config {
     SIMILARITY_MEASURE: string;
     KEEP_ALIVE: string;
   };
-  API_KEYS: {
-    OPENAI: string;
-    GROQ: string;
-    ANTHROPIC: string;
-    GEMINI: string;
+  MODELS: {
+    OPENAI: {
+      API_KEY: string;
+    };
+    GROQ: {
+      API_KEY: string;
+    };
+    ANTHROPIC: {
+      API_KEY: string;
+    };
+    GEMINI: {
+      API_KEY: string;
+    };
+    OLLAMA: {
+      API_URL: string;
+    };
+    CUSTOM_OPENAI: {
+      API_URL: string;
+      API_KEY: string;
+      MODEL_NAME: string;
+    };
   };
   API_ENDPOINTS: {
     SEARXNG: string;
-    OLLAMA: string;
   };
 }

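With the interface nested per provider, updateConfig (next hunk) can accept a RecursivePartial<Config> that carries only the fields a caller actually changed. The diff references RecursivePartial but does not show its definition, so the sketch below uses a common mapped-type formulation as an assumption, plus an abridged copy of the new interface, to illustrate a partial update payload.

// Assumed definition of RecursivePartial (referenced by updateConfig but not
// shown in this diff); a common mapped-type formulation.
type RecursivePartial<T> = {
  [P in keyof T]?: T[P] extends object ? RecursivePartial<T[P]> : T[P];
};

// Abridged copy of the new Config interface from the hunk above.
interface Config {
  MODELS: {
    OPENAI: { API_KEY: string };
    CUSTOM_OPENAI: { API_URL: string; API_KEY: string; MODEL_NAME: string };
  };
  API_ENDPOINTS: { SEARXNG: string };
}

// A settings form can now send just the fields it touched, e.g. only the
// custom OpenAI model name ('my-model' is a placeholder value):
const update: RecursivePartial<Config> = {
  MODELS: {
    CUSTOM_OPENAI: { MODEL_NAME: 'my-model' },
  },
};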
@@ -38,42 +53,65 @@ export const getSimilarityMeasure = () =>

 export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;

-export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
+export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;

-export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
+export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;

-export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
+export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;

-export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;
+export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;

 export const getSearxngApiEndpoint = () =>
   process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;

-export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
+export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

-export const updateConfig = (config: RecursivePartial<Config>) => {
-  const currentConfig = loadConfig();
+export const getCustomOpenaiApiKey = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

-  for (const key in currentConfig) {
-    if (!config[key]) config[key] = {};
+export const getCustomOpenaiApiUrl = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.API_URL;

-    if (typeof currentConfig[key] === 'object' && currentConfig[key] !== null) {
-      for (const nestedKey in currentConfig[key]) {
-        if (
-          !config[key][nestedKey] &&
-          currentConfig[key][nestedKey] &&
-          config[key][nestedKey] !== ''
-        ) {
-          config[key][nestedKey] = currentConfig[key][nestedKey];
-        }
+export const getCustomOpenaiModelName = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
+
+const mergeConfigs = (current: any, update: any): any => {
+  if (update === null || update === undefined) {
+    return current;
+  }
+
+  if (typeof current !== 'object' || current === null) {
+    return update;
+  }
+
+  const result = { ...current };
+
+  for (const key in update) {
+    if (Object.prototype.hasOwnProperty.call(update, key)) {
+      const updateValue = update[key];
+
+      if (
+        typeof updateValue === 'object' &&
+        updateValue !== null &&
+        typeof result[key] === 'object' &&
+        result[key] !== null
+      ) {
+        result[key] = mergeConfigs(result[key], updateValue);
+      } else if (updateValue !== undefined) {
+        result[key] = updateValue;
       }
-    } else if (currentConfig[key] && config[key] !== '') {
-      config[key] = currentConfig[key];
     }
   }

+  return result;
+};
+
+export const updateConfig = (config: RecursivePartial<Config>) => {
+  const currentConfig = loadConfig();
+  const mergedConfig = mergeConfigs(currentConfig, config);
+
   fs.writeFileSync(
     path.join(__dirname, `../${configFileName}`),
-    toml.stringify(config),
+    toml.stringify(mergedConfig),
   );
 };
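The old updateConfig walked exactly two levels of the current config and back-filled missing values into the incoming partial in place; the new version builds the merged object with the recursive mergeConfigs helper, which works at any depth and only overwrites leaves the caller actually supplied. The demo below is a self-contained copy of the helper from the hunk above with invented sample data to show that effect.

// Copy of mergeConfigs from the hunk above, repeated verbatim so the demo
// below runs on its own.
const mergeConfigs = (current: any, update: any): any => {
  if (update === null || update === undefined) {
    return current;
  }

  if (typeof current !== 'object' || current === null) {
    return update;
  }

  const result = { ...current };

  for (const key in update) {
    if (Object.prototype.hasOwnProperty.call(update, key)) {
      const updateValue = update[key];

      if (
        typeof updateValue === 'object' &&
        updateValue !== null &&
        typeof result[key] === 'object' &&
        result[key] !== null
      ) {
        result[key] = mergeConfigs(result[key], updateValue);
      } else if (updateValue !== undefined) {
        result[key] = updateValue;
      }
    }
  }

  return result;
};

// Invented sample data: updating only MODEL_NAME leaves its sibling keys and
// the other provider untouched.
const current = {
  MODELS: {
    CUSTOM_OPENAI: { API_KEY: 'old-key', API_URL: 'http://localhost:8080', MODEL_NAME: '' },
    OPENAI: { API_KEY: 'sk-existing' },
  },
};
const update = { MODELS: { CUSTOM_OPENAI: { MODEL_NAME: 'my-model' } } };

console.log(mergeConfigs(current, update).MODELS.CUSTOM_OPENAI);
// { API_KEY: 'old-key', API_URL: 'http://localhost:8080', MODEL_NAME: 'my-model' }
console.log(mergeConfigs(current, update).MODELS.OPENAI);
// { API_KEY: 'sk-existing' }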