From 77de9b776a16cec0cb9c39f33ad4d7a99d6cc4b8 Mon Sep 17 00:00:00 2001
From: chris depalma
Date: Tue, 30 Jul 2024 11:26:44 -0400
Subject: [PATCH] Move front end port to 3010 to avoid collision with apps
 which commonly use it, like open-webui for ollama. Fix issue with server not
 being able to reach searxng. Added config.toml that works locally. Fixed
 sample.config.toml

---
 config.toml | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 config.toml

diff --git a/config.toml b/config.toml
new file mode 100644
index 0000000..5d01e31
--- /dev/null
+++ b/config.toml
@@ -0,0 +1,12 @@
+[GENERAL]
+PORT = 3001 # Port to run the server on
+SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
+
+[API_KEYS]
+OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
+GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
+ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
+
+[API_ENDPOINTS]
+SEARXNG = "http://SearxNG:8080" # SearxNG API URL
+OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
\ No newline at end of file
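
Note (not part of the patch): the sketch below is one hedged way to verify the SearxNG reachability issue the commit message describes. It reads the SEARXNG URL from the config.toml added above and issues a test query. The "@iarna/toml" parser package, Node 18+ for global fetch, and SearxNG's JSON output being enabled on /search are all assumptions, not something this patch specifies.

// check-searxng.ts - hypothetical connectivity check against the config.toml added above
import { readFileSync } from "fs";
import { parse as parseToml } from "@iarna/toml"; // assumed TOML parser package

// Shape of the fields used here, following the config.toml in this patch.
interface Config {
  GENERAL: { PORT: number; SIMILARITY_MEASURE: string };
  API_ENDPOINTS: { SEARXNG: string; OLLAMA: string };
}

const config = parseToml(readFileSync("config.toml", "utf-8")) as unknown as Config;

async function checkSearxng(): Promise<void> {
  // SearxNG answers JSON on /search when the json format is enabled in its settings.
  const url = `${config.API_ENDPOINTS.SEARXNG}/search?q=test&format=json`;
  const res = await fetch(url);
  console.log(`SearxNG at ${config.API_ENDPOINTS.SEARXNG} answered with HTTP ${res.status}`);
}

checkSearxng().catch((err) => {
  // A failure here usually means the hostname in SEARXNG (here "SearxNG") does not
  // resolve from where the backend runs, e.g. outside the Docker network that
  // defines that service name - the kind of problem this patch works around.
  console.error("Could not reach SearxNG:", err);
});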