From 64136b84100de835d73da3bb42742b321153d213 Mon Sep 17 00:00:00 2001 From: Navratan Lal Gupta Date: Mon, 14 Apr 2025 00:13:01 +0530 Subject: [PATCH] docker: configure config.toml using environment variable Signed-off-by: Navratan Lal Gupta --- README.md | 16 ++++++++------ app.dockerfile | 4 +++- container_entrypoint.sh | 49 +++++++++++++++++++++++++++++++++++++++++ docker-compose.yaml | 14 +++++++++++- 4 files changed, 74 insertions(+), 9 deletions(-) create mode 100644 container_entrypoint.sh diff --git a/README.md b/README.md index 18c9f84..26d4b66 100644 --- a/README.md +++ b/README.md @@ -84,16 +84,18 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. 3. After cloning, navigate to the directory containing the project files. -4. Rename the `sample.config.toml` file to `config.toml`. For Docker setups, you need only fill in the following fields: +4. Update environment variables in `docker-compose.yml` file to configure `config.toml`. - - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**. - - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**. - - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**. +Example: - **Note**: You can change these after starting Perplexica from the settings dialog. 
+The following section of `config.toml` can be configured using the environment variables `MODELS_CUSTOM_OPENAI_API_KEY="sk-123456"`, `MODELS_CUSTOM_OPENAI_API_URL="http://localopenai:11134"` and `MODELS_CUSTOM_OPENAI_MODEL_NAME="meta-llama/llama-4"`:
-   - `SIMILARITY_MEASURE`: The similarity measure to use (This is filled by default; you can leave it as is if you are unsure about it.)
+
+```toml
+[MODELS.CUSTOM_OPENAI]
+API_KEY = "sk-123456"
+API_URL = "http://localopenai:11134"
+MODEL_NAME = "meta-llama/llama-4"
+```
 
 5. Ensure you are in the directory containing the `docker-compose.yaml` file and execute:
 
diff --git a/app.dockerfile b/app.dockerfile
index 3433288..04cde5c 100644
--- a/app.dockerfile
+++ b/app.dockerfile
@@ -21,7 +21,9 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static
 COPY --from=builder /home/perplexica/.next/standalone ./
 COPY --from=builder /home/perplexica/data ./data
 
+COPY sample.config.toml /home/perplexica/config.toml
+COPY container_entrypoint.sh /home/perplexica/container_entrypoint.sh
 
 RUN mkdir /home/perplexica/uploads
 
-CMD ["node", "server.js"]
\ No newline at end of file
+CMD ["bash", "/home/perplexica/container_entrypoint.sh"]
\ No newline at end of file
diff --git a/container_entrypoint.sh b/container_entrypoint.sh
new file mode 100644
index 0000000..b8dc0d7
--- /dev/null
+++ b/container_entrypoint.sh
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+
+# Build /home/perplexica/config.toml from environment variables, then start
+# the server. For every "KEY = value" line in config.toml, an environment
+# variable named SECTION_KEY overrides the value when it is set and
+# non-empty. Dots in section names map to underscores, e.g.
+# [MODELS.CUSTOM_OPENAI] API_KEY is overridden by MODELS_CUSTOM_OPENAI_API_KEY.
+
+set -eu
+
+CONFIG_TOML_FILE=/home/perplexica/config.toml
+
+TMP_FILE="${CONFIG_TOML_FILE}.tmp"
+touch "$TMP_FILE"
+
+current_section=""
+
+# The '|| [ -n "$line" ]' keeps a final line that has no trailing newline.
+while IFS= read -r line || [ -n "$line" ]; do
+  # Section header, e.g. "[GENERAL]" or "[MODELS.CUSTOM_OPENAI]".
+  if [[ "$line" =~ ^\[([^]]+)\] ]]; then
+    # Replace every dot (not just the first) so nested sections work.
+    current_section="${BASH_REMATCH[1]//./_}"
+    echo "$line" >> "$TMP_FILE"
+    continue
+  fi
+
+  # Pass blank lines and comments through unchanged.
+  if [[ -z "$line" || "$line" =~ ^[[:space:]]*\# ]]; then
+    echo "$line" >> "$TMP_FILE"
+    continue
+  fi
+
+  # Extract the key; xargs trims surrounding whitespace.
+  key=$(echo "$line" | cut -d '=' -f 1 | xargs)
+
+  # Environment variable name in the form SECTION_KEY.
+  env_var_name="${current_section}_${key}"
+
+  # ${!name} is indirect expansion; ':-' keeps it safe under 'set -u'.
+  env_var_value="${!env_var_name:-}"
+  if [ -n "$env_var_value" ]; then
+    # Override from the environment. The value is written verbatim, so
+    # TOML string values must include their quotes, e.g. FOO="bar".
+    echo "$key = $env_var_value" >> "$TMP_FILE"
+  else
+    # No override: keep the original line.
+    echo "$line" >> "$TMP_FILE"
+  fi
+done < "$CONFIG_TOML_FILE"
+
+# Replace the original file.
+mv "$TMP_FILE" "$CONFIG_TOML_FILE"
+
+echo "Config file updated successfully."
+
+# exec so node becomes PID 1 and receives container stop signals (SIGTERM).
+exec node server.js
diff --git a/docker-compose.yaml b/docker-compose.yaml
index b702b4e..d3b6c7f 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -16,6 +16,19 @@ services:
       dockerfile: app.dockerfile
     environment:
       - SEARXNG_API_URL=http://searxng:8080
+      - GENERAL_SIMILARITY_MEASURE="cosine" # "cosine" or "dot"
+      - GENERAL_KEEP_ALIVE="5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")
+      - MODELS_OPENAI_API_KEY=""
+      - MODELS_GROQ_API_KEY=""
+      - MODELS_ANTHROPIC_API_KEY=""
+      - MODELS_GEMINI_API_KEY=""
+      - MODELS_CUSTOM_OPENAI_API_KEY=""
+      - MODELS_CUSTOM_OPENAI_API_URL=""
+      - MODELS_CUSTOM_OPENAI_MODEL_NAME=""
+      - MODELS_OLLAMA_API_URL="" # Ollama API URL - http://host.docker.internal:11434
+      - MODELS_DEEPSEEK_API_KEY=""
+      - MODELS_LM_STUDIO_API_URL="" # LM Studio API URL - http://host.docker.internal:1234
+      - API_ENDPOINTS_SEARXNG="" # SearxNG API URL - http://localhost:32768
     ports:
       - 3000:3000
     networks:
@@ -23,7 +36,6 @@ services:
     volumes:
       - backend-dbstore:/home/perplexica/data
       - uploads:/home/perplexica/uploads
-      - ./config.toml:/home/perplexica/config.toml
     restart: unless-stopped
 
 networks: