# ============================================================================
# Library RAG MCP Server - Environment Configuration
# ============================================================================
# Copy this file to .env and fill in your values.
# Required variables are marked with [REQUIRED].
# ============================================================================

# [REQUIRED] Mistral API Key for OCR and LLM services
# Get your key at: https://console.mistral.ai/
MISTRAL_API_KEY=your-mistral-api-key-here

# ============================================================================
# LLM Configuration
# ============================================================================

# Ollama base URL for local LLM (default: http://localhost:11434)
OLLAMA_BASE_URL=http://localhost:11434

# LLM model for structure extraction (default: deepseek-r1:14b)
STRUCTURE_LLM_MODEL=deepseek-r1:14b

# Temperature for LLM generation (0.0-2.0, default: 0.2)
STRUCTURE_LLM_TEMPERATURE=0.2

# Default LLM provider: "ollama" (local, free) or "mistral" (API, paid)
# Note: the MCP server always uses "mistral" with mistral-medium-latest.
DEFAULT_LLM_PROVIDER=ollama

# ============================================================================
# Weaviate Configuration
# ============================================================================

# Weaviate server hostname (default: localhost)
WEAVIATE_HOST=localhost

# Weaviate server port (default: 8080)
WEAVIATE_PORT=8080

# ============================================================================
# Logging
# ============================================================================

# Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default: INFO)
LOG_LEVEL=INFO

# ============================================================================
# File System
# ============================================================================

# Base directory for processed files (default: output)
OUTPUT_DIR=output