---
# Feature flags consumed by the host application.
features:
  local_ai: true  # Needs to be set so that network is loaded

# Container definition for the Ollama local-LLM service.
# NOTE(review): nesting below is reconstructed from a whitespace-mangled
# original (the file was collapsed onto one line) — confirm the intended
# structure against the consuming tool's schema.
docker:
  services:
    ollama:
      backup:
        # Service can be backed up without stopping the container first.
        no_stop_required: true
      image: ollama/ollama
      version: latest
      name: ollama
      port: 11434  # Ollama's default HTTP API port
      volumes:
        models: "ollama_models"
      network: "ollama"
      # Models pulled automatically on first start. Quoted because the
      # values contain ':' (tag separator).
      preload_models:
        - "llama3:latest"
        - "mistral:latest"
        - "nomic-embed-text:latest"