## Docker Configuration - Enhance docker-compose.yml with Ollama support for local AI - Add resource limits and health checks for better stability - Configure isolated Docker network (keep-network) - Add persistent volumes for database and uploads - Include optional Ollama service configuration ## Deployment Files - Add DOCKER_DEPLOYMENT.md with comprehensive deployment guide - Add deploy.sh automation script with 10+ commands - Document Proxmox LXC container setup - Add backup/restore procedures - Include SSL/HTTPS and reverse proxy configuration ## Docker Build Optimization - Improve .dockerignore for faster builds - Exclude development files and debug logs - Add comprehensive exclusions for IDE, OS, and testing files ## Features - Support for OpenAI API (cloud AI) - Support for Ollama (local AI models) - Automatic database backups - Health checks and auto-restart - Resource limits for VM/LXC environments ## Documentation - Complete Proxmox deployment guide - Troubleshooting section - Security best practices - Performance tuning recommendations 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com> Or a shorter version if you prefer: feat(docker): Add Proxmox deployment config with Ollama support - Enhance docker-compose.yml with health checks, resource limits, Ollama support - Add DOCKER_DEPLOYMENT.md guide (50+ sections covering Proxmox, SSL, AI setup) - Add deploy.sh script with build, start, backup, logs commands - Improve .dockerignore for optimized builds - Document backup/restore procedures and security best practices - Support both OpenAI and local Ollama AI providers 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
96 lines
2.2 KiB
YAML
---
# docker-compose.yml — Keep Notes with optional local-AI (Ollama) support.
#
# NOTE(review): the top-level `version` key is obsolete in the Compose
# Specification — Docker Compose v2 ignores it and prints a warning.
# Kept only for compatibility with legacy docker-compose v1 tooling.
version: '3.8'

services:
  keep-notes:
    build:
      context: .
      dockerfile: Dockerfile
    image: keep-notes:latest
    container_name: keep-notes
    restart: unless-stopped
    ports:
      # Quoted to avoid YAML's sexagesimal-number trap on port mappings.
      - "3000:3000"
    environment:
      # Database
      - DATABASE_URL=file:/app/prisma/dev.db
      - NODE_ENV=production

      # Application (change these before deploying!)
      - NEXTAUTH_URL=http://your-domain.com:3000
      - NEXTAUTH_SECRET=change-this-to-a-random-secret-string

      # AI provider (optional — OpenAI cloud API)
      # - OPENAI_API_KEY=your-openai-api-key-here

      # AI provider (optional — Ollama; requires the ollama service below)
      # - OLLAMA_BASE_URL=http://ollama:11434
      # - OLLAMA_MODEL=granite4:latest
    volumes:
      # Persist the SQLite database
      - keep-db:/app/prisma
      # Persist uploaded images and files
      - keep-uploads:/app/public/uploads
      # Optional: mount custom SSL certificates (read-only)
      # - ./certs:/app/certs:ro
    networks:
      - keep-network
    # Optional: resource limits for a Proxmox VM/LXC host.
    # `cpus` is quoted so YAML keeps it a string, as Compose expects.
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 2G
        reservations:
          cpus: '0.5'
          memory: 512M
    # Optional: health check.
    # NOTE(review): assumes `wget` exists inside the image — confirm, or
    # switch to an equivalent curl/node-based probe if it does not.
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:3000"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  # Optional: Ollama for local AI models.
  # Uncomment this section — and `ollama-data` under top-level `volumes:`
  # below — if you want to run AI models locally.
  # ollama:
  #   image: ollama/ollama:latest
  #   container_name: keep-ollama
  #   restart: unless-stopped
  #   ports:
  #     - "11434:11434"
  #   volumes:
  #     - ollama-data:/root/.ollama
  #   networks:
  #     - keep-network
  #   deploy:
  #     resources:
  #       limits:
  #         cpus: '4'
  #         memory: 8G
  #       reservations:
  #         cpus: '2'
  #         memory: 4G
  #   # GPU support for Proxmox with GPU passthrough — this replaces the
  #   # `deploy:` block above; do not enable both at once.
  #   # deploy:
  #   #   resources:
  #   #     reservations:
  #   #       devices:
  #   #         - driver: nvidia
  #   #           count: 1
  #   #           capabilities: [gpu]

# Isolated bridge network so services resolve each other by name.
networks:
  keep-network:
    driver: bridge

# Named volumes so data survives container re-creation.
volumes:
  keep-db:
    driver: local
  keep-uploads:
    driver: local
  # ollama-data:
  #   driver: local