# Keep/keep-notes/docker-compose.yml
version: '3.8'

services:
  keep-notes:
    build:
      context: .
      dockerfile: Dockerfile
    image: keep-notes:latest
    container_name: keep-notes
    restart: unless-stopped
    ports:
      - "3000:3000"
    environment:
      # Database
      - DATABASE_URL=file:/app/prisma/dev.db
      - NODE_ENV=production
      # Application (IMPORTANT: Change these!)
      - NEXTAUTH_URL=http://your-domain.com:3000
      - NEXTAUTH_SECRET=change-this-to-a-random-secret-string
      # Disable Next.js telemetry
      - NEXT_TELEMETRY_DISABLED=1
      # AI Provider (Optional - for OpenAI)
      # - OPENAI_API_KEY=your-openai-api-key-here
      # AI Provider (Optional - for Ollama)
      # - OLLAMA_BASE_URL=http://ollama:11434
      # - OLLAMA_MODEL=granite4:latest
    volumes:
      # Persist SQLite database
      - keep-db:/app/prisma
      # Persist uploaded images and files
      - keep-uploads:/app/public/uploads
      # Optional: Mount custom SSL certificates
      # - ./certs:/app/certs:ro
    networks:
      - keep-network
    # Optional: Resource limits for Proxmox VM
    # NOTE(review): `deploy.resources` limits are applied by `docker compose` (v2)
    # and Swarm; legacy docker-compose v1 only honors them with `--compatibility`.
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 2G
        reservations:
          cpus: '0.5'
          memory: 512M
    # Health check for automatic restart
    # NOTE(review): assumes `wget` is available inside the image — confirm
    # against the Dockerfile (alpine-based node images may need it installed).
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:3000"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  # Optional: Ollama for local AI models
  # Uncomment this section if you want to use local AI models
  # (also uncomment the ollama-data volume at the bottom of this file).
  # ollama:
  #   image: ollama/ollama:latest
  #   container_name: keep-ollama
  #   restart: unless-stopped
  #   ports:
  #     - "11434:11434"
  #   volumes:
  #     - ollama-data:/root/.ollama
  #   networks:
  #     - keep-network
  #   deploy:
  #     resources:
  #       limits:
  #         cpus: '4'
  #         memory: 8G
  #       reservations:
  #         cpus: '2'
  #         memory: 4G

networks:
  keep-network:
    driver: bridge

volumes:
  keep-db:
    driver: local
  keep-uploads:
    driver: local
  # ollama-data:
  #   driver: local