# Docker Compose stack: a job scraper (one-shot and scheduled variants)
# plus an nginx container serving the scraped data as a static dashboard.
services:
  # Run scraper once (for manual triggering)
  scraper:
    build: .
    container_name: job-scraper
    volumes:
      - ./data:/app/data
      - ./config.yaml:/app/config.yaml:ro
    environment:
      - TZ=America/Toronto

  # Scheduled scraper - runs daily at 9 AM
  scraper-scheduled:
    build: .
    container_name: job-scraper-scheduled
    volumes:
      - ./data:/app/data
      - ./config.yaml:/app/config.yaml:ro
    environment:
      - TZ=America/Toronto
    command: ["python", "main.py", "--schedule"]
    restart: unless-stopped
    logging:
      driver: json-file
      options:
        # Quoted: "10m"/"3" must reach Docker as strings, not numbers.
        max-size: "10m"
        max-file: "3"

  # Web dashboard - lightweight static file server
  dashboard:
    image: nginx:alpine
    container_name: job-dashboard
    ports:
      # Quoted host:container mapping avoids YAML's sexagesimal-int trap;
      # bound to loopback only, so the dashboard is not exposed externally.
      - "127.0.0.1:8085:80"
    volumes:
      - ./data:/usr/share/nginx/html:ro
      - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro
    restart: unless-stopped
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"