Files
site11/docker-compose.yml
jungwoo choi 070032006e feat: Implement async queue-based news pipeline with microservices
Major architectural transformation from synchronous to asynchronous processing:

## Pipeline Services (8 microservices)
- pipeline-scheduler: APScheduler for 30-minute periodic job triggers
- pipeline-rss-collector: RSS feed collection with deduplication (7-day TTL)
- pipeline-google-search: Content enrichment via Google Search API
- pipeline-ai-summarizer: AI summarization using Claude API (claude-sonnet-4-20250514)
- pipeline-translator: Translation using DeepL Pro API
- pipeline-image-generator: Image generation with Replicate API (Stable Diffusion)
- pipeline-article-assembly: Final article assembly and MongoDB storage
- pipeline-monitor: Real-time monitoring dashboard (port 8100)

## Key Features
- Redis-based job queue with deduplication
- Asynchronous processing with Python asyncio
- Shared models and queue manager for inter-service communication
- Docker containerization for all services
- Container names standardized with site11_ prefix

## Removed Services
- Moved to backup: google-search, rss-feed, news-aggregator, ai-writer

## Configuration
- DeepL Pro API: 3abbc796-2515-44a8-972d-22dcf27ab54a
- Claude Model: claude-sonnet-4-20250514
- Redis Queue TTL: 7 days for deduplication

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-13 19:22:14 +09:00

653 lines
18 KiB
YAML

# Docker Compose stack for the site11 platform (console, core services,
# infrastructure, and the async news pipeline).
# NOTE(review): indentation was restored to standard 2-space nesting; the
# scraped copy of this file had all leading whitespace stripped.
services:
  # Admin console frontend; static assets served on container port 80.
  console-frontend:
    build:
      context: ./console/frontend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_console_frontend
    ports:
      - "${CONSOLE_FRONTEND_PORT}:80"
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - console-backend
  # Console API backend (container port 8000); issues/validates JWTs and
  # proxies user operations to users-backend via USERS_SERVICE_URL.
  console-backend:
    build:
      context: ./console/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_console_backend
    ports:
      - "${CONSOLE_BACKEND_PORT}:8000"
    environment:
      - ENV=${ENV}
      - PORT=8000
      - USERS_SERVICE_URL=${USERS_SERVICE_URL}
      - JWT_SECRET_KEY=${JWT_SECRET_KEY}
      - JWT_ALGORITHM=${JWT_ALGORITHM}
      - ACCESS_TOKEN_EXPIRE_MINUTES=${ACCESS_TOKEN_EXPIRE_MINUTES}
    volumes:
      # Source bind-mounted for live reload during development.
      - ./console/backend:/app
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - users-backend
  # Users service; persists to MongoDB and publishes/consumes Kafka events.
  users-backend:
    build:
      context: ./services/users/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_users_backend
    ports:
      - "${USERS_BACKEND_PORT}:8000"
    environment:
      - ENV=${ENV}
      - PORT=8000
      - MONGODB_URL=${MONGODB_URL}
      - DB_NAME=${USERS_DB_NAME}
      - KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
      - KAFKA_GROUP_ID=${KAFKA_GROUP_ID}
    volumes:
      - ./services/users/backend:/app
      # Shared code mounted into the container (also mounted by oauth-backend).
      - ./shared:/app/shared
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - mongodb
      - kafka
  # Image service; caches processed files on disk and converts to WebP.
  images-backend:
    build:
      context: ./services/images/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_images_backend
    ports:
      - "${IMAGES_SERVICE_PORT}:8000"
    environment:
      - ENV=${ENV}
      - PORT=8000
      - REDIS_URL=${REDIS_URL}
      - MONGODB_URL=${MONGODB_URL}
      - CACHE_DIR=/app/cache
      - CONVERT_TO_WEBP=true
    volumes:
      - ./services/images/backend:/app
      # Persistent image cache (matches CACHE_DIR above).
      - ./data/images-cache:/app/cache
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - redis
      - mongodb
  # OAuth service; shares the JWT secret/algorithm with console-backend so
  # tokens are verifiable across services.
  oauth-backend:
    build:
      context: ./services/oauth/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_oauth_backend
    ports:
      - "${OAUTH_SERVICE_PORT}:8000"
    environment:
      - ENV=${ENV}
      - PORT=8000
      - MONGODB_URL=${MONGODB_URL}
      - OAUTH_DB_NAME=${OAUTH_DB_NAME}
      - JWT_SECRET_KEY=${JWT_SECRET_KEY}
      - JWT_ALGORITHM=${JWT_ALGORITHM}
      - KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
      - KAFKA_GROUP_ID=${KAFKA_GROUP_ID}
    volumes:
      - ./services/oauth/backend:/app
      - ./shared:/app/shared
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - mongodb
      - kafka
  # MongoDB 7 — primary datastore for most services.
  mongodb:
    image: mongo:7.0
    container_name: ${COMPOSE_PROJECT_NAME}_mongodb
    environment:
      - MONGO_INITDB_DATABASE=${MONGODB_DATABASE}
    ports:
      - "${MONGODB_PORT}:27017"
    volumes:
      - ./data/mongodb:/data/db
      # NOTE(review): this host path is *inside* ./data/mongodb, which is
      # already mounted at /data/db — so the config DB directory also shows
      # up under /data/db/configdb. Consider a sibling directory such as
      # ./data/mongodb-config to keep the two stores separate.
      - ./data/mongodb/configdb:/data/configdb
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: echo 'db.runCommand("ping").ok' | mongosh localhost:27017/test --quiet
      interval: 10s
      timeout: 5s
      retries: 5
  # Redis 7 — caching and the pipeline job queue (see pipeline-* services).
  redis:
    image: redis:7-alpine
    container_name: ${COMPOSE_PROJECT_NAME}_redis
    ports:
      - "${REDIS_PORT}:6379"
    volumes:
      # RDB/AOF persistence directory.
      - ./data/redis:/data
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
  # ZooKeeper — coordination service required by the (non-KRaft) Kafka broker.
  zookeeper:
    image: confluentinc/cp-zookeeper:7.5.0
    container_name: ${COMPOSE_PROJECT_NAME}_zookeeper
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    ports:
      - "${KAFKA_ZOOKEEPER_PORT}:2181"
    volumes:
      - ./data/zookeeper/data:/var/lib/zookeeper/data
      - ./data/zookeeper/logs:/var/lib/zookeeper/log
    networks:
      - site11_network
    restart: unless-stopped
  # Kafka broker (single node, replication factor 1 — not HA).
  kafka:
    image: confluentinc/cp-kafka:7.5.0
    container_name: ${COMPOSE_PROJECT_NAME}_kafka
    depends_on:
      - zookeeper
    ports:
      - "${KAFKA_PORT}:9092"
      # JMX metrics port.
      - "9101:9101"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      # Two advertised listeners: kafka:29092 for clients on the Docker
      # network, localhost:9092 for clients on the host machine.
      # NOTE(review): in-network services that bootstrap via kafka:9092 will
      # be handed back "localhost:9092" in broker metadata and fail to
      # connect — in-network clients should use kafka:29092. Several services
      # in this file hard-code kafka:9092; confirm and align.
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_JMX_PORT: 9101
      KAFKA_JMX_HOSTNAME: localhost
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
    volumes:
      - ./data/kafka:/var/lib/kafka/data
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "kafka-broker-api-versions", "--bootstrap-server", "localhost:9092"]
      interval: 10s
      timeout: 5s
      retries: 5
# Notifications Service
notifications-backend:
build:
context: ./services/notifications/backend
dockerfile: Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_notifications_backend
ports:
- "8013:8000"
environment:
- MONGODB_URL=mongodb://mongodb:27017
- REDIS_URL=redis://redis:6379
- KAFKA_BOOTSTRAP_SERVERS=kafka:9092
- SMTP_HOST=${SMTP_HOST:-smtp.gmail.com}
- SMTP_PORT=${SMTP_PORT:-587}
- SMTP_USER=${SMTP_USER:-}
- SMTP_PASSWORD=${SMTP_PASSWORD:-}
- SMS_API_KEY=${SMS_API_KEY:-}
- SMS_API_URL=${SMS_API_URL:-}
- FCM_SERVER_KEY=${FCM_SERVER_KEY:-}
depends_on:
- mongodb
- redis
- kafka
networks:
- site11_network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 10s
retries: 3
  # MinIO Object Storage — S3-compatible store used by files-backend.
  # Ports: 9000 = S3 API, 9001 = web console.
  minio:
    image: minio/minio:latest
    container_name: ${COMPOSE_PROJECT_NAME}_minio
    ports:
      - "9000:9000"
      - "9001:9001"
    environment:
      # NOTE(review): minioadmin/minioadmin are the well-known defaults —
      # set real credentials in .env for any non-local deployment.
      - MINIO_ROOT_USER=${MINIO_ROOT_USER:-minioadmin}
      - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-minioadmin}
    volumes:
      - ./data/minio:/data
    command: server /data --console-address ":9001"
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3
  # File Management Service — metadata in MongoDB, blobs in MinIO.
  files-backend:
    build:
      context: ./services/files/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_files_backend
    ports:
      - "8014:8000"
    environment:
      - ENV=${ENV}
      - PORT=8000
      - MONGODB_URL=${MONGODB_URL}
      - FILES_DB_NAME=${FILES_DB_NAME:-files_db}
      # In-network MinIO endpoint; MINIO_SECURE=false → plain HTTP.
      - MINIO_ENDPOINT=minio:9000
      - MINIO_ACCESS_KEY=${MINIO_ACCESS_KEY:-minioadmin}
      - MINIO_SECRET_KEY=${MINIO_SECRET_KEY:-minioadmin}
      - MINIO_SECURE=false
    volumes:
      - ./services/files/backend:/app
      # NOTE(review): this bind replaces the container's entire /tmp —
      # confirm that is intended rather than a dedicated upload-temp path.
      - ./data/files-temp:/tmp
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - mongodb
      - minio
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
  # Apache Solr Search Engine — pre-creates the "site11" core from the
  # mounted configset on startup.
  solr:
    image: solr:9.4
    container_name: ${COMPOSE_PROJECT_NAME}_solr
    ports:
      - "8983:8983"
    volumes:
      - ./data/solr:/var/solr
      - ./services/search/solr-config:/opt/solr/server/solr/configsets/site11_config
    command:
      - solr-precreate
      - site11
      - /opt/solr/server/solr/configsets/site11_config
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8983/solr/site11/admin/ping"]
      interval: 30s
      timeout: 10s
      retries: 3
  # Search Service — API layer in front of Solr; indexes data from MongoDB
  # and consumes Kafka events.
  search-backend:
    build:
      context: ./services/search/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_search_backend
    ports:
      - "8015:8000"
    environment:
      - ENV=${ENV}
      - PORT=8000
      - SOLR_URL=http://solr:8983/solr
      - MONGODB_URL=${MONGODB_URL}
      - KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
    volumes:
      - ./services/search/backend:/app
    networks:
      - site11_network
    restart: unless-stopped
    depends_on:
      - solr
      - mongodb
      - kafka
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
# Statistics Service
statistics-backend:
build:
context: ./services/statistics/backend
dockerfile: Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_statistics_backend
ports:
- "8012:8000"
environment:
- REDIS_URL=redis://redis:6379
- KAFKA_BOOTSTRAP_SERVERS=kafka:9092
- INFLUXDB_HOST=influxdb
- INFLUXDB_PORT=8086
- INFLUXDB_DATABASE=statistics
depends_on:
- redis
- kafka
networks:
- site11_network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 10s
retries: 3
# Google Search Service
google-search-backend:
build:
context: ./services/google-search/backend
dockerfile: Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_google_search_backend
ports:
- "8016:8000"
environment:
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_DB=2
- GOOGLE_API_KEY=AIzaSyBakoCsDP_oF5V4oq_eEKs4eQb-ekqxnRM
- GOOGLE_SEARCH_ENGINE_ID=35bfbdb7b6f244569
- SERPAPI_KEY=${SERPAPI_KEY:-}
- DEFAULT_LANGUAGE=ko
- DEFAULT_COUNTRY=kr
- CACHE_TTL=3600
depends_on:
- redis
networks:
- site11_network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 10s
retries: 3
  # RSS Feed Service — scheduled feed polling (Redis DB 3 for dedup/cache).
  rss-feed-backend:
    build:
      context: ./services/rss-feed/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_rss_feed_backend
    ports:
      - "8017:8000"
    environment:
      - MONGODB_URL=mongodb://mongodb:27017
      - DB_NAME=rss_feed_db
      - REDIS_URL=redis://redis:6379
      - REDIS_DB=3
      # Poll interval in seconds (900 = 15 minutes).
      - DEFAULT_UPDATE_INTERVAL=900
      - MAX_ENTRIES_PER_FEED=100
      - ENABLE_SCHEDULER=true
      - SCHEDULER_TIMEZONE=Asia/Seoul
    depends_on:
      - mongodb
      - redis
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
  # News Aggregator Service — fans out to the RSS feed and Google Search
  # services over the internal network.
  news-aggregator-backend:
    build:
      context: ./services/news-aggregator/backend
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_news_aggregator_backend
    ports:
      - "8018:8000"
    environment:
      - RSS_SERVICE_URL=http://rss-feed-backend:8000
      - GOOGLE_SEARCH_SERVICE_URL=http://google-search-backend:8000
    depends_on:
      - rss-feed-backend
      - google-search-backend
    networks:
      - site11_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
# AI Writer Service
ai-writer-backend:
build:
context: ./services/ai-writer/backend
dockerfile: Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_ai_writer_backend
ports:
- "8019:8000"
environment:
- NEWS_AGGREGATOR_URL=http://news-aggregator-backend:8000
- CLAUDE_API_KEY=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
- MONGODB_URL=mongodb://mongodb:27017
- DB_NAME=ai_writer_db
- REDIS_URL=redis://redis:6379
depends_on:
- mongodb
- redis
- news-aggregator-backend
networks:
- site11_network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 10s
retries: 3
# AI Writer Worker Service
ai-writer-worker:
build:
context: ./services/ai-writer
dockerfile: worker/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_ai_writer_worker
environment:
- CLAUDE_API_KEY=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
- MONGODB_URL=mongodb://mongodb:27017
- DB_NAME=ai_writer_db
- REDIS_URL=redis://redis:6379
- WORKER_COUNT=3
depends_on:
- mongodb
- redis
- ai-writer-backend
networks:
- site11_network
restart: unless-stopped
  # ============ Pipeline Services ============
  # Pipeline Scheduler Service — periodically enqueues pipeline jobs into
  # the Redis queue (no exposed ports; queue-driven only).
  pipeline-scheduler:
    build:
      context: ./services/pipeline
      dockerfile: scheduler/Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_pipeline_scheduler
    restart: unless-stopped
    depends_on:
      - redis
      - mongodb
    environment:
      - REDIS_URL=redis://redis:6379
      - MONGODB_URL=mongodb://mongodb:27017
      - DB_NAME=pipeline_db
      - LOG_LEVEL=INFO
    volumes:
      # Shared models/queue code, read-only (common to all pipeline workers).
      - ./services/pipeline/shared:/app/shared:ro
    networks:
      - site11_network
  # Pipeline RSS Collector Worker — consumes collection jobs from the Redis
  # queue; stateless apart from Redis.
  pipeline-rss-collector:
    build:
      context: ./services/pipeline
      dockerfile: rss-collector/Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_pipeline_rss_collector
    restart: unless-stopped
    depends_on:
      - redis
    environment:
      - REDIS_URL=redis://redis:6379
      - LOG_LEVEL=INFO
    volumes:
      - ./services/pipeline/shared:/app/shared:ro
    networks:
      - site11_network
# Pipeline Google Search Worker
pipeline-google-search:
build:
context: ./services/pipeline
dockerfile: google-search/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_pipeline_google_search
restart: unless-stopped
depends_on:
- redis
environment:
- REDIS_URL=redis://redis:6379
- GOOGLE_API_KEY=AIzaSyBakoCsDP_oF5V4oq_eEKs4eQb-ekqxnRM
- GOOGLE_SEARCH_ENGINE_ID=35bfbdb7b6f244569
- LOG_LEVEL=INFO
volumes:
- ./services/pipeline/shared:/app/shared:ro
networks:
- site11_network
# Pipeline AI Summarizer Worker
pipeline-ai-summarizer:
build:
context: ./services/pipeline
dockerfile: ai-summarizer/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_pipeline_ai_summarizer
restart: unless-stopped
depends_on:
- redis
environment:
- REDIS_URL=redis://redis:6379
- CLAUDE_API_KEY=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
- LOG_LEVEL=INFO
volumes:
- ./services/pipeline/shared:/app/shared:ro
networks:
- site11_network
# Pipeline Article Assembly Worker
pipeline-article-assembly:
build:
context: ./services/pipeline
dockerfile: article-assembly/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_pipeline_article_assembly
restart: unless-stopped
depends_on:
- redis
- mongodb
environment:
- REDIS_URL=redis://redis:6379
- MONGODB_URL=mongodb://mongodb:27017
- DB_NAME=pipeline_db
- CLAUDE_API_KEY=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
- LOG_LEVEL=INFO
volumes:
- ./services/pipeline/shared:/app/shared:ro
networks:
- site11_network
  # Pipeline Monitor (optional dashboard) — the only pipeline service with
  # an exposed port (host 8100 → container 8000).
  pipeline-monitor:
    build:
      context: ./services/pipeline
      dockerfile: monitor/Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_pipeline_monitor
    restart: unless-stopped
    depends_on:
      - redis
      - mongodb
    ports:
      - "8100:8000"
    environment:
      - REDIS_URL=redis://redis:6379
      - MONGODB_URL=mongodb://mongodb:27017
      - DB_NAME=pipeline_db
      - LOG_LEVEL=INFO
    volumes:
      - ./services/pipeline/shared:/app/shared:ro
    networks:
      - site11_network
# Pipeline Translator
pipeline-translator:
build:
context: ./services/pipeline
dockerfile: translator/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}_pipeline_translator
restart: unless-stopped
depends_on:
- redis
environment:
- REDIS_URL=redis://redis:6379
- DEEPL_API_KEY=3abbc796-2515-44a8-972d-22dcf27ab54a
- LOG_LEVEL=INFO
volumes:
- ./services/pipeline/shared:/app/shared:ro
networks:
- site11_network
  # Pipeline Image Generator — generates article images via the Replicate
  # API (key already injected from the environment, defaulting to empty).
  pipeline-image-generator:
    build:
      context: ./services/pipeline
      dockerfile: image-generator/Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}_pipeline_image_generator
    restart: unless-stopped
    depends_on:
      - redis
    environment:
      - REDIS_URL=redis://redis:6379
      - REPLICATE_API_KEY=${REPLICATE_API_KEY:-}
      - LOG_LEVEL=INFO
    volumes:
      - ./services/pipeline/shared:/app/shared:ro
    networks:
      - site11_network
networks:
  site11_network:
    driver: bridge
    # Fixed network name (instead of the project-prefixed default) so other
    # compose projects/tools can attach to the same network.
    name: site11_network

# Named volumes are replaced with bind mounts in ./data/ directory
# volumes:
#   mongodb_data:
#   mongodb_config:
#   redis_data:
#   images_cache:
#   zookeeper_data:
#   zookeeper_logs:
#   kafka_data:
#   minio_data:
#   files_temp:
#   solr_data: