Step 7: Implement the Kafka event system

- Add Kafka and Zookeeper containers
- Create a shared Kafka library (Producer/Consumer)
- Define event types and implement the event model
- Add event publishing to the Users service (USER_CREATED, USER_UPDATED, USER_DELETED); see the usage sketch below
- Create the PROGRESS.md and PLAN.md documents
- Complete the aiokafka integration
jungwoo choi
2025-09-10 17:00:57 +09:00
parent 4451170466
commit bf05e173cc
9 changed files with 687 additions and 3 deletions
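
The Users-service changes themselves are not among the files below. As a rough sketch of how the shared library is intended to be wired into a service, publishing USER_CREATED could look like the following; the topic name "user-events" and the helper names (create_user, main) are assumptions for illustration, not code from this commit.

# Hypothetical sketch: the Users service publishing USER_CREATED via the shared library.
# The topic "user-events" and the function names are illustrative only.
import asyncio
from shared.kafka import KafkaProducer, Event, EventType

producer = KafkaProducer(bootstrap_servers="kafka:9092")

async def create_user(user: dict) -> dict:
    # ... persist the user in the database here ...
    event = Event(
        event_type=EventType.USER_CREATED,
        service="users",
        data={"id": user["id"], "email": user["email"]},
        user_id=str(user["id"]),
    )
    await producer.send_event("user-events", event)  # assumed topic name
    return user

async def main():
    await producer.start()
    try:
        await create_user({"id": 1, "email": "alice@example.com"})
    finally:
        await producer.stop()

asyncio.run(main())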

shared/kafka/__init__.py Normal file

@@ -0,0 +1,5 @@
from .producer import KafkaProducer
from .consumer import KafkaConsumer
from .events import Event, EventType
__all__ = ['KafkaProducer', 'KafkaConsumer', 'Event', 'EventType']

shared/kafka/consumer.py Normal file

@@ -0,0 +1,125 @@
import json
import asyncio
from typing import Optional, Callable, Dict, Any, List
from aiokafka import AIOKafkaConsumer
from aiokafka.errors import KafkaError
import logging
from .events import Event, EventType
logger = logging.getLogger(__name__)
class KafkaConsumer:
def __init__(
self,
topics: List[str],
group_id: str,
bootstrap_servers: str = "kafka:9092"
):
self.topics = topics
self.group_id = group_id
self.bootstrap_servers = bootstrap_servers
self._consumer: Optional[AIOKafkaConsumer] = None
self._handlers: Dict[EventType, List[Callable]] = {}
        self._running = False
        self._consume_task: Optional[asyncio.Task] = None
def register_handler(self, event_type: EventType, handler: Callable):
"""이벤트 타입별 핸들러 등록"""
if event_type not in self._handlers:
self._handlers[event_type] = []
self._handlers[event_type].append(handler)
logger.info(f"Registered handler for {event_type}")
async def start(self):
"""Kafka Consumer 시작"""
try:
self._consumer = AIOKafkaConsumer(
*self.topics,
bootstrap_servers=self.bootstrap_servers,
group_id=self.group_id,
value_deserializer=lambda v: json.loads(v.decode()),
auto_offset_reset='earliest',
enable_auto_commit=True,
auto_commit_interval_ms=1000,
session_timeout_ms=30000,
heartbeat_interval_ms=10000
)
await self._consumer.start()
self._running = True
logger.info(f"Kafka Consumer started: {self.topics} (group: {self.group_id})")
            # Start the message-processing loop; keep a reference so the task
            # is not garbage-collected while running
            self._consume_task = asyncio.create_task(self._consume_messages())
except Exception as e:
logger.error(f"Failed to start Kafka Consumer: {e}")
raise
async def stop(self):
"""Kafka Consumer 종료"""
self._running = False
if self._consumer:
await self._consumer.stop()
logger.info("Kafka Consumer stopped")
async def _consume_messages(self):
"""메시지 소비 루프"""
if not self._consumer:
return
while self._running:
try:
                # Fetch a batch of messages (wait up to 100 ms)
msg_batch = await self._consumer.getmany(timeout_ms=100)
for tp, messages in msg_batch.items():
for msg in messages:
await self._process_message(msg.value)
except KafkaError as e:
logger.error(f"Kafka error: {e}")
await asyncio.sleep(1)
except Exception as e:
logger.error(f"Error processing messages: {e}")
await asyncio.sleep(1)
async def _process_message(self, message: Dict[str, Any]):
"""개별 메시지 처리"""
try:
            # Convert the raw dict into an Event object
event = Event(**message)
            # Invoke the registered handlers for this event type
handlers = self._handlers.get(event.event_type, [])
for handler in handlers:
try:
if asyncio.iscoroutinefunction(handler):
await handler(event)
else:
handler(event)
except Exception as e:
logger.error(f"Handler error for {event.event_type}: {e}")
if not handlers:
logger.debug(f"No handlers for event type: {event.event_type}")
except Exception as e:
logger.error(f"Failed to process message: {e}")
async def consume_one(self, timeout: float = 1.0) -> Optional[Event]:
"""단일 메시지 소비 (테스트/디버깅용)"""
if not self._consumer:
return None
try:
msg = await asyncio.wait_for(
self._consumer.getone(),
timeout=timeout
)
return Event(**msg.value)
except asyncio.TimeoutError:
return None
except Exception as e:
logger.error(f"Error consuming message: {e}")
return None
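
A minimal, hypothetical usage sketch for this consumer; the topic "user-events" and the group "images-service" are assumed names, not values from this commit.

# Hypothetical usage of the shared KafkaConsumer; topic and group names are assumptions.
import asyncio
from shared.kafka import KafkaConsumer, Event, EventType

async def handle_user_created(event: Event):
    # React to a new user, e.g. warm caches or create per-user resources
    print(f"user created: {event.data}")

async def main():
    consumer = KafkaConsumer(
        topics=["user-events"],
        group_id="images-service",
        bootstrap_servers="kafka:9092",
    )
    consumer.register_handler(EventType.USER_CREATED, handle_user_created)
    await consumer.start()
    try:
        # The consume loop runs as a background task; keep the program alive briefly
        await asyncio.sleep(60)
    finally:
        await consumer.stop()

asyncio.run(main())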

shared/kafka/events.py Normal file

@@ -0,0 +1,31 @@
from enum import Enum
from pydantic import BaseModel, Field
from datetime import datetime
from typing import Any, Optional, Dict
class EventType(str, Enum):
USER_CREATED = "user.created"
USER_UPDATED = "user.updated"
USER_DELETED = "user.deleted"
USER_LOGIN = "user.login"
IMAGE_UPLOADED = "image.uploaded"
IMAGE_CACHED = "image.cached"
IMAGE_DELETED = "image.deleted"
TASK_CREATED = "task.created"
TASK_COMPLETED = "task.completed"
TASK_FAILED = "task.failed"
class Event(BaseModel):
event_type: EventType
timestamp: datetime = Field(default_factory=datetime.now)
service: str
data: Dict[str, Any]
correlation_id: Optional[str] = None
user_id: Optional[str] = None
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
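
For reference, constructing an event and serializing it the way the producer does could look like this; the field values are made up.

# Illustrative only: building an Event and serializing it as the producer does.
import json
from shared.kafka import Event, EventType

event = Event(
    event_type=EventType.USER_CREATED,
    service="users",
    data={"id": 42, "email": "alice@example.com"},
    correlation_id="req-123",
)
payload = event.dict()                                  # Pydantic v1-style dump
payload["timestamp"] = event.timestamp.isoformat()      # datetime is not JSON-serializable
print(json.dumps(payload))
# {"event_type": "user.created", "timestamp": "...", "service": "users", ...}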

shared/kafka/producer.py Normal file

@@ -0,0 +1,102 @@
import json
import asyncio
from typing import Optional, Dict, Any
from aiokafka import AIOKafkaProducer
from aiokafka.errors import KafkaError
import logging
from .events import Event
logger = logging.getLogger(__name__)
class KafkaProducer:
def __init__(self, bootstrap_servers: str = "kafka:9092"):
self.bootstrap_servers = bootstrap_servers
self._producer: Optional[AIOKafkaProducer] = None
async def start(self):
"""Kafka Producer 시작"""
try:
self._producer = AIOKafkaProducer(
bootstrap_servers=self.bootstrap_servers,
value_serializer=lambda v: json.dumps(v).encode(),
compression_type="gzip",
acks='all',
retry_backoff_ms=100,
max_in_flight_requests_per_connection=5
)
await self._producer.start()
logger.info(f"Kafka Producer started: {self.bootstrap_servers}")
except Exception as e:
logger.error(f"Failed to start Kafka Producer: {e}")
raise
async def stop(self):
"""Kafka Producer 종료"""
if self._producer:
await self._producer.stop()
logger.info("Kafka Producer stopped")
async def send_event(self, topic: str, event: Event) -> bool:
"""이벤트 전송"""
if not self._producer:
logger.error("Producer not started")
return False
try:
event_dict = event.dict()
event_dict['timestamp'] = event.timestamp.isoformat()
await self._producer.send_and_wait(
topic,
value=event_dict,
key=event.correlation_id.encode() if event.correlation_id else None
)
logger.info(f"Event sent to {topic}: {event.event_type}")
return True
except KafkaError as e:
logger.error(f"Failed to send event to {topic}: {e}")
return False
except Exception as e:
logger.error(f"Unexpected error sending event: {e}")
return False
    async def send_batch(self, topic: str, events: list[Event]) -> int:
        """Publish multiple events as batches."""
        if not self._producer:
            logger.error("Producer not started")
            return 0
        sent_count = 0
        # aiokafka's send_batch requires an explicit partition, so pick one of
        # the topic's partitions up front
        partitions = await self._producer.partitions_for(topic)
        partition = next(iter(partitions))
        batch = self._producer.create_batch()
        for event in events:
            event_dict = event.dict()
            event_dict['timestamp'] = event.timestamp.isoformat()
            metadata = batch.append(
                key=event.correlation_id.encode() if event.correlation_id else None,
                value=json.dumps(event_dict).encode(),
                timestamp=None
            )
            if metadata is None:
                # The batch is full: send it, then start a new batch with the current event
                await self._producer.send_batch(batch, topic, partition=partition)
                sent_count += batch.record_count()
                batch = self._producer.create_batch()
                batch.append(
                    key=event.correlation_id.encode() if event.correlation_id else None,
                    value=json.dumps(event_dict).encode(),
                    timestamp=None
                )
        # Send whatever remains in the final batch
        if batch.record_count() > 0:
            await self._producer.send_batch(batch, topic, partition=partition)
            sent_count += batch.record_count()
        logger.info(f"Sent {sent_count} events to {topic}")
        return sent_count
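
A quick, hypothetical sketch of batch publishing with this producer; the topic name "task-events" is an assumption.

# Hypothetical batch-publishing usage; the topic name is assumed.
import asyncio
from shared.kafka import KafkaProducer, Event, EventType

async def main():
    producer = KafkaProducer(bootstrap_servers="kafka:9092")
    await producer.start()
    try:
        events = [
            Event(event_type=EventType.TASK_CREATED, service="tasks", data={"task_id": i})
            for i in range(100)
        ]
        sent = await producer.send_batch("task-events", events)
        print(f"sent {sent} events")
    finally:
        await producer.stop()

asyncio.run(main())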