Step 7: Kafka 이벤트 시스템 구현
- Kafka 및 Zookeeper 컨테이너 추가
- 공유 Kafka 라이브러리 생성 (Producer/Consumer)
- 이벤트 타입 정의 및 이벤트 모델 구현
- Users 서비스에 이벤트 발행 기능 추가 (USER_CREATED, USER_UPDATED, USER_DELETED)
- PROGRESS.md 및 PLAN.md 문서 생성
- aiokafka 통합 완료
This commit is contained in:
@ -4,11 +4,20 @@ from pydantic import BaseModel
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
import uvicorn
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
from database import init_db
|
||||
from models import User
|
||||
from beanie import PydanticObjectId
|
||||
|
||||
sys.path.append('/app')
|
||||
from shared.kafka import KafkaProducer, Event, EventType
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Pydantic models for requests
|
||||
class UserCreate(BaseModel):
|
||||
@ -30,13 +39,32 @@ class UserResponse(BaseModel):
|
||||
updated_at: datetime
|
||||
|
||||
|
||||
# Global Kafka producer
|
||||
kafka_producer: Optional[KafkaProducer] = None
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan handler.

    Startup: initialize the database, then try to start the Kafka producer.
    Kafka is treated as optional infrastructure — if the broker is not
    reachable, the service still comes up and event publishing is skipped
    (``kafka_producer`` stays ``None``; every publish site checks it first).

    Shutdown: stop the producer if it was successfully started.
    """
    global kafka_producer

    await init_db()

    # Best-effort Kafka initialization; a failure is logged, not fatal.
    try:
        kafka_producer = KafkaProducer(
            bootstrap_servers=os.getenv('KAFKA_BOOTSTRAP_SERVERS', 'kafka:9092')
        )
        await kafka_producer.start()
        logger.info("Kafka producer initialized")
    except Exception as e:
        logger.warning(f"Failed to initialize Kafka producer: {e}")
        kafka_producer = None

    yield

    # Shutdown: removed the leftover dead `pass` that preceded this check.
    if kafka_producer:
        await kafka_producer.stop()
|
||||
|
||||
|
||||
app = FastAPI(
|
||||
@ -110,6 +138,20 @@ async def create_user(user_data: UserCreate):
|
||||
|
||||
await user.create()
|
||||
|
||||
# Publish event
|
||||
if kafka_producer:
|
||||
event = Event(
|
||||
event_type=EventType.USER_CREATED,
|
||||
service="users",
|
||||
data={
|
||||
"user_id": str(user.id),
|
||||
"username": user.username,
|
||||
"email": user.email
|
||||
},
|
||||
user_id=str(user.id)
|
||||
)
|
||||
await kafka_producer.send_event("user-events", event)
|
||||
|
||||
return UserResponse(
|
||||
id=str(user.id),
|
||||
username=user.username,
|
||||
@ -147,6 +189,21 @@ async def update_user(user_id: str, user_update: UserUpdate):
|
||||
user.updated_at = datetime.now()
|
||||
await user.save()
|
||||
|
||||
# Publish event
|
||||
if kafka_producer:
|
||||
event = Event(
|
||||
event_type=EventType.USER_UPDATED,
|
||||
service="users",
|
||||
data={
|
||||
"user_id": str(user.id),
|
||||
"username": user.username,
|
||||
"email": user.email,
|
||||
"updated_fields": list(user_update.dict(exclude_unset=True).keys())
|
||||
},
|
||||
user_id=str(user.id)
|
||||
)
|
||||
await kafka_producer.send_event("user-events", event)
|
||||
|
||||
return UserResponse(
|
||||
id=str(user.id),
|
||||
username=user.username,
|
||||
@ -162,7 +219,25 @@ async def delete_user(user_id: str):
|
||||
user = await User.get(PydanticObjectId(user_id))
|
||||
if not user:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
user_id_str = str(user.id)
|
||||
username = user.username
|
||||
|
||||
await user.delete()
|
||||
|
||||
# Publish event
|
||||
if kafka_producer:
|
||||
event = Event(
|
||||
event_type=EventType.USER_DELETED,
|
||||
service="users",
|
||||
data={
|
||||
"user_id": user_id_str,
|
||||
"username": username
|
||||
},
|
||||
user_id=user_id_str
|
||||
)
|
||||
await kafka_producer.send_event("user-events", event)
|
||||
|
||||
return {"message": "User deleted successfully"}
|
||||
except Exception:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
@ -3,4 +3,5 @@ uvicorn[standard]==0.27.0
|
||||
pydantic[email]==2.5.3
|
||||
pymongo==4.6.1
|
||||
motor==3.3.2
|
||||
beanie==1.23.6
aiokafka==0.10.0
|
||||
Reference in New Issue
Block a user