feat: Phase 1 - Complete authentication system with JWT

Backend Implementation (FastAPI + MongoDB):
- JWT authentication with access/refresh tokens
- User registration and login endpoints
- Password hashing with bcrypt (fixed 72-byte limit)
- Protected endpoints with JWT middleware
- Token refresh mechanism
- Role-Based Access Control (RBAC) structure
- Pydantic v2 models and async MongoDB with Motor
- API endpoints: /api/auth/register, /api/auth/login, /api/auth/me, /api/auth/refresh

Frontend Implementation (React + TypeScript + Material-UI):
- Login and Register pages with validation
- AuthContext for global authentication state
- API client with Axios interceptors for token refresh
- Protected routes with automatic redirect
- User profile display in navigation
- Logout functionality

Technical Achievements:
- Resolved bcrypt 72-byte limit (replaced passlib with native bcrypt)
- Fixed Pydantic v2 compatibility (PyObjectId, ConfigDict)
- Implemented automatic token refresh on 401 errors
- Created comprehensive test suite for all auth endpoints

Docker & Kubernetes:
- Backend image: yakenator/site11-console-backend:latest
- Frontend image: yakenator/site11-console-frontend:latest
- Deployed to site11-pipeline namespace
- Nginx reverse proxy configuration

Documentation:
- CONSOLE_ARCHITECTURE.md - Complete system architecture
- PHASE1_COMPLETION.md - Detailed completion report
- PROGRESS.md - Updated with Phase 1 status

All authentication endpoints tested and verified working.

🤖 Generated with Claude Code
Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
jungwoo choi
2025-10-28 16:23:07 +09:00
parent 161f206ae2
commit f4b75b96a5
51 changed files with 2480 additions and 100 deletions

View File

@@ -0,0 +1,6 @@
# Public API of the Kafka messaging package: re-export the producer,
# consumer, event model, and schema registry so callers can import them
# directly from the package root.
from .producer import KafkaProducer
from .consumer import KafkaConsumer
from .events import Event, EventType
from .schema_registry import SchemaRegistry

__all__ = ['KafkaProducer', 'KafkaConsumer', 'Event', 'EventType', 'SchemaRegistry']

View File

@@ -0,0 +1,125 @@
import json
import asyncio
from typing import Optional, Callable, Dict, Any, List
from aiokafka import AIOKafkaConsumer
from aiokafka.errors import KafkaError
import logging
from .events import Event, EventType
logger = logging.getLogger(__name__)
class KafkaConsumer:
    """Async Kafka consumer that dispatches JSON events to registered handlers.

    Wraps ``aiokafka.AIOKafkaConsumer``: messages are JSON-decoded, converted
    to :class:`Event`, and routed to the handlers registered for their
    :class:`EventType`.
    """

    def __init__(
        self,
        topics: List[str],
        group_id: str,
        bootstrap_servers: str = "kafka:9092"
    ):
        """
        Args:
            topics: Kafka topics to subscribe to.
            group_id: Consumer-group id (controls offset sharing/rebalancing).
            bootstrap_servers: Kafka bootstrap address.
        """
        self.topics = topics
        self.group_id = group_id
        self.bootstrap_servers = bootstrap_servers
        self._consumer: Optional[AIOKafkaConsumer] = None
        self._handlers: Dict[EventType, List[Callable]] = {}
        self._running = False
        # Keep a reference to the background consume task: a fire-and-forget
        # asyncio.create_task() result may be garbage-collected mid-run, and
        # stop() needs the handle to cancel the loop cleanly.
        self._consume_task: Optional[asyncio.Task] = None

    def register_handler(self, event_type: EventType, handler: Callable):
        """Register a handler (sync or async callable) for an event type."""
        if event_type not in self._handlers:
            self._handlers[event_type] = []
        self._handlers[event_type].append(handler)
        logger.info(f"Registered handler for {event_type}")

    async def start(self):
        """Start the Kafka consumer and launch the background message loop.

        Raises:
            Exception: re-raised if the underlying consumer fails to start.
        """
        try:
            self._consumer = AIOKafkaConsumer(
                *self.topics,
                bootstrap_servers=self.bootstrap_servers,
                group_id=self.group_id,
                value_deserializer=lambda v: json.loads(v.decode()),
                auto_offset_reset='earliest',
                enable_auto_commit=True,
                auto_commit_interval_ms=1000,
                session_timeout_ms=30000,
                heartbeat_interval_ms=10000
            )
            await self._consumer.start()
            self._running = True
            logger.info(f"Kafka Consumer started: {self.topics} (group: {self.group_id})")
            # Start the message-processing loop, retaining the task handle
            # (fixes the original fire-and-forget create_task call).
            self._consume_task = asyncio.create_task(self._consume_messages())
        except Exception as e:
            logger.error(f"Failed to start Kafka Consumer: {e}")
            raise

    async def stop(self):
        """Cancel the background loop and shut the consumer down cleanly."""
        self._running = False
        if self._consume_task:
            # Cancel and await the loop task so no iteration outlives stop().
            self._consume_task.cancel()
            try:
                await self._consume_task
            except asyncio.CancelledError:
                pass
            self._consume_task = None
        if self._consumer:
            await self._consumer.stop()
            logger.info("Kafka Consumer stopped")

    async def _consume_messages(self):
        """Message-consumption loop; runs until stop() is called."""
        if not self._consumer:
            return
        while self._running:
            try:
                # Fetch a batch of messages (wait at most 100 ms).
                msg_batch = await self._consumer.getmany(timeout_ms=100)
                for tp, messages in msg_batch.items():
                    for msg in messages:
                        await self._process_message(msg.value)
            except asyncio.CancelledError:
                # stop() cancelled us — propagate so the task finishes.
                raise
            except KafkaError as e:
                logger.error(f"Kafka error: {e}")
                await asyncio.sleep(1)
            except Exception as e:
                logger.error(f"Error processing messages: {e}")
                await asyncio.sleep(1)

    async def _process_message(self, message: Dict[str, Any]):
        """Validate one decoded message and fan it out to its handlers."""
        try:
            # Convert the raw dict into a validated Event.
            event = Event(**message)
            # Run every handler registered for this event type; a failing
            # handler is logged but does not block the others.
            handlers = self._handlers.get(event.event_type, [])
            for handler in handlers:
                try:
                    if asyncio.iscoroutinefunction(handler):
                        await handler(event)
                    else:
                        handler(event)
                except Exception as e:
                    logger.error(f"Handler error for {event.event_type}: {e}")
            if not handlers:
                logger.debug(f"No handlers for event type: {event.event_type}")
        except Exception as e:
            logger.error(f"Failed to process message: {e}")

    async def consume_one(self, timeout: float = 1.0) -> Optional[Event]:
        """Consume a single message (for tests/debugging).

        NOTE(review): if start() has been called, the background loop is
        pulling from the same consumer — confirm this is only used while the
        loop is not running.

        Returns:
            The next Event, or None on timeout/error.
        """
        if not self._consumer:
            return None
        try:
            msg = await asyncio.wait_for(
                self._consumer.getone(),
                timeout=timeout
            )
            return Event(**msg.value)
        except asyncio.TimeoutError:
            return None
        except Exception as e:
            logger.error(f"Error consuming message: {e}")
            return None

View File

@@ -0,0 +1,31 @@
from enum import Enum
from pydantic import BaseModel, Field
from datetime import datetime
from typing import Any, Optional, Dict
class EventType(str, Enum):
    """Canonical event-type identifiers for messages on the Kafka bus.

    Values are dotted "<domain>.<action>" strings; the str mixin lets members
    compare equal to their raw string values after a JSON round-trip.
    """

    # User lifecycle events
    USER_CREATED = "user.created"
    USER_UPDATED = "user.updated"
    USER_DELETED = "user.deleted"
    USER_LOGIN = "user.login"
    # Image pipeline events
    IMAGE_UPLOADED = "image.uploaded"
    IMAGE_CACHED = "image.cached"
    IMAGE_DELETED = "image.deleted"
    # Background task events
    TASK_CREATED = "task.created"
    TASK_COMPLETED = "task.completed"
    TASK_FAILED = "task.failed"
class Event(BaseModel):
    """Envelope for every message published to Kafka.

    ``data`` carries the event-type-specific payload; when set,
    ``correlation_id`` is used by the producer as the Kafka message key.
    """

    event_type: EventType
    # NOTE(review): default_factory=datetime.now yields a naive local-time
    # stamp — confirm whether UTC-aware timestamps are expected downstream.
    timestamp: datetime = Field(default_factory=datetime.now)
    service: str
    data: Dict[str, Any]
    correlation_id: Optional[str] = None
    user_id: Optional[str] = None

    class Config:
        # NOTE(review): `class Config` with json_encoders is the pydantic v1
        # idiom and is deprecated under v2 — confirm the pinned version and
        # migrate to model_config / field serializers if on v2.
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }

View File

@@ -0,0 +1,101 @@
import json
import asyncio
from typing import Optional, Dict, Any
from aiokafka import AIOKafkaProducer
from aiokafka.errors import KafkaError
import logging
from .events import Event
logger = logging.getLogger(__name__)
class KafkaProducer:
    """Async producer that publishes Event objects to Kafka as JSON.

    Wraps ``aiokafka.AIOKafkaProducer`` with gzip compression and acks='all'.
    """

    def __init__(self, bootstrap_servers: str = "kafka:9092"):
        """
        Args:
            bootstrap_servers: Kafka bootstrap address.
        """
        self.bootstrap_servers = bootstrap_servers
        self._producer: Optional[AIOKafkaProducer] = None

    @staticmethod
    def _event_payload(event: Event) -> Dict[str, Any]:
        """Serialize an Event to a JSON-safe dict (ISO-8601 timestamp)."""
        # NOTE(review): .dict() is the pydantic v1 API (deprecated under v2);
        # confirm the pinned pydantic version before moving to model_dump().
        event_dict = event.dict()
        event_dict['timestamp'] = event.timestamp.isoformat()
        return event_dict

    async def start(self):
        """Start the underlying AIOKafkaProducer.

        Raises:
            Exception: re-raised if the producer fails to start.
        """
        try:
            self._producer = AIOKafkaProducer(
                bootstrap_servers=self.bootstrap_servers,
                value_serializer=lambda v: json.dumps(v).encode(),
                compression_type="gzip",
                acks='all',
                retry_backoff_ms=100
            )
            await self._producer.start()
            logger.info(f"Kafka Producer started: {self.bootstrap_servers}")
        except Exception as e:
            logger.error(f"Failed to start Kafka Producer: {e}")
            raise

    async def stop(self):
        """Stop the producer (flushes pending messages)."""
        if self._producer:
            await self._producer.stop()
            logger.info("Kafka Producer stopped")

    async def send_event(self, topic: str, event: Event) -> bool:
        """Send one event; correlation_id (if set) becomes the message key.

        Returns:
            True when the broker acknowledged the message, False otherwise.
        """
        if not self._producer:
            logger.error("Producer not started")
            return False
        try:
            await self._producer.send_and_wait(
                topic,
                value=self._event_payload(event),
                key=event.correlation_id.encode() if event.correlation_id else None
            )
            logger.info(f"Event sent to {topic}: {event.event_type}")
            return True
        except KafkaError as e:
            logger.error(f"Failed to send event to {topic}: {e}")
            return False
        except Exception as e:
            logger.error(f"Unexpected error sending event: {e}")
            return False

    async def send_batch(self, topic: str, events: list[Event]) -> int:
        """Send several events using aiokafka's batch API.

        NOTE(review): recent aiokafka releases require a ``partition`` kwarg
        on producer.send_batch — confirm against the pinned aiokafka version.

        Returns:
            Number of events handed to the broker.
        """
        if not self._producer:
            logger.error("Producer not started")
            return 0
        sent_count = 0
        batch = self._producer.create_batch()
        for event in events:
            value = json.dumps(self._event_payload(event)).encode()
            key = event.correlation_id.encode() if event.correlation_id else None
            if batch.append(key=key, value=value, timestamp=None) is None:
                # Batch buffer is full: flush it, then retry in a fresh batch.
                # (Original used len(batch), which BatchBuilder does not
                # implement; record_count() is the supported counter.)
                await self._producer.send_batch(batch, topic)
                sent_count += batch.record_count()
                batch = self._producer.create_batch()
                if batch.append(key=key, value=value, timestamp=None) is None:
                    # A single record larger than the whole batch buffer:
                    # log it instead of dropping it silently (original bug).
                    logger.error(
                        f"Event too large for batch, skipped: {event.event_type}"
                    )
        # Flush whatever remains in the final partial batch.
        if batch.record_count() > 0:
            await self._producer.send_batch(batch, topic)
            sent_count += batch.record_count()
        logger.info(f"Sent {sent_count} events to {topic}")
        return sent_count

View File

@@ -0,0 +1,333 @@
"""
이벤트 스키마 레지스트리
이벤트 스키마 정의 및 버전 관리
"""
from typing import Dict, Any, Optional, List, Literal
from enum import Enum
from pydantic import BaseModel, Field, field_validator
from datetime import datetime
import json
class SchemaVersion(str, Enum):
    """Supported event-schema versions (semver strings; str mixin allows
    direct comparison with raw version strings)."""
    V1 = "1.0.0"
    V2 = "2.0.0"
class EventSchemaBase(BaseModel):
    """Base event schema: envelope fields shared by all concrete event schemas."""

    event_id: str = Field(..., description="고유 이벤트 ID")
    event_type: str = Field(..., description="이벤트 타입")
    timestamp: datetime = Field(default_factory=datetime.now, description="이벤트 발생 시간")
    version: str = Field(default=SchemaVersion.V1, description="스키마 버전")
    service: str = Field(..., description="이벤트 발생 서비스")

    class Config:
        # NOTE(review): v1-style Config is deprecated under pydantic v2, even
        # though this module already uses v2's field_validator — confirm the
        # pinned version and migrate to model_config if on v2.
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }
# User Events Schemas
class UserCreatedSchema(EventSchemaBase):
    """Schema for user-creation events."""

    event_type: Literal["USER_CREATED"] = "USER_CREATED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # The payload must fully identify the newly created account.
        for required in ('user_id', 'username', 'email'):
            if required not in v:
                raise ValueError(f"Missing required field: {required}")
        return v
class UserUpdatedSchema(EventSchemaBase):
    """Schema for user-update events.

    Required payload field: ``user_id``.
    Optional payload fields: username, email, full_name, profile_picture,
    bio, location, website, updated_fields.
    """

    event_type: Literal["USER_UPDATED"] = "USER_UPDATED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # (Removed the unused `optional_fields` local; the optional fields
        # are documented in the class docstring instead.)
        if 'user_id' not in v:
            raise ValueError("Missing required field: user_id")
        # updated_fields, when present, must be a list of field names.
        if 'updated_fields' in v and not isinstance(v['updated_fields'], list):
            raise ValueError("updated_fields must be a list")
        return v
class UserDeletedSchema(EventSchemaBase):
    """Schema for user-deletion events."""

    event_type: Literal["USER_DELETED"] = "USER_DELETED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # Both the id and the username of the removed account are required.
        for required in ('user_id', 'username'):
            if required not in v:
                raise ValueError(f"Missing required field: {required}")
        return v
# OAuth Events Schemas
class OAuthAppCreatedSchema(EventSchemaBase):
    """Schema for OAuth application-creation events."""

    event_type: Literal["OAUTH_APP_CREATED"] = "OAUTH_APP_CREATED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # A new app must carry its id, display name, owner, and client id.
        for required in ('app_id', 'name', 'owner_id', 'client_id'):
            if required not in v:
                raise ValueError(f"Missing required field: {required}")
        return v
class OAuthTokenIssuedSchema(EventSchemaBase):
    """Schema for OAuth token-issuance events.

    Required payload fields: ``client_id``, ``grant_type``.
    Optional payload fields: user_id, scopes, expires_in.
    """

    event_type: Literal["OAUTH_TOKEN_ISSUED"] = "OAUTH_TOKEN_ISSUED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # (Removed the unused `optional_fields` local; optional fields are
        # documented in the class docstring instead.)
        for field in ['client_id', 'grant_type']:
            if field not in v:
                raise ValueError(f"Missing required field: {field}")
        # scopes, when present, must be a list.
        if 'scopes' in v and not isinstance(v['scopes'], list):
            raise ValueError("scopes must be a list")
        return v
class OAuthTokenRevokedSchema(EventSchemaBase):
    """Schema for OAuth token-revocation events.

    Required payload fields: ``token_id``, ``client_id``.
    Optional payload fields: user_id, revoked_by.
    """

    event_type: Literal["OAUTH_TOKEN_REVOKED"] = "OAUTH_TOKEN_REVOKED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # (Removed the unused `optional_fields` local; optional fields are
        # documented in the class docstring instead.)
        for field in ['token_id', 'client_id']:
            if field not in v:
                raise ValueError(f"Missing required field: {field}")
        return v
# Image Events Schemas
class ImageUploadedSchema(EventSchemaBase):
    """Schema for image-upload events.

    Required payload fields: ``image_id``, ``user_id``, ``url``.
    Optional payload fields: size, mime_type, width, height, thumbnail_url.
    """

    event_type: Literal["IMAGE_UPLOADED"] = "IMAGE_UPLOADED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # (Removed the unused `optional_fields` local; optional fields are
        # documented in the class docstring instead.)
        for field in ['image_id', 'user_id', 'url']:
            if field not in v:
                raise ValueError(f"Missing required field: {field}")
        return v
class ImageProcessedSchema(EventSchemaBase):
    """Schema for image-processing-completed events.

    Required payload fields: ``image_id``, ``process_type``.
    Optional payload fields: original_url, processed_url, processing_time_ms.
    """

    event_type: Literal["IMAGE_PROCESSED"] = "IMAGE_PROCESSED"
    data: Dict[str, Any] = Field(..., description="이벤트 데이터")

    @field_validator('data')
    @classmethod
    def validate_data(cls, v):
        # (Removed the unused `optional_fields` local; optional fields are
        # documented in the class docstring instead.)
        for field in ['image_id', 'process_type']:
            if field not in v:
                raise ValueError(f"Missing required field: {field}")
        return v
class SchemaRegistry:
    """Registry mapping event types to their pydantic schemas.

    Provides validation, version-compatibility checks, migration, and
    documentation export.

    NOTE(review): keys here are enum NAMES ("USER_CREATED"), while
    events.EventType values are dotted strings ("user.created") — confirm
    which form `event_type` carries on the wire before relying on
    validate_event against messages built from the Event model.
    """

    # Event-type name -> schema class mapping.
    SCHEMAS = {
        "USER_CREATED": UserCreatedSchema,
        "USER_UPDATED": UserUpdatedSchema,
        "USER_DELETED": UserDeletedSchema,
        "OAUTH_APP_CREATED": OAuthAppCreatedSchema,
        "OAUTH_TOKEN_ISSUED": OAuthTokenIssuedSchema,
        "OAUTH_TOKEN_REVOKED": OAuthTokenRevokedSchema,
        "IMAGE_UPLOADED": ImageUploadedSchema,
        "IMAGE_PROCESSED": ImageProcessedSchema,
    }

    # Compatibility matrix: target version -> accepted source versions.
    COMPATIBILITY_MATRIX = {
        SchemaVersion.V1: [SchemaVersion.V1],
        SchemaVersion.V2: [SchemaVersion.V1, SchemaVersion.V2],  # V2 accepts V1
    }

    @classmethod
    def get_schema(cls, event_type: str) -> Optional[type]:
        """Return the schema class for *event_type*, or None if unknown."""
        return cls.SCHEMAS.get(event_type)

    @classmethod
    def validate_event(cls, event_data: Dict[str, Any]) -> tuple[bool, Optional[str]]:
        """Validate raw event data against its registered schema.

        Returns:
            (True, None) on success, (False, error_message) on any failure.
        """
        try:
            event_type = event_data.get('event_type')
            if not event_type:
                return False, "Missing event_type"
            schema_class = cls.get_schema(event_type)
            if not schema_class:
                return False, f"Unknown event type: {event_type}"
            # Instantiating the schema runs the field validators.
            schema_class(**event_data)
            return True, None
        except Exception as e:
            return False, str(e)

    @classmethod
    def is_compatible(cls, from_version: str, to_version: str) -> bool:
        """Return True if *from_version* data is accepted by *to_version*.

        NOTE(review): unknown version strings raise ValueError here (via the
        SchemaVersion constructor) rather than returning False — confirm
        that is the intended contract.
        """
        from_v = SchemaVersion(from_version)
        to_v = SchemaVersion(to_version)
        compatible_versions = cls.COMPATIBILITY_MATRIX.get(to_v, [])
        return from_v in compatible_versions

    @classmethod
    def migrate_event(
        cls,
        event_data: Dict[str, Any],
        from_version: str,
        to_version: str
    ) -> Dict[str, Any]:
        """Migrate event data between schema versions.

        NOTE(review): mutates *event_data* in place and returns the same
        dict — callers holding the original reference see the changes.

        Raises:
            ValueError: if the versions are not compatible.
        """
        if from_version == to_version:
            return event_data
        if not cls.is_compatible(from_version, to_version):
            raise ValueError(f"Cannot migrate from {from_version} to {to_version}")
        # Per-version migration logic.
        if from_version == SchemaVersion.V1 and to_version == SchemaVersion.V2:
            # V1 -> V2: bump the version tag and add new fields with defaults.
            event_data['version'] = SchemaVersion.V2
            if 'metadata' not in event_data:
                event_data['metadata'] = {}
        return event_data

    @classmethod
    def get_all_schemas(cls) -> Dict[str, Dict[str, Any]]:
        """Return description/fields/version/example for every registered
        schema (used for documentation export)."""
        schemas_info = {}
        for event_type, schema_class in cls.SCHEMAS.items():
            schemas_info[event_type] = {
                "description": schema_class.__doc__,
                # NOTE(review): .schema() is the pydantic v1 API (deprecated
                # in v2 in favor of model_json_schema) — confirm version.
                "fields": schema_class.schema(),
                "version": SchemaVersion.V1,
                "example": cls._generate_example(schema_class)
            }
        return schemas_info

    @classmethod
    def _generate_example(cls, schema_class: type) -> Dict[str, Any]:
        """Return a canned example payload for *schema_class*, or {} if none
        is defined below."""
        examples = {
            "USER_CREATED": {
                "event_id": "evt_123456",
                "event_type": "USER_CREATED",
                "timestamp": datetime.now().isoformat(),
                "version": "1.0.0",
                "service": "users",
                "data": {
                    "user_id": "usr_abc123",
                    "username": "johndoe",
                    "email": "john@example.com"
                }
            },
            "USER_UPDATED": {
                "event_id": "evt_123457",
                "event_type": "USER_UPDATED",
                "timestamp": datetime.now().isoformat(),
                "version": "1.0.0",
                "service": "users",
                "data": {
                    "user_id": "usr_abc123",
                    "updated_fields": ["profile_picture", "bio"],
                    "profile_picture": "https://example.com/pic.jpg",
                    "bio": "Updated bio"
                }
            },
            "OAUTH_TOKEN_ISSUED": {
                "event_id": "evt_123458",
                "event_type": "OAUTH_TOKEN_ISSUED",
                "timestamp": datetime.now().isoformat(),
                "version": "1.0.0",
                "service": "oauth",
                "data": {
                    "client_id": "app_xyz789",
                    "user_id": "usr_abc123",
                    "grant_type": "authorization_code",
                    "scopes": ["profile", "email"],
                    "expires_in": 3600
                }
            }
        }
        # Look up by the schema's Literal event_type default.
        # NOTE(review): __fields__ access is the pydantic v1 spelling
        # (model_fields in v2) — confirm version.
        return examples.get(schema_class.__fields__['event_type'].default, {})

    @classmethod
    def export_schemas(cls, format: str = "json") -> str:
        """Export all schemas as a "json" or "markdown" document.

        Raises:
            ValueError: for any other *format*.
        """
        schemas = cls.get_all_schemas()
        if format == "json":
            return json.dumps(schemas, indent=2, default=str)
        elif format == "markdown":
            return cls._export_as_markdown(schemas)
        else:
            raise ValueError(f"Unsupported format: {format}")

    @classmethod
    def _export_as_markdown(cls, schemas: Dict[str, Dict[str, Any]]) -> str:
        """Render the schema info dict as a Markdown document."""
        md = "# Event Schema Registry\n\n"
        for event_type, info in schemas.items():
            md += f"## {event_type}\n\n"
            md += f"{info['description']}\n\n"
            md += f"**Version:** {info['version']}\n\n"
            md += "**Example:**\n```json\n"
            md += json.dumps(info['example'], indent=2, default=str)
            md += "\n```\n\n"
        return md