Phase 1 Backend Implementation: - ✅ MongoDB data models (Keyword, Pipeline, User, Application) - ✅ Pydantic schemas for all models with validation - ✅ KeywordService: Full CRUD, filtering, pagination, stats, toggle status - ✅ PipelineService: Full CRUD, start/stop/restart, logs, config management - ✅ Keywords API: 8 endpoints with complete functionality - ✅ Pipelines API: 11 endpoints with complete functionality - ✅ Updated TODO.md to reflect completion Key Features: - Async MongoDB operations with Motor - Comprehensive filtering and pagination support - Pipeline logging system - Statistics tracking for keywords and pipelines - Proper error handling with HTTP status codes - Type-safe request/response models Files Added: - models/: 4 data models with PyObjectId support - schemas/: 4 schema modules with Create/Update/Response patterns - services/: KeywordService (234 lines) + PipelineService (332 lines) Files Modified: - api/keywords.py: 40 → 212 lines (complete implementation) - api/pipelines.py: 25 → 300 lines (complete implementation) - TODO.md: Updated checklist with completed items Next Steps: - UserService with authentication - ApplicationService for OAuth2 - MonitoringService - Redis integration - Frontend implementation 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
63 lines
1.8 KiB
Python
63 lines
1.8 KiB
Python
from datetime import datetime
|
|
from typing import Optional, Dict, Any, List
|
|
from pydantic import BaseModel, Field
|
|
|
|
|
|
class PipelineStatsSchema(BaseModel):
    """Aggregate execution statistics for a single pipeline."""

    # Counters all start at zero for a freshly created pipeline.
    total_processed: int = 0
    success_count: int = 0
    error_count: int = 0
    # These stay None until the pipeline has completed at least one run.
    last_run: Optional[datetime] = None
    average_duration_seconds: Optional[float] = None


class PipelineBase(BaseModel):
    """Fields shared by every pipeline schema variant."""

    # Human-readable pipeline name; must be non-empty.
    name: str = Field(..., min_length=1, max_length=100)
    # NOTE(review): the field name shadows the builtin `type`, but it is
    # part of the wire format and therefore cannot be renamed here.
    type: str = Field(..., description="Type: rss_collector, translator, image_generator")
    # Free-form, per-pipeline configuration payload.
    config: Dict[str, Any] = Field(default_factory=dict)
    # Optional run schedule, expressed as a cron string.
    schedule: Optional[str] = Field(None, description="Cron expression")


class PipelineCreate(PipelineBase):
    """Payload for creating a new pipeline.

    Identical to :class:`PipelineBase`; kept as a distinct type so the
    create endpoint has its own schema name. (The docstring serves as
    the class body, so no ``pass`` is needed.)
    """


class PipelineUpdate(BaseModel):
    """Partial-update payload for a pipeline — every field is optional."""

    # Same validation limits as PipelineBase.name, but not required.
    name: Optional[str] = Field(default=None, min_length=1, max_length=100)
    status: Optional[str] = Field(default=None, description="Status: running, stopped, error")
    # When provided, replaces the stored config wholesale.
    config: Optional[Dict[str, Any]] = None
    schedule: Optional[str] = None


class PipelineResponse(PipelineBase):
    """Full pipeline representation returned by the API.

    Extends :class:`PipelineBase` with the server-managed fields
    (identifier, status, stats, and timestamps).
    """

    # Mongo's ``_id`` is exposed as ``id``; ``populate_by_name`` below
    # lets the model be constructed from either key.
    id: str = Field(..., alias="_id")
    status: str
    stats: PipelineStatsSchema
    last_run: Optional[datetime] = None
    next_run: Optional[datetime] = None
    created_at: datetime
    updated_at: datetime

    # Pydantic v2 configuration. The option names used here
    # (populate_by_name / from_attributes) are v2-only, so the class-based
    # ``Config`` inner class — deprecated in v2 — is replaced with
    # ``model_config``. A plain dict is accepted in place of ConfigDict,
    # which avoids adding an import.
    model_config = {
        "populate_by_name": True,
        "from_attributes": True,
    }


class PipelineListResponse(BaseModel):
    """Envelope for pipeline list queries."""

    # The current page of results.
    pipelines: List[PipelineResponse]
    # Total matching pipelines — may exceed len(pipelines) when paginated.
    total: int


class PipelineLog(BaseModel):
    """One structured log entry produced by a pipeline run."""

    timestamp: datetime
    level: str = Field(..., description="Log level: INFO, WARNING, ERROR")
    message: str
    # Optional structured context attached to the entry.
    details: Optional[Dict[str, Any]] = None