diff --git a/backend/app/core/config.py b/backend/app/core/config.py index 455425f..17adeca 100644 --- a/backend/app/core/config.py +++ b/backend/app/core/config.py @@ -24,7 +24,7 @@ class Settings(BaseSettings): # Site inspection SITE_MAX_PAGES: int = 500 SITE_MAX_DEPTH: int = 2 - SITE_CONCURRENCY: int = 2 + SITE_CONCURRENCY: int = 8 # Application PROJECT_NAME: str = "Web Inspector API" diff --git a/backend/app/models/site_schemas.py b/backend/app/models/site_schemas.py index 10a2a09..cc5b9fd 100644 --- a/backend/app/models/site_schemas.py +++ b/backend/app/models/site_schemas.py @@ -30,6 +30,7 @@ class StartSiteInspectionRequest(BaseModel): url: HttpUrl max_pages: int = Field(default=20, ge=0, le=500, description="최대 크롤링 페이지 수 (0=무제한)") max_depth: int = Field(default=2, ge=1, le=3, description="최대 크롤링 깊이") + concurrency: int = Field(default=4, ge=1, le=8, description="동시 검사 수") class InspectPageRequest(BaseModel): @@ -67,6 +68,7 @@ class SiteInspectionConfig(BaseModel): """사이트 검사 설정.""" max_pages: int = 20 max_depth: int = 2 + concurrency: int = 4 # --- Response Models --- diff --git a/backend/app/routers/site_inspections.py b/backend/app/routers/site_inspections.py index 76a31cf..f5391dd 100644 --- a/backend/app/routers/site_inspections.py +++ b/backend/app/routers/site_inspections.py @@ -66,6 +66,7 @@ async def start_site_inspection(request: StartSiteInspectionRequest): url=url, max_pages=request.max_pages, max_depth=request.max_depth, + concurrency=request.concurrency, ) except httpx.HTTPStatusError as e: raise HTTPException( diff --git a/backend/app/services/site_inspection_service.py b/backend/app/services/site_inspection_service.py index 7a1c5b1..149e918 100644 --- a/backend/app/services/site_inspection_service.py +++ b/backend/app/services/site_inspection_service.py @@ -49,6 +49,7 @@ class SiteInspectionService: url: str, max_pages: int = 20, max_depth: int = 2, + concurrency: int = 4, ) -> str: """ Start a site-wide inspection. 
@@ -65,6 +66,7 @@ class SiteInspectionService: if max_pages > 0: max_pages = min(max_pages, settings.SITE_MAX_PAGES) max_depth = min(max_depth, settings.SITE_MAX_DEPTH) + concurrency = min(concurrency, settings.SITE_CONCURRENCY) site_inspection_id = str(uuid.uuid4()) parsed = urlparse(url) @@ -81,6 +83,7 @@ class SiteInspectionService: "config": { "max_pages": max_pages, "max_depth": max_depth, + "concurrency": concurrency, }, "discovered_pages": [], "aggregate_scores": None, @@ -88,13 +91,13 @@ class SiteInspectionService: await self.db.site_inspections.insert_one(doc) logger.info( - "Site inspection started: id=%s, url=%s, max_pages=%d, max_depth=%d", - site_inspection_id, url, max_pages, max_depth, + "Site inspection started: id=%s, url=%s, max_pages=%d, max_depth=%d, concurrency=%d", + site_inspection_id, url, max_pages, max_depth, concurrency, ) # Launch background task asyncio.create_task( - self._crawl_and_inspect(site_inspection_id, url, max_pages, max_depth) + self._crawl_and_inspect(site_inspection_id, url, max_pages, max_depth, concurrency) ) return site_inspection_id @@ -268,6 +271,7 @@ class SiteInspectionService: url: str, max_pages: int, max_depth: int, + concurrency: int = 4, ) -> None: """ Background task that runs in two phases: @@ -349,8 +353,7 @@ class SiteInspectionService: # ============================== logger.info("Phase 2 (inspection) started: %s", site_inspection_id) - settings = get_settings() - semaphore = asyncio.Semaphore(settings.SITE_CONCURRENCY) + semaphore = asyncio.Semaphore(concurrency) tasks = [ self._inspect_page_with_semaphore( diff --git a/frontend/src/components/inspection/UrlInputForm.tsx b/frontend/src/components/inspection/UrlInputForm.tsx index 0a44f57..dded516 100644 --- a/frontend/src/components/inspection/UrlInputForm.tsx +++ b/frontend/src/components/inspection/UrlInputForm.tsx @@ -18,6 +18,9 @@ const MAX_PAGES_OPTIONS = [10, 20, 50, 0] as const; /** 크롤링 깊이 옵션 */ const MAX_DEPTH_OPTIONS = [1, 2, 3] as const; +/** 동시 
검사 수 옵션 */ +const CONCURRENCY_OPTIONS = [1, 2, 4, 8] as const; + export function UrlInputForm() { const [url, setUrl] = useState(""); const [error, setError] = useState(null); @@ -26,6 +29,7 @@ export function UrlInputForm() { const [showSiteOptions, setShowSiteOptions] = useState(false); const [maxPages, setMaxPages] = useState(20); const [maxDepth, setMaxDepth] = useState(2); + const [concurrency, setConcurrency] = useState(4); const router = useRouter(); const { setInspection } = useInspectionStore(); const { setSiteInspection } = useSiteInspectionStore(); @@ -101,7 +105,8 @@ export function UrlInputForm() { const response = await api.startSiteInspection( trimmedUrl, maxPages, - maxDepth + maxDepth, + concurrency ); setSiteInspection(response.site_inspection_id, trimmedUrl); router.push( @@ -225,7 +230,7 @@ export function UrlInputForm() { {/* 크롤링 깊이 */} -
+
@@ -250,6 +255,32 @@ export function UrlInputForm() {
+ {/* 동시 검사 수 */} +
+ +
+ {CONCURRENCY_OPTIONS.map((option) => ( + + ))} +
+
+ {/* 사이트 검사 시작 버튼 */}