From 3e224d221aed76dbb743429a1c0c5099d235a184 Mon Sep 17 00:00:00 2001 From: jungwoo choi Date: Fri, 13 Feb 2026 17:14:32 +0900 Subject: [PATCH] =?UTF-8?q?feat:=20=EC=82=AC=EC=9D=B4=ED=8A=B8=20=EA=B2=80?= =?UTF-8?q?=EC=82=AC=20=EC=B5=9C=EB=8C=80=20=ED=8E=98=EC=9D=B4=EC=A7=80=20?= =?UTF-8?q?=EC=88=98=20=EB=AC=B4=EC=A0=9C=ED=95=9C=20=EC=98=B5=EC=85=98=20?= =?UTF-8?q?=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - max_pages=0으로 무제한 모드 지원 (안전 상한 500페이지) - 프론트엔드에 "무제한" 버튼 추가 Co-Authored-By: Claude Opus 4.6 --- backend/app/models/site_schemas.py | 2 +- backend/app/services/link_crawler.py | 6 +++++- frontend/src/components/inspection/UrlInputForm.tsx | 6 +++--- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/backend/app/models/site_schemas.py b/backend/app/models/site_schemas.py index 39e3469..10a2a09 100644 --- a/backend/app/models/site_schemas.py +++ b/backend/app/models/site_schemas.py @@ -28,7 +28,7 @@ class PageStatus(str, Enum): class StartSiteInspectionRequest(BaseModel): url: HttpUrl - max_pages: int = Field(default=20, ge=1, le=50, description="최대 크롤링 페이지 수") + max_pages: int = Field(default=20, ge=0, le=500, description="최대 크롤링 페이지 수 (0=무제한)") max_depth: int = Field(default=2, ge=1, le=3, description="최대 크롤링 깊이") diff --git a/backend/app/services/link_crawler.py b/backend/app/services/link_crawler.py index 46c3522..bfc388f 100644 --- a/backend/app/services/link_crawler.py +++ b/backend/app/services/link_crawler.py @@ -29,6 +29,9 @@ _SKIP_EXTENSIONS = { # Type alias for progress callback: (pages_found, current_url) -> None ProgressCallback = Callable[[int, str], Awaitable[None]] +# Safety limit for "unlimited" mode to prevent runaway crawls +_UNLIMITED_SAFETY_CAP = 500 + def normalize_url(url: str) -> str: """ @@ -112,7 +115,8 @@ class LinkCrawler: max_depth: int = 2, ): self.root_url = normalize_url(root_url) - self.max_pages = max_pages + # 0 means unlimited → use safety cap + 
+        self.max_pages = max_pages if max_pages > 0 else _UNLIMITED_SAFETY_CAP