From f5d7639de8ae82b1c7bbdd2c9c98f1c56f449458 Mon Sep 17 00:00:00 2001
From: Jonathan Dobson
Date: Fri, 18 Jul 2025 10:20:33 -0400
Subject: [PATCH] allow empty urls (#2984)

---
 skyvern/webeye/scraper/scraper.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/skyvern/webeye/scraper/scraper.py b/skyvern/webeye/scraper/scraper.py
index 5f45e460..f6cba310 100644
--- a/skyvern/webeye/scraper/scraper.py
+++ b/skyvern/webeye/scraper/scraper.py
@@ -13,7 +13,7 @@ from pydantic import BaseModel, PrivateAttr
 
 from skyvern.config import settings
 from skyvern.constants import BUILDING_ELEMENT_TREE_TIMEOUT_MS, DEFAULT_MAX_TOKENS, SKYVERN_DIR, SKYVERN_ID_ATTR
-from skyvern.exceptions import FailedToTakeScreenshot, ScrapingFailed, ScrapingFailedNoUrl, UnknownElementTreeFormat
+from skyvern.exceptions import FailedToTakeScreenshot, ScrapingFailed, UnknownElementTreeFormat
 from skyvern.forge.sdk.api.crypto import calculate_sha256
 from skyvern.forge.sdk.core import skyvern_context
 from skyvern.forge.sdk.trace import TraceManager
@@ -427,8 +427,10 @@ async def scrape_website(
     :raises Exception: When scraping fails after maximum retries.
     """
 
-    if not url.strip():
-        raise ScrapingFailedNoUrl()
+    # TODO(jdo) why is this a problem?
+    # ref: https://skyvern.slack.com/archives/C074UNDSRJM/p1752771256298149
+    # if not url.strip():
+    #     raise ScrapingFailedNoUrl()
 
     try:
         num_retry += 1