Don't draw bounding boxes when checking the user goal (#1964)

This commit is contained in:
Shuchang Zheng
2025-03-18 11:06:09 -07:00
committed by GitHub
parent 9cc9fff7c0
commit c680e33b30
2 changed files with 9 additions and 3 deletions

View File

@@ -1163,7 +1163,7 @@ class ForgeAgent:
step_id=step.step_id,
workflow_run_id=task.workflow_run_id,
)
-        scraped_page_refreshed = await scraped_page.refresh()
+        scraped_page_refreshed = await scraped_page.refresh(draw_boxes=False)
verification_prompt = prompt_engine.load_prompt(
"check-user-goal",

View File

@@ -272,12 +272,13 @@ class ScrapedPage(BaseModel):
raise UnknownElementTreeFormat(fmt=fmt)
-    async def refresh(self) -> Self:
+    async def refresh(self, draw_boxes: bool = True) -> Self:
refreshed_page = await scrape_website(
browser_state=self._browser_state,
url=self.url,
cleanup_element_tree=self._clean_up_func,
scrape_exclude=self._scrape_exclude,
+            draw_boxes=draw_boxes,
)
self.elements = refreshed_page.elements
self.id_to_css_dict = refreshed_page.id_to_css_dict
@@ -310,6 +311,7 @@ async def scrape_website(
num_retry: int = 0,
scrape_exclude: ScrapeExcludeFunc | None = None,
take_screenshots: bool = True,
+    draw_boxes: bool = True,
) -> ScrapedPage:
"""
************************************************************************************************
@@ -340,6 +342,7 @@ async def scrape_website(
cleanup_element_tree=cleanup_element_tree,
scrape_exclude=scrape_exclude,
take_screenshots=take_screenshots,
+        draw_boxes=draw_boxes,
)
except Exception as e:
# NOTE: MAX_SCRAPING_RETRIES is set to 0 in both staging and production
@@ -361,6 +364,8 @@ async def scrape_website(
cleanup_element_tree,
num_retry=num_retry,
scrape_exclude=scrape_exclude,
+        take_screenshots=take_screenshots,
+        draw_boxes=draw_boxes,
)
@@ -409,6 +414,7 @@ async def scrape_web_unsafe(
cleanup_element_tree: CleanupElementTreeFunc,
scrape_exclude: ScrapeExcludeFunc | None = None,
take_screenshots: bool = True,
+    draw_boxes: bool = True,
) -> ScrapedPage:
"""
Asynchronous function that performs web scraping without any built-in error handling. This function is intended
@@ -435,7 +441,7 @@ async def scrape_web_unsafe(
screenshots = []
if take_screenshots:
-        screenshots = await SkyvernFrame.take_split_screenshots(page=page, url=url, draw_boxes=True)
+        screenshots = await SkyvernFrame.take_split_screenshots(page=page, url=url, draw_boxes=draw_boxes)
elements, element_tree = await get_interactable_element_tree(page, scrape_exclude)
element_tree = await cleanup_element_tree(page, url, copy.deepcopy(element_tree))