Reduce execution waits (#2282)
This commit is contained in:
@@ -1030,7 +1030,7 @@ class ForgeAgent:
|
|||||||
results,
|
results,
|
||||||
)
|
)
|
||||||
# wait random time between actions to avoid detection
|
# wait random time between actions to avoid detection
|
||||||
await asyncio.sleep(random.uniform(1.0, 2.0))
|
await asyncio.sleep(random.uniform(0.5, 1.0))
|
||||||
await self.record_artifacts_after_action(task, step, browser_state, engine)
|
await self.record_artifacts_after_action(task, step, browser_state, engine)
|
||||||
for result in results:
|
for result in results:
|
||||||
result.step_retry_number = step.retry_index
|
result.step_retry_number = step.retry_index
|
||||||
|
|||||||
@@ -593,7 +593,7 @@ class BrowserState:
|
|||||||
async def navigate_to_url(self, page: Page, url: str, retry_times: int = NAVIGATION_MAX_RETRY_TIME) -> None:
|
async def navigate_to_url(self, page: Page, url: str, retry_times: int = NAVIGATION_MAX_RETRY_TIME) -> None:
|
||||||
try:
|
try:
|
||||||
for retry_time in range(retry_times):
|
for retry_time in range(retry_times):
|
||||||
LOG.info(f"Trying to navigate to {url} and waiting for 5 seconds.", url=url, retry_time=retry_time)
|
LOG.info(f"Trying to navigate to {url} and waiting for 1 second.", url=url, retry_time=retry_time)
|
||||||
try:
|
try:
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
await page.goto(url, timeout=settings.BROWSER_LOADING_TIMEOUT_MS)
|
await page.goto(url, timeout=settings.BROWSER_LOADING_TIMEOUT_MS)
|
||||||
@@ -617,8 +617,8 @@ class BrowserState:
|
|||||||
url=url,
|
url=url,
|
||||||
retry_time=retry_time,
|
retry_time=retry_time,
|
||||||
)
|
)
|
||||||
# Wait for 5 seconds before retrying
|
# Wait for 1 second before retrying
|
||||||
await asyncio.sleep(5)
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
LOG.exception(
|
LOG.exception(
|
||||||
|
|||||||
@@ -506,8 +506,8 @@ async def scrape_web_unsafe(
|
|||||||
# This also solves the issue where we can't scroll due to a popup.(e.g. geico first popup on the homepage after
|
# This also solves the issue where we can't scroll due to a popup.(e.g. geico first popup on the homepage after
|
||||||
# clicking start my quote)
|
# clicking start my quote)
|
||||||
|
|
||||||
LOG.info("Waiting for 5 seconds before scraping the website.")
|
LOG.info("Waiting for 3 seconds before scraping the website.")
|
||||||
await asyncio.sleep(5)
|
await asyncio.sleep(3)
|
||||||
|
|
||||||
elements, element_tree = await get_interactable_element_tree(page, scrape_exclude)
|
elements, element_tree = await get_interactable_element_tree(page, scrape_exclude)
|
||||||
element_tree = await cleanup_element_tree(page, url, copy.deepcopy(element_tree))
|
element_tree = await cleanup_element_tree(page, url, copy.deepcopy(element_tree))
|
||||||
|
|||||||
Reference in New Issue
Block a user