From afd0d6b7d85cbb73bc85ee01e18a1acab77d05c4 Mon Sep 17 00:00:00 2001
From: terrtia
Date: Mon, 13 Jan 2025 10:11:01 +0100
Subject: [PATCH] fix: [crawler] increase timeout QUEUED captures

---
 bin/crawlers/Crawler.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bin/crawlers/Crawler.py b/bin/crawlers/Crawler.py
index 5a9318b3..0ae33178 100755
--- a/bin/crawlers/Crawler.py
+++ b/bin/crawlers/Crawler.py
@@ -180,11 +180,11 @@ class Crawler(AbstractModule):
                 capture.update(status)
             elif status == crawlers.CaptureStatus.QUEUED:
                 capture_start = capture.get_start_time(r_str=False)
-                if int(time.time()) - capture_start > 3600:    # TODO ADD in new crawler config
+                if int(time.time()) - capture_start > 36000:    # TODO ADD in new crawler config
                     task = capture.get_task()
                     task.reset()
                     capture.delete()
-                    self.logger.warning(f'capture QUEUED Timeout, {task.uuid} Send back in queue')
+                    self.logger.warning(f'capture QUEUED Timeout, {task.uuid} Send back in queue, start_time={capture_start}')
                 else:
                     capture.update(status)
                 print(capture.uuid, crawlers.CaptureStatus(status).name, int(time.time()))
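
Note: the in-code TODO ("ADD in new crawler config") indicates the QUEUED timeout should eventually come from the crawler configuration rather than a hard-coded constant. Below is a minimal sketch of that idea using the standard-library configparser; the config file name, the [Crawler] section, the queued_capture_timeout option and the helper function are all hypothetical and do not come from this patch.

import configparser

# Default matches the value introduced by this patch (36000 s = 10 h).
DEFAULT_QUEUED_CAPTURE_TIMEOUT = 36000

def get_queued_capture_timeout(config_path='crawler.cfg'):
    # Hypothetical helper: file, section and option names are illustrative only.
    parser = configparser.ConfigParser()
    if not parser.read(config_path):
        # Config file missing or unreadable: fall back to the patched default.
        return DEFAULT_QUEUED_CAPTURE_TIMEOUT
    return parser.getint('Crawler', 'queued_capture_timeout',
                         fallback=DEFAULT_QUEUED_CAPTURE_TIMEOUT)

With such a helper, the hard-coded comparison in the hunk above would read
if int(time.time()) - capture_start > get_queued_capture_timeout():
and the timeout could be tuned without touching the code.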