fix: [crawler] crawler queued capture loop

terrtia 2025-01-07 15:31:10 +01:00
parent e6e48c69f5
commit 8692d9b45b
2 changed files with 5 additions and 5 deletions


@@ -166,8 +166,8 @@ class Crawler(AbstractModule):
             else:
                 capture.update(status)
         elif status == crawlers.CaptureStatus.QUEUED:
-            capture.update(status, delta=30)
-            print(capture.uuid, crawlers.CaptureStatus(status).name, int(time.time() + 30))
+            capture.update(status)
+            print(capture.uuid, crawlers.CaptureStatus(status).name, int(time.time()))
         elif status == crawlers.CaptureStatus.ONGOING:
             capture.update(status)
             print(capture.uuid, crawlers.CaptureStatus(status).name, int(time.time()))
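For context, a minimal standalone sketch (not part of the patch, and assuming the pylacus-style CaptureStatus values where QUEUED is 0) of why the old code mishandled queued captures: a zero-valued status is falsy, so the old `if not status:` check in CrawlerCapture.update() (second hunk below) also matched QUEUED, not just a missing status.

# Standalone sketch, assuming pylacus-style CaptureStatus values (QUEUED == 0);
# a zero-valued IntEnum member is falsy, so `if not status:` also matches QUEUED.
from enum import IntEnum

class CaptureStatus(IntEnum):
    UNKNOWN = -1
    QUEUED = 0
    DONE = 1
    ONGOING = 2

status = CaptureStatus.QUEUED
print(not status)       # True  -> old check treated a QUEUED capture as "no status"
print(status is None)   # False -> new check only matches a genuinely missing status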


@@ -1531,13 +1531,13 @@ class CrawlerCapture:
         r_crawler.zadd('crawler:captures', {self.uuid: launch_time})
         r_cache.zadd('crawler:captures', {self.uuid: launch_time})

-    def update(self, status, delta=0):
+    def update(self, status):
         # Error or Reload
-        if not status:
+        if status is None:
             r_cache.hset(f'crawler:capture:{self.uuid}', 'status', CaptureStatus.UNKNOWN.value)
             r_cache.zadd('crawler:captures', {self.uuid: 0})
         else:
-            last_check = int(time.time() + delta)
+            last_check = int(time.time())
             r_cache.hset(f'crawler:capture:{self.uuid}', 'status', status)
             r_cache.zadd('crawler:captures', {self.uuid: last_check})
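And a minimal sketch of the old versus new update() behaviour for a QUEUED capture, with the redis sorted set replaced by a plain dict (the uuid and enum value are illustrative assumptions): the old path rescored a queued capture at 0, which presumably kept it at the front of crawler:captures and re-checked in a loop, matching the commit title; the delta=30 removed in the first hunk could not compensate, since that branch ignores delta.

# Standalone sketch of the status handling, with the 'crawler:captures' sorted set
# replaced by a dict mapping capture uuid -> score; values are illustrative.
import time

QUEUED = 0          # falsy, as in the pylacus-style enum
captures = {}       # stand-in for the 'crawler:captures' sorted set

def old_update(uuid, status, delta=0):
    if not status:                               # QUEUED (0) falls in here
        captures[uuid] = 0                       # score 0: always the "oldest" entry
    else:
        captures[uuid] = int(time.time() + delta)

def new_update(uuid, status):
    if status is None:                           # only a genuinely missing status
        captures[uuid] = 0
    else:
        captures[uuid] = int(time.time())        # QUEUED now gets a fresh timestamp

old_update('uuid-1234', QUEUED, delta=30)
print(captures['uuid-1234'])   # 0 -> capture keeps being picked up again
new_update('uuid-1234', QUEUED)
print(captures['uuid-1234'])   # current epoch time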