fix: [crawler] avoid crawler loop if a capture ends up in an invalid state

This commit is contained in:
terrtia 2024-12-09 16:44:40 +01:00
parent bdb80ee4a4
commit ea12a44836
No known key found for this signature in database
GPG key ID: 1E1B1F50D84613D0

View file

@ -164,9 +164,15 @@ class Crawler(AbstractModule):
self.logger.warning(f'capture UNKNOWN Timeout, {task.uuid} Send back in queue')
else:
capture.update(status)
else:
elif status == crawlers.CaptureStatus.QUEUED or status == crawlers.CaptureStatus.ONGOING:
capture.update(status)
print(capture.uuid, crawlers.CaptureStatus(status).name, int(time.time()))
# Invalid State
else:
task = capture.get_task()
task.reset()
capture.delete()
self.logger.warning(f'ERROR INVALID CAPTURE STATUS {status}, {task.uuid} Send back in queue')
except ConnectionError:
self.logger.warning(f'Lacus ConnectionError, capture {capture.uuid}')