From bd2ca4b31933a16fbb815200b9ed9fef7fe34643 Mon Sep 17 00:00:00 2001
From: terrtia
Date: Tue, 9 Jan 2024 09:47:49 +0100
Subject: [PATCH] fix: [crawler] fix api create_task

---
 bin/lib/crawlers.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/bin/lib/crawlers.py b/bin/lib/crawlers.py
index 13c8f75f..f98af175 100755
--- a/bin/lib/crawlers.py
+++ b/bin/lib/crawlers.py
@@ -1788,7 +1788,7 @@ def api_add_crawler_capture(data, user_id):
         return {'error': 'Invalid task_uuid', 'task_uuid': task_uuid}, 400
     capture_uuid = data.get('capture_uuid')
     if not capture_uuid:
-        return {'error': 'Invalid capture_uuid', 'task_uuid': capture_uuid}, 400
+        return {'error': 'Invalid capture_uuid', 'capture_uuid': capture_uuid}, 400
 
     # parent = data.get('parent')
 
@@ -1796,6 +1796,8 @@
     task_uuid = create_task(task['url'], depth=task['depth_limit'], har=task['har'], screenshot=task['screenshot'],
                             proxy=task['proxy'], tags=task['tags'],
                             parent='manual', task_uuid=task_uuid, external=True)
+    if not task_uuid:
+        return {'error': 'Aborted by Crawler', 'task_uuid': task_uuid, 'capture_uuid': capture_uuid}, 400
     task = CrawlerTask(task_uuid)
     create_capture(capture_uuid, task_uuid)
     task.start()
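
Note: the sketch below illustrates how the patched api_add_crawler_capture() might be wired into an HTTP endpoint and how the new 'Aborted by Crawler' response surfaces to a caller. The Flask route, user_id handling, and import path are assumptions for illustration, not the actual AIL endpoint definition.

# Minimal usage sketch. Assumptions: AIL's bin/ directory is on sys.path and the
# route name is hypothetical; only api_add_crawler_capture() comes from the patch.
from flask import Flask, request, jsonify

from lib import crawlers  # bin/lib/crawlers.py

app = Flask(__name__)

@app.route('/api/v1/add/crawler/capture', methods=['POST'])  # hypothetical route
def add_crawler_capture():
    user_id = 'api-user'  # assumption: resolved from the request's API key elsewhere
    res, status = crawlers.api_add_crawler_capture(request.get_json(), user_id)
    # With this patch, a falsy task_uuid returned by create_task() surfaces as
    # {'error': 'Aborted by Crawler', ...} with HTTP 400 instead of failing later
    # in CrawlerTask(task_uuid).
    return jsonify(res), status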