chg: [crawler] auto tag crawled domains

parent c02b9f2a81
commit 501d10bbbd

6 changed files with 121 additions and 17 deletions
@@ -200,10 +200,13 @@ class Crawler(AbstractModule):
         self.save_capture_response(parent_id, entries)

         self.domain.update_daterange(self.date.replace('/', ''))
-        # Origin + History
+        # Origin + History + tags
         if self.root_item:
             self.domain.set_last_origin(parent_id)
             self.domain.add_history(epoch, root_item=self.root_item)
+            # Tags
+            for tag in task.get_tags():
+                self.domain.add_tag(tag)
         elif self.domain.was_up():
             self.domain.add_history(epoch, root_item=epoch)

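Reviewer note: together with the scheduler and task changes below, this hunk is what makes the tagging automatic: whatever tags the originating task carries are copied onto every crawled domain. A minimal sketch of that propagation, using stand-in objects rather than the real task and domain classes from the framework (tag values are illustrative):

    # Stand-ins only; the real classes live in the framework's lib modules.
    class StubTask:
        def __init__(self, tags):
            self._tags = set(tags)

        def get_tags(self):
            return self._tags

    class StubDomain:
        def __init__(self):
            self.tags = set()

        def add_tag(self, tag):
            self.tags.add(tag)

    task = StubTask(['my-org:watchlist', 'tlp:amber'])
    domain = StubDomain()

    # Same loop as in the hunk above: task tags become domain tags
    for tag in task.get_tags():
        domain.add_tag(tag)

    assert domain.tags == {'my-org:watchlist', 'tlp:amber'}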
@@ -967,6 +967,7 @@ class CrawlerScheduler:
             task_uuid = create_task(meta['url'], depth=meta['depth'], har=meta['har'], screenshot=meta['screenshot'],
                                     header=meta['header'],
                                     cookiejar=meta['cookiejar'], proxy=meta['proxy'],
+                                    tags=meta['tags'],
                                     user_agent=meta['user_agent'], parent='scheduler', priority=40)
             if task_uuid:
                 schedule.set_task(task_uuid)
@@ -1069,6 +1070,14 @@ class CrawlerSchedule:
     def _set_field(self, field, value):
         return r_crawler.hset(f'schedule:{self.uuid}', field, value)

+    def get_tags(self):
+        return r_crawler.smembers(f'schedule:tags:{self.uuid}')
+
+    def set_tags(self, tags=[]):
+        for tag in tags:
+            r_crawler.sadd(f'schedule:tags:{self.uuid}', tag)
+            # Tag.create_custom_tag(tag)
+
     def get_meta(self, ui=False):
         meta = {
             'uuid': self.uuid,
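Reviewer note: the new tag accessors are thin wrappers around a Redis set keyed by the schedule UUID. A small sketch of the resulting key layout, assuming a local Redis instance and a hypothetical UUID (the framework code goes through its own r_crawler connection instead):

    import redis

    r = redis.Redis(decode_responses=True)   # stand-in for the framework's r_crawler handle
    schedule_uuid = 'hypothetical-schedule-uuid'

    # set_tags(): one SADD per tag under schedule:tags:<uuid>
    for tag in ('my-org:watchlist', 'tlp:amber'):
        r.sadd(f'schedule:tags:{schedule_uuid}', tag)

    # get_tags(): SMEMBERS returns the (unordered) set of tags
    print(r.smembers(f'schedule:tags:{schedule_uuid}'))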
@@ -1083,6 +1092,7 @@ class CrawlerSchedule:
             'cookiejar': self.get_cookiejar(),
             'header': self.get_header(),
             'proxy': self.get_proxy(),
+            'tags': self.get_tags(),
         }
         status = self.get_status()
         if ui:
@@ -1098,6 +1108,7 @@ class CrawlerSchedule:
         meta = {'uuid': self.uuid,
                 'url': self.get_url(),
                 'user': self.get_user(),
+                'tags': self.get_tags(),
                 'next_run': self.get_next_run(r_str=True)}
         status = self.get_status()
         if isinstance(status, ScheduleStatus):
@@ -1106,7 +1117,7 @@ class CrawlerSchedule:
         return meta

     def create(self, frequency, user, url,
-               depth=1, har=True, screenshot=True, header=None, cookiejar=None, proxy=None, user_agent=None):
+               depth=1, har=True, screenshot=True, header=None, cookiejar=None, proxy=None, user_agent=None, tags=[]):

         if self.exists():
             raise Exception('Error: Monitor already exists')
@@ -1135,6 +1146,9 @@ class CrawlerSchedule:
         if user_agent:
             self._set_field('user_agent', user_agent)

+        if tags:
+            self.set_tags(tags)
+
         r_crawler.sadd('scheduler:schedules', self.uuid)

     def delete(self):
@@ -1148,12 +1162,13 @@ class CrawlerSchedule:

         # delete meta
         r_crawler.delete(f'schedule:{self.uuid}')
+        r_crawler.delete(f'schedule:tags:{self.uuid}')
         r_crawler.srem('scheduler:schedules', self.uuid)


-def create_schedule(frequency, user, url, depth=1, har=True, screenshot=True, header=None, cookiejar=None, proxy=None, user_agent=None):
+def create_schedule(frequency, user, url, depth=1, har=True, screenshot=True, header=None, cookiejar=None, proxy=None, user_agent=None, tags=[]):
    schedule_uuid = gen_uuid()
    schedule = CrawlerSchedule(schedule_uuid)
-   schedule.create(frequency, user, url, depth=depth, har=har, screenshot=screenshot, header=header, cookiejar=cookiejar, proxy=proxy, user_agent=user_agent)
+   schedule.create(frequency, user, url, depth=depth, har=har, screenshot=screenshot, header=header, cookiejar=cookiejar, proxy=proxy, user_agent=user_agent, tags=tags)
    return schedule_uuid

# TODO sanityze UUID
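Reviewer note: with the extended signatures above, a tagged schedule can now be created in one call. A hedged usage sketch (frequency, user, URL and tag values are illustrative, not taken from the commit):

    # Assumes the updated crawlers module is importable as `crawlers`.
    schedule_uuid = crawlers.create_schedule(
        'daily',                      # illustrative frequency value
        'admin@admin.test',           # illustrative user
        'http://example.onion',
        depth=1, har=True, screenshot=True,
        tags=['my-org:watchlist'],    # stored via CrawlerSchedule.set_tags()
    )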
@@ -1289,6 +1304,11 @@ def get_captures_status():
        status.append(meta)
    return status

+def delete_captures():
+    for capture_uuid in get_crawler_captures():
+        capture = CrawlerCapture(capture_uuid)
+        capture.delete()
+
##-- CRAWLER STATE --##


@@ -1371,6 +1391,14 @@ class CrawlerTask:
     def _set_field(self, field, value):
         return r_crawler.hset(f'crawler:task:{self.uuid}', field, value)

+    def get_tags(self):
+        return r_crawler.smembers(f'crawler:task:tags:{self.uuid}')
+
+    def set_tags(self, tags):
+        for tag in tags:
+            r_crawler.sadd(f'crawler:task:tags:{self.uuid}', tag)
+            # Tag.create_custom_tag(tag)
+
     def get_meta(self):
         meta = {
             'uuid': self.uuid,
@@ -1385,6 +1413,7 @@ class CrawlerTask:
             'header': self.get_header(),
             'proxy': self.get_proxy(),
             'parent': self.get_parent(),
+            'tags': self.get_tags(),
         }
         return meta

@@ -1392,7 +1421,7 @@ class CrawlerTask:
     # TODO SANITIZE PRIORITY
     # PRIORITY: discovery = 0/10, feeder = 10, manual = 50, auto = 40, test = 100
     def create(self, url, depth=1, har=True, screenshot=True, header=None, cookiejar=None, proxy=None,
-               user_agent=None, parent='manual', priority=0):
+               user_agent=None, tags=[], parent='manual', priority=0):
         if self.exists():
             raise Exception('Error: Task already exists')

@@ -1423,7 +1452,7 @@ class CrawlerTask:
         # TODO SANITIZE COOKIEJAR -> UUID

         # Check if already in queue
-        hash_query = get_task_hash(url, domain, depth, har, screenshot, priority, proxy, cookiejar, user_agent, header)
+        hash_query = get_task_hash(url, domain, depth, har, screenshot, priority, proxy, cookiejar, user_agent, header, tags)
         if r_crawler.hexists(f'crawler:queue:hash', hash_query):
             self.uuid = r_crawler.hget(f'crawler:queue:hash', hash_query)
             return self.uuid
@@ -1444,6 +1473,9 @@ class CrawlerTask:
         if user_agent:
             self._set_field('user_agent', user_agent)

+        if tags:
+            self.set_tags(tags)
+
         r_crawler.hset('crawler:queue:hash', hash_query, self.uuid)
         self._set_field('hash', hash_query)
         r_crawler.zadd('crawler:queue', {self.uuid: priority})
@@ -1483,10 +1515,10 @@ class CrawlerTask:


# TODO move to class ???
-def get_task_hash(url, domain, depth, har, screenshot, priority, proxy, cookiejar, user_agent, header):
+def get_task_hash(url, domain, depth, har, screenshot, priority, proxy, cookiejar, user_agent, header, tags):
    to_enqueue = {'domain': domain, 'depth': depth, 'har': har, 'screenshot': screenshot,
                  'priority': priority, 'proxy': proxy, 'cookiejar': cookiejar, 'user_agent': user_agent,
-                 'header': header}
+                 'header': header, 'tags': tags}
    if priority != 0:
        to_enqueue['url'] = url
    return hashlib.sha512(pickle.dumps(to_enqueue)).hexdigest()
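Reviewer note: because `tags` is now part of the dictionary that feeds `get_task_hash()`, two otherwise identical tasks that differ only in their tags no longer collapse onto the same `crawler:queue:hash` entry. A standalone restatement of the hash for illustration (a simplified copy for demonstration, not the framework function itself):

    import hashlib
    import pickle

    def task_hash(url, domain, depth, har, screenshot, priority, proxy, cookiejar, user_agent, header, tags):
        to_enqueue = {'domain': domain, 'depth': depth, 'har': har, 'screenshot': screenshot,
                      'priority': priority, 'proxy': proxy, 'cookiejar': cookiejar,
                      'user_agent': user_agent, 'header': header, 'tags': tags}
        if priority != 0:
            to_enqueue['url'] = url
        return hashlib.sha512(pickle.dumps(to_enqueue)).hexdigest()

    base = ('http://example.onion', 'example.onion', 1, True, True, 90, None, None, None, None)
    # Different tag lists -> different hashes -> separate queue entries
    print(task_hash(*base, ['tag-a']) == task_hash(*base, ['tag-b']))   # False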
@@ -1502,7 +1534,7 @@ def add_task_to_lacus_queue():

# PRIORITY: discovery = 0/10, feeder = 10, manual = 50, auto = 40, test = 100
def create_task(url, depth=1, har=True, screenshot=True, header=None, cookiejar=None, proxy=None,
-               user_agent=None, parent='manual', priority=0, task_uuid=None):
+               user_agent=None, tags=[], parent='manual', priority=0, task_uuid=None):
    if task_uuid:
        if CrawlerTask(task_uuid).exists():
            task_uuid = gen_uuid()
@@ -1510,7 +1542,7 @@ def create_task(url, depth=1, har=True, screenshot=True, header=None, cookiejar=
        task_uuid = gen_uuid()
    task = CrawlerTask(task_uuid)
    task_uuid = task.create(url, depth=depth, har=har, screenshot=screenshot, header=header, cookiejar=cookiejar,
-                           proxy=proxy, user_agent=user_agent, parent=parent, priority=priority)
+                           proxy=proxy, user_agent=user_agent, tags=tags, parent=parent, priority=priority)
    return task_uuid


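Reviewer note: the one-shot path mirrors the schedule path; `create_task()` simply forwards `tags` down to `CrawlerTask.create()`, which persists them under `crawler:task:tags:<uuid>`. A hedged usage sketch (values illustrative):

    # Assumes the updated crawlers module is importable as `crawlers`.
    task_uuid = crawlers.create_task(
        'http://example.onion',
        depth=1, har=True, screenshot=True,
        tags=['my-org:watchlist'],    # forwarded to CrawlerTask.set_tags()
        parent='manual', priority=90,
    )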
@@ -1586,15 +1618,17 @@ def api_add_crawler_task(data, user_id=None):
    if verify[1] != 200:
        return verify

+   tags = data.get('tags', [])
+
    if frequency:
        # TODO verify user
        return create_schedule(frequency, user_id, url, depth=depth_limit, har=har, screenshot=screenshot, header=None,
-                              cookiejar=cookiejar_uuid, proxy=proxy, user_agent=None), 200
+                              cookiejar=cookiejar_uuid, proxy=proxy, user_agent=None, tags=tags), 200
    else:
        # TODO HEADERS
        # TODO USER AGENT
        return create_task(url, depth=depth_limit, har=har, screenshot=screenshot, header=None,
-                          cookiejar=cookiejar_uuid, proxy=proxy, user_agent=None,
+                          cookiejar=cookiejar_uuid, proxy=proxy, user_agent=None, tags=tags,
                           parent='manual', priority=90), 200


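Reviewer note: on the API side the only new requirement is an optional `tags` list in the payload; everything else keeps its existing meaning. A sketch of a payload this function would accept; only `url`, `tags` and the frequency branch are visible in this hunk, so the remaining keys follow the pre-existing handling earlier in the function and are assumptions here:

    data = {
        'url': 'http://example.onion',
        'depth_limit': 1,            # assumed pre-existing field
        'har': True,                 # assumed pre-existing field
        'screenshot': True,          # assumed pre-existing field
        'tags': ['my-org:watchlist', 'tlp:amber'],   # new: read via data.get('tags', [])
        # 'frequency': 'daily',      # if present, a CrawlerSchedule is created instead of a one-shot task
    }
    result, status_code = crawlers.api_add_crawler_task(data, user_id='admin@admin.test')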
@@ -1870,6 +1904,8 @@ def test_ail_crawlers():
    load_blacklist()

# if __name__ == '__main__':
+#     delete_captures()
+
#     item_id = 'crawled/2023/02/20/data.gz'
#     item = Item(item_id)
#     content = item.get_content()
@@ -96,7 +96,8 @@ def manual():
                           is_manager_connected=crawlers.get_lacus_connection_metadata(),
                           crawlers_types=crawlers_types,
                           proxies=proxies,
-                          l_cookiejar=l_cookiejar)
+                          l_cookiejar=l_cookiejar,
+                          tags_selector_data=Tag.get_tags_selector_data())


@crawler_splash.route("/crawlers/send_to_spider", methods=['POST'])
@@ -113,6 +114,34 @@ def send_to_spider():
    depth_limit = request.form.get('depth_limit')
    cookiejar_uuid = request.form.get('cookiejar')

+   # TAGS
+   tags = request.form.get("tags", [])
+   taxonomies_tags = request.form.get('taxonomies_tags')
+   if taxonomies_tags:
+       try:
+           taxonomies_tags = json.loads(taxonomies_tags)
+       except:
+           taxonomies_tags = []
+   else:
+       taxonomies_tags = []
+   galaxies_tags = request.form.get('galaxies_tags')
+   if galaxies_tags:
+       try:
+           galaxies_tags = json.loads(galaxies_tags)
+       except:
+           galaxies_tags = []
+   else:
+       galaxies_tags = []
+   # custom tags
+   if tags:
+       tags = tags.split()
+   else:
+       tags = []
+   escaped = []
+   for tag in tags:
+       escaped.append(tag)
+   tags = escaped + taxonomies_tags + galaxies_tags
+
    # Frequency
    if request.form.get('crawler_scheduler'):
        frequency = request.form.get('frequency')
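Reviewer note: the form therefore submits three tag sources: a free-text `tags` field with space-separated custom tags, plus `taxonomies_tags` and `galaxies_tags` as JSON-encoded lists coming from the tag selector widget. The handler above merges them into a single list. A compressed sketch of that merge, with hypothetical form values:

    import json

    form = {
        'tags': 'my-org:watchlist urgent',              # space-separated custom tags
        'taxonomies_tags': json.dumps(['tlp:amber']),   # from the taxonomy selector
        'galaxies_tags': json.dumps(['misp-galaxy:threat-actor="example"']),  # from the galaxy selector
    }

    custom = form.get('tags', '').split() if form.get('tags') else []
    taxonomies = json.loads(form.get('taxonomies_tags') or '[]')
    galaxies = json.loads(form.get('galaxies_tags') or '[]')

    tags = custom + taxonomies + galaxies
    print(tags)   # ['my-org:watchlist', 'urgent', 'tlp:amber', 'misp-galaxy:threat-actor="example"']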
@@ -147,6 +176,8 @@ def send_to_spider():
        data['proxy'] = proxy
    if cookiejar_uuid:
        data['cookiejar'] = cookiejar_uuid
+   if tags:
+       data['tags'] = tags
    # print(data)
    res = crawlers.api_add_crawler_task(data, user_id=user_id)

@@ -163,6 +194,7 @@ def scheduler_dashboard():
    # print(schedulers)
    # TODO list currently queued ?
    return render_template("crawler_scheduler_dashboard.html",
+                          bootstrap_label=bootstrap_label,
                           schedulers=schedulers,
                           is_manager_connected=crawlers.get_lacus_connection_metadata())

@@ -176,6 +208,7 @@ def schedule_show():
        abort(404)
    meta = schedule.get_meta(ui=True)
    return render_template("crawler_schedule_uuid.html",
+                          bootstrap_label=bootstrap_label,
                           meta=meta)

@crawler_splash.route("/crawlers/schedule/delete", methods=['GET'])
@@ -8,14 +8,16 @@
    <link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
    <link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
    <link href="{{ url_for('static', filename='css/daterangepicker.min.css') }}" rel="stylesheet">
+   <link href="{{ url_for('static', filename='css/tags.css') }}" rel="stylesheet">

    <!-- JS -->
    <script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
    <script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
    <script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
-   <script language="javascript" src="{{ url_for('static', filename='js/moment.min.js') }}"></script>
-   <script language="javascript" src="{{ url_for('static', filename='js/jquery.daterangepicker.min.js') }}"></script>
-   <script language="javascript" src="{{ url_for('static', filename='js/d3.min.js') }}"></script>
+   <script src="{{ url_for('static', filename='js/moment.min.js') }}"></script>
+   <script src="{{ url_for('static', filename='js/jquery.daterangepicker.min.js') }}"></script>
+   <script src="{{ url_for('static', filename='js/d3.min.js') }}"></script>
+   <script src="{{ url_for('static', filename='js/tags.js') }}"></script>

</head>

@@ -119,6 +121,21 @@

    </div>

+   <div class="card bg-light my-4">
+       <div class="card-header bg-dark text-white">
+           <b>Tags</b>
+       </div>
+       <div class="card-body">
+           <div class="input-group mb-2 mr-sm-2">
+               <div class="input-group-prepend">
+                   <div class="input-group-text bg-danger text-white"><i class="fas fa-tag"></i></div>
+               </div>
+               <input id="tags" name="tags" class="form-control" placeholder="Custom Tags (optional, space separated)" type="text" {%if dict_tracker%}{%if dict_tracker['tags']%}value="{%for tag in dict_tracker['tags']%}{{tag}} {%endfor%}"{%endif%}{%endif%}>
+           </div>
+           {% include 'tags/block_tags_selector.html' %}
+       </div>
+   </div>
+
</div>


@@ -72,6 +72,14 @@
                {% endif %}
            </td>
        </tr>
+       <tr>
+           <th>Tags</th>
+           <td>
+               {%for tag in meta['tags']%}
+                   <span class="badge badge-{{ bootstrap_label[loop.index0 % 5] }}">{{ tag }}</span>
+               {%endfor%}
+           </td>
+       </tr>
    </tbody>
</table>

@@ -45,7 +45,14 @@
        <tbody id="tbody_last_crawled">
        {% for meta in schedulers %}
            <tr>
-               <td><a href="{{ url_for('crawler_splash.schedule_show') }}?uuid={{ meta['uuid'] }}">{{ meta['url'] }}</a></td>
+               <td>
+                   <a href="{{ url_for('crawler_splash.schedule_show') }}?uuid={{ meta['uuid'] }}">{{ meta['url'] }}</a>
+                   <div>
+                       {% for tag in meta['tags'] %}
+                           <span class="badge badge-{{ bootstrap_label[loop.index0 % 5] }}">{{ tag }}</span>
+                       {%endfor%}
+                   </div>
+               </td>
                <td>{{ meta['status'] }}</td>
                <td>
                    {% if not meta['next_run'] %}