chg: [dashboard] add crawler stats

This commit is contained in:
terrtia 2024-11-27 15:26:07 +01:00
parent 3d14bac434
commit 14b0675330
No known key found for this signature in database
GPG key ID: 1E1B1F50D84613D0
5 changed files with 103 additions and 4 deletions

View file

@ -15,6 +15,7 @@ sys.path.append(os.environ['AIL_BIN'])
##################################
from lib.ConfigLoader import ConfigLoader
from lib.objects import ail_objects
from lib.crawlers import get_crawlers_stats
from lib import Tag
from lib import Tracker
@ -91,6 +92,9 @@ def get_nb_objs_today():
nb_objs = ail_objects.get_nb_objects_by_date(date)
return nb_objs
def get_crawler_stats():
    """Return today's crawler statistics (per-type up/down/crawled/queue counts).

    Thin alias over ``lib.crawlers.get_crawlers_stats``.
    NOTE(review): callers in this commit invoke the imported
    ``get_crawlers_stats`` directly, so this wrapper appears unused — confirm
    before relying on it.
    """
    stats = get_crawlers_stats()
    return stats
def get_nb_objs_dashboard():
    """Return the dashboard object counts for today (date keyed as YYYYMMDD)."""
    # f-string format spec delegates to strftime, so this matches
    # datetime.date.today().strftime("%Y%m%d") exactly.
    today = f"{datetime.date.today():%Y%m%d}"
    return ail_objects.get_nb_objects_dashboard(today)

View file

@ -316,7 +316,8 @@ def ws_dashboard(ws):
objs = ail_stats.get_nb_objs_today()
tags = ail_stats.get_tagged_objs_dashboard()
trackers = ail_stats.get_tracked_objs_dashboard(user_org, user_id)
ws.send(json.dumps({'feeders': feeders, 'objs': objs, 'tags': tags, 'trackers': trackers}))
crawler = ail_stats.get_crawlers_stats()
ws.send(json.dumps({'feeders': feeders, 'objs': objs, 'crawler': crawler, 'tags': tags, 'trackers': trackers}))
next_feeders = next_feeders + 30
time.sleep(1)
except Exception as e: # ConnectionClosed ?

View file

@ -598,6 +598,36 @@ def domains_search_name():
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
domains_types=domains_types)
@crawler_splash.route('/domains/today', methods=['GET'])
@login_required
@login_read_only
def domains_search_today():
    """Render the list of domains crawled today, filtered by type and status.

    Query parameters:
        type: 'all' or one of Domains.get_all_domains_types(); any unknown
              value silently falls back to 'all'.
        up / down: enable the corresponding status filter.
              NOTE(review): bool(request.args.get(...)) treats ANY non-empty
              string (including "False") as enabled — kept as-is for
              backward compatibility with existing dashboard links.
    """
    dom_types = request.args.get('type')
    down = bool(request.args.get('down', False))
    up = bool(request.args.get('up'))
    # page = request.args.get('page')
    all_types = Domains.get_all_domains_types()
    if dom_types == 'all':
        domain_types = all_types
    elif dom_types in all_types:  # reuse list fetched above (was a redundant second query)
        domain_types = [dom_types]
    else:
        dom_types = 'all'
        domain_types = all_types
    date = Date.get_today_date_str()
    domains_date = Domains.get_domains_dates_by_daterange(date, date, domain_types, up=up, down=down)
    # Resolve each date bucket's domain list into display metadata.
    dict_domains = {d: Domains.get_domains_meta(domains_date[d]) for d in domains_date}
    date_from = f"{date[0:4]}-{date[4:6]}-{date[6:8]}"
    date_to = date_from
    return render_template("domains_daterange.html", date_from=date_from, date_to=date_to,
                           bootstrap_label=bootstrap_label,
                           filter_down=down, filter_up=up,
                           dict_domains=dict_domains, type=dom_types)
@crawler_splash.route('/domains/date', methods=['GET'])
@login_required

View file

@ -19,7 +19,6 @@ sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib.objects import ail_objects
from lib import ail_stats
@ -41,11 +40,12 @@ def objects():
user_id = current_user.get_user_id()
nb_objects = ail_stats.get_nb_objs_dashboard()
feeders_dashboard = ail_stats.get_feeders_dashboard_full()
crawlers_stats = ail_stats.get_crawlers_stats()
trackers = ail_stats.get_tracked_objs_dashboard(user_org, user_id)
tagged_objs = ail_stats.get_tagged_objs_dashboard()
return render_template("objs_dashboard.html", feeders_dashboard=feeders_dashboard,
nb_objects=nb_objects, trackers=trackers, tagged_objs=tagged_objs,
bootstrap_label=bootstrap_label)
bootstrap_label=bootstrap_label, crawlers_stats=crawlers_stats)

View file

@ -68,6 +68,59 @@
<div class="col-12 col-xl-6">
<div id="feeders_dashboard" style="width: 100%; height:600px;"></div>
<div class="card mt-1 mb-4">
<div class="card-header text-white bg-dark">
<div class="row">
<div class="col-1">
<i class="fas fa-user-secret fa-2x"></i>
</div>
<div class="col-5">
<div class="row">
<div class="col-6">
<div>
<a href="{{ url_for('crawler_splash.domains_search_today') }}?type=onion&up=True" class="badge badge-success" id="stat_crawler_onion_domain_up">{{ crawlers_stats['onion']['up'] }}</a> Up
</div>
<div>
<a href="{{ url_for('crawler_splash.domains_search_today') }}?type=onion&down=True" class="badge badge-danger" id="stat_crawler_onion_domain_down">{{ crawlers_stats['onion']['down'] }}</a> Down
</div>
</div>
<div class="col-6">
<div>
<a href="{{ url_for('crawler_splash.domains_search_today') }}?type=onion&up=True&down=True" class="badge badge-success" id="stat_crawler_onion_total">{{ crawlers_stats['onion']['crawled'] }}</a> Crawled
</div>
<div>
<span class="badge badge-warning" id="stat_crawler_onion_queue">{{ crawlers_stats['onion']['queue'] }}</span> Queue
</div>
</div>
</div>
</div>
<div class="col-1">
<i class="fab fa-html5 fa-2x"></i>
</div>
<div class="col-5">
<div class="row">
<div class="col-6">
<div>
<a href="{{ url_for('crawler_splash.domains_search_today') }}?type=web&up=True" class="badge badge-success" id="stat_crawler_regular_domain_up">{{ crawlers_stats['web']['up'] }}</a> UP
</div>
<div>
<a href="{{ url_for('crawler_splash.domains_search_today') }}?type=web&down=True" class="badge badge-danger" id="stat_crawler_regular_domain_down">{{ crawlers_stats['web']['down'] }}</a> DOWN
</div>
</div>
<div class="col-6">
<div>
<a href="{{ url_for('crawler_splash.domains_search_today') }}?type=web&up=True&down=True" class="badge badge-success" id="stat_crawler_regular_total">{{ crawlers_stats['web']['crawled'] }}</a> Crawled
</div>
<div>
<span class="badge badge-warning" id="stat_crawler_regular_queue">{{ crawlers_stats['web']['queue'] }}</span> Queue
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="col-12 col-xl-6 mt-4">
@ -88,7 +141,6 @@
</div>
<div class="row">
@ -286,6 +338,17 @@ function updateNbObjects(data) {
}
}
function updateCrawlerStats(data) {
    // Refresh the dashboard crawler counters from a websocket payload shaped
    // as {onion: {up, down, crawled, queue}, web: {...}}.
    // DOM ids use "regular" where the payload key is "web".
    const idByType = {onion: 'onion', web: 'regular'};
    const suffixByField = {up: 'domain_up', down: 'domain_down', crawled: 'total', queue: 'queue'};
    for (const [typeKey, idPart] of Object.entries(idByType)) {
        for (const [field, suffix] of Object.entries(suffixByField)) {
            $(`#stat_crawler_${idPart}_${suffix}`).text(data[typeKey][field]);
        }
    }
}
///////
function create_obj_svg(container, obj_gid, url, color, fa_style, icon) {
@ -362,6 +425,7 @@ socket.onmessage = function(event) {
let data = JSON.parse(event.data);
updateFeederChart(data['feeders']);
updateNbObjects(data['objs']);
updateCrawlerStats(data['crawler']);
update_tracked_objs_dashboard(data['trackers']);
update_tags_obj_dashboard(data['tags']);
};