#!/usr/bin/env python3
# -*-coding:UTF-8 -*

'''
    Flask functions and routes for the hidden services and crawler pages
'''

import redis
import datetime
import sys
import os
import time
import json

from pyfaup.faup import Faup

from flask import Flask, render_template, jsonify, request, send_file, Blueprint, redirect, url_for

from Role_Manager import login_admin, login_analyst, login_read_only, no_cache
from flask_login import login_required

from Date import Date
from HiddenServices import HiddenServices

import crawlers

# ============ VARIABLES ============
import Flask_config

app = Flask_config.app
baseUrl = Flask_config.baseUrl
r_cache = Flask_config.r_cache
r_serv_onion = Flask_config.r_serv_onion
r_serv_metadata = Flask_config.r_serv_metadata
bootstrap_label = Flask_config.bootstrap_label

sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import crawlers

hiddenServices = Blueprint('hiddenServices', __name__, template_folder='templates')

faup = Faup()

list_types = ['onion', 'i2p', 'regular']
dic_type_name = {'onion': 'Onion', 'i2p': 'I2P', 'regular': 'Website'}

# ============ FUNCTIONS ============

def one():
    return 1

def get_date_range(num_day):
    curr_date = datetime.date.today()
    date = Date('{}{}{}'.format(str(curr_date.year), str(curr_date.month).zfill(2), str(curr_date.day).zfill(2)))
    date_list = []

    for i in range(0, num_day):
        date_list.append(date.substract_day(i))

    return list(reversed(date_list))
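
# get_date_range() example (illustrative, not executed): assuming
# Date.substract_day() returns 'YYYYMMDD' strings and today is 2020-01-03,
# get_date_range(3) returns ['20200101', '20200102', '20200103'].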

def substract_date(date_from, date_to):
    date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8]))
    date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8]))
    delta = date_to - date_from # timedelta
    l_date = []
    for i in range(delta.days + 1):
        date = date_from + datetime.timedelta(i)
        l_date.append( date.strftime('%Y%m%d') )
    return l_date
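
# substract_date() example (illustrative): substract_date('20190101', '20190103')
# returns ['20190101', '20190102', '20190103'], i.e. an inclusive day range.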

def unpack_paste_tags(p_tags):
    l_tags = []
    for tag in p_tags:
        complete_tag = tag
        tag = tag.split('=')
        if len(tag) > 1:
            if tag[1] != '':
                tag = tag[1][1:-1]
            # no value
            else:
                tag = tag[0][1:-1]
        # use for custom tags
        else:
            tag = tag[0]
        l_tags.append( (tag, complete_tag) )
    return l_tags
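
# unpack_paste_tags() example (illustrative): a taxonomy tag such as
# 'infoleak:automatic-detection="onion"' yields ('onion', <full tag>), while a
# custom tag without '=' yields (tag, tag).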

def is_valid_domain(domain):
    faup.decode(domain)
    domain_unpack = faup.get()
    if domain_unpack['tld'] is not None and domain_unpack['scheme'] is None and domain_unpack['port'] is None and domain_unpack['query_string'] is None:
        return True
    else:
        return False

def is_valid_service_type(service_type):
    accepted_service = ['onion', 'i2p', 'regular']
    if service_type in accepted_service:
        return True
    else:
        return False

def get_onion_status(domain, date):
    if r_serv_onion.sismember('onion_up:'+date, domain):
        return True
    else:
        return False
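
# get_onion_status() reads the 'onion_up:<YYYYMMDD>' Redis set which, per the
# stats functions below, holds the onion domains seen up on that day.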

def get_domain_type(domain):
    type_id = domain.split(':')[-1]
    if type_id == 'onion':
        return 'onion'
    elif type_id == 'i2p':
        return 'i2p'
    else:
        return 'regular'

def get_type_domain(domain):
    if domain is None:
        type = 'regular'
    else:
        loc = domain.rsplit('.', 1)[1]
        if loc == 'onion':
            type = 'onion'
        elif loc == 'i2p':
            type = 'i2p'
        else:
            type = 'regular'
    return type

def get_domain_from_url(url):
    faup.decode(url)
    unpack_url = faup.get()
    domain = unpack_url['domain']
    ## TODO: FIXME remove me
    try:
        domain = domain.decode()
    except:
        pass
    return domain

def get_last_domains_crawled(type):
    return r_serv_onion.lrange('last_{}'.format(type), 0 ,-1)
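
# get_last_domains_crawled() entries are parsed by
# get_last_crawled_domains_metadata() below as 'domain[:port];epoch'
# (an assumption based on the rsplit(';') / split(':') handling there).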

def get_nb_domains_inqueue(type):
    nb = r_serv_onion.scard('{}_crawler_queue'.format(type))
    nb += r_serv_onion.scard('{}_crawler_priority_queue'.format(type))
    return nb

def get_stats_last_crawled_domains(type, date):
    statDomains = {}
    statDomains['domains_up'] = r_serv_onion.scard('{}_up:{}'.format(type, date))
    statDomains['domains_down'] = r_serv_onion.scard('{}_down:{}'.format(type, date))
    statDomains['total'] = statDomains['domains_up'] + statDomains['domains_down']
    statDomains['domains_queue'] = get_nb_domains_inqueue(type)
    return statDomains

def get_last_crawled_domains_metadata(list_domains_crawled, date, type=None, auto_mode=False):
    list_crawled_metadata = []
    for domain_epoch in list_domains_crawled:
        if not auto_mode:
            domain, epoch = domain_epoch.rsplit(';', 1)
        else:
            url = domain_epoch
            domain = domain_epoch
        domain = domain.split(':')
        if len(domain) == 1:
            port = 80
            domain = domain[0]
        else:
            port = domain[1]
            domain = domain[0]
        metadata_domain = {}
        # get Domain type
        if type is None:
            type_domain = get_type_domain(domain)
        else:
            type_domain = type
        if auto_mode:
            metadata_domain['url'] = url
            epoch = r_serv_onion.zscore('crawler_auto_queue', '{};auto;{}'.format(domain, type_domain))
            # domain in priority queue
            if epoch is None:
                epoch = 'In Queue'
            else:
                epoch = datetime.datetime.fromtimestamp(float(epoch)).strftime('%Y-%m-%d %H:%M:%S')

        metadata_domain['domain'] = domain
        if len(domain) > 45:
            domain_name, tld_domain = domain.rsplit('.', 1)
            metadata_domain['domain_name'] = '{}[...].{}'.format(domain_name[:40], tld_domain)
        else:
            metadata_domain['domain_name'] = domain
        metadata_domain['port'] = port
        metadata_domain['epoch'] = epoch
        metadata_domain['last_check'] = r_serv_onion.hget('{}_metadata:{}'.format(type_domain, domain), 'last_check')
        if metadata_domain['last_check'] is None:
            metadata_domain['last_check'] = '********'
        metadata_domain['first_seen'] = r_serv_onion.hget('{}_metadata:{}'.format(type_domain, domain), 'first_seen')
        if metadata_domain['first_seen'] is None:
            metadata_domain['first_seen'] = '********'
        if r_serv_onion.sismember('{}_up:{}'.format(type_domain, metadata_domain['last_check']), domain):
            metadata_domain['status_text'] = 'UP'
            metadata_domain['status_color'] = 'Green'
            metadata_domain['status_icon'] = 'fa-check-circle'
        else:
            metadata_domain['status_text'] = 'DOWN'
            metadata_domain['status_color'] = 'Red'
            metadata_domain['status_icon'] = 'fa-times-circle'
        list_crawled_metadata.append(metadata_domain)
    return list_crawled_metadata

def get_crawler_splash_status(type):
    crawler_metadata = []
    all_crawlers = r_cache.smembers('{}_crawlers'.format(type))
    for crawler in all_crawlers:
        crawling_domain = r_cache.hget('metadata_crawler:{}'.format(crawler), 'crawling_domain')
        started_time = r_cache.hget('metadata_crawler:{}'.format(crawler), 'started_time')
        status_info = r_cache.hget('metadata_crawler:{}'.format(crawler), 'status')
        crawler_info = '{} - {}'.format(crawler, started_time)
        if status_info == 'Waiting' or status_info == 'Crawling':
            status = True
        else:
            status = False
        crawler_metadata.append({'crawler_info': crawler_info, 'crawling_domain': crawling_domain, 'status_info': status_info, 'status': status})

    return crawler_metadata
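
# get_crawler_splash_status() relies on the '<type>_crawlers' set and the
# 'metadata_crawler:<crawler>' hashes ('crawling_domain', 'started_time',
# 'status'), which are expected to be maintained in r_cache by the crawler
# processes.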

def delete_auto_crawler(url):
    domain = get_domain_from_url(url)
    type = get_type_domain(domain)
    # remove from set
    r_serv_onion.srem('auto_crawler_url:{}'.format(type), url)
    # remove config
    r_serv_onion.delete('crawler_config:auto:{}:{}:{}'.format(type, domain, url))
    # remove from queue
    r_serv_onion.srem('{}_crawler_priority_queue'.format(type), '{};auto'.format(url))
    # remove from crawler_auto_queue
    r_serv_onion.zrem('crawler_auto_queue', '{};auto;{}'.format(url, type))
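
# delete_auto_crawler() is expected to undo every key written when an automatic
# crawl is scheduled: the 'auto_crawler_url:<type>' set, the matching
# 'crawler_config:auto:...' entry, the priority queue entry and the
# 'crawler_auto_queue' sorted set member.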

# ============= ROUTES ==============

# @hiddenServices.route("/crawlers/", methods=['GET'])
# @login_required
# @login_read_only
# def dashboard():
#     crawler_metadata_onion = get_crawler_splash_status('onion')
#     crawler_metadata_regular = get_crawler_splash_status('regular')
#
#     now = datetime.datetime.now()
#     date = now.strftime("%Y%m%d")
#     statDomains_onion = get_stats_last_crawled_domains('onion', date)
#     statDomains_regular = get_stats_last_crawled_domains('regular', date)
#
#     return render_template("Crawler_dashboard.html", crawler_metadata_onion=crawler_metadata_onion,
#                             date=date,
#                             crawler_metadata_regular=crawler_metadata_regular,
#                             statDomains_onion=statDomains_onion, statDomains_regular=statDomains_regular)

@hiddenServices.route("/crawlers/crawler_splash_onion", methods=['GET'])
@login_required
@login_read_only
def crawler_splash_onion():
    type = 'onion'
    last_onions = get_last_domains_crawled(type)
    list_onion = []

    now = datetime.datetime.now()
    date = now.strftime("%Y%m%d")
    statDomains = get_stats_last_crawled_domains(type, date)

    list_onion = get_last_crawled_domains_metadata(last_onions, date, type=type)
    crawler_metadata = get_crawler_splash_status(type)

    date_string = '{}-{}-{}'.format(date[0:4], date[4:6], date[6:8])
    return render_template("Crawler_Splash_onion.html", last_onions=list_onion, statDomains=statDomains,
                            crawler_metadata=crawler_metadata, date_from=date_string, date_to=date_string)

@hiddenServices.route("/crawlers/Crawler_Splash_last_by_type", methods=['GET'])
@login_required
@login_read_only
def Crawler_Splash_last_by_type():
    type = request.args.get('type')
    # verify user input
    if type not in list_types:
        type = 'onion'
    type_name = dic_type_name[type]
    list_domains = []

    now = datetime.datetime.now()
    date = now.strftime("%Y%m%d")
    date_string = '{}-{}-{}'.format(date[0:4], date[4:6], date[6:8])

    statDomains = get_stats_last_crawled_domains(type, date)

    list_domains = get_last_crawled_domains_metadata(get_last_domains_crawled(type), date, type=type)
    crawler_metadata = get_crawler_splash_status(type)

    return render_template("Crawler_Splash_last_by_type.html", type=type, type_name=type_name,
                            is_manager_connected=crawlers.get_splash_manager_connection_metadata(),
                            last_domains=list_domains, statDomains=statDomains,
                            crawler_metadata=crawler_metadata, date_from=date_string, date_to=date_string)

@hiddenServices.route("/crawlers/blacklisted_domains", methods=['GET'])
@login_required
@login_read_only
def blacklisted_domains():
    blacklist_domain = request.args.get('blacklist_domain')
    unblacklist_domain = request.args.get('unblacklist_domain')
    type = request.args.get('type')
    if type in list_types:
        type_name = dic_type_name[type]
        if blacklist_domain is not None:
            blacklist_domain = int(blacklist_domain)
        if unblacklist_domain is not None:
            unblacklist_domain = int(unblacklist_domain)
        try:
            page = int(request.args.get('page'))
        except:
            page = 1
        if page <= 0:
            page = 1
        nb_page_max = r_serv_onion.scard('blacklist_{}'.format(type))/(1000)
        if isinstance(nb_page_max, float):
            nb_page_max = int(nb_page_max)+1
        if page > nb_page_max:
            page = nb_page_max
        start = 1000*(page -1)
        stop = 1000*page

        list_blacklisted = list(r_serv_onion.smembers('blacklist_{}'.format(type)))
        list_blacklisted_1 = list_blacklisted[start:stop]
        list_blacklisted_2 = list_blacklisted[stop:stop+1000]
        return render_template("blacklisted_domains.html", list_blacklisted_1=list_blacklisted_1, list_blacklisted_2=list_blacklisted_2,
                                type=type, type_name=type_name, page=page, nb_page_max=nb_page_max,
                                blacklist_domain=blacklist_domain, unblacklist_domain=unblacklist_domain)
    else:
        return 'Incorrect Type'

@hiddenServices.route("/crawler/blacklist_domain", methods=['GET'])
@login_required
@login_analyst
def blacklist_domain():
    domain = request.args.get('domain')
    type = request.args.get('type')
    try:
        page = int(request.args.get('page'))
    except:
        page = 1
    if type in list_types:
        if is_valid_domain(domain):
            res = r_serv_onion.sadd('blacklist_{}'.format(type), domain)
            if page:
                if res == 0:
                    return redirect(url_for('hiddenServices.blacklisted_domains', page=page, type=type, blacklist_domain=2))
                else:
                    return redirect(url_for('hiddenServices.blacklisted_domains', page=page, type=type, blacklist_domain=1))
        else:
            return redirect(url_for('hiddenServices.blacklisted_domains', page=page, type=type, blacklist_domain=0))
    else:
        return 'Incorrect type'

@hiddenServices.route("/crawler/unblacklist_domain", methods=['GET'])
@login_required
@login_analyst
def unblacklist_domain():
    domain = request.args.get('domain')
    type = request.args.get('type')
    try:
        page = int(request.args.get('page'))
    except:
        page = 1
    if type in list_types:
        if is_valid_domain(domain):
            res = r_serv_onion.srem('blacklist_{}'.format(type), domain)
            if page:
                if res == 0:
                    return redirect(url_for('hiddenServices.blacklisted_domains', page=page, type=type, unblacklist_domain=2))
                else:
                    return redirect(url_for('hiddenServices.blacklisted_domains', page=page, type=type, unblacklist_domain=1))
        else:
            return redirect(url_for('hiddenServices.blacklisted_domains', page=page, type=type, unblacklist_domain=0))
    else:
        return 'Incorrect type'

@hiddenServices.route("/crawlers/auto_crawler", methods=['GET'])
@login_required
@login_read_only
def auto_crawler():
    nb_element_to_display = 100
    try:
        page = int(request.args.get('page'))
    except:
        page = 1
    if page <= 0:
        page = 1

    nb_auto_onion = r_serv_onion.scard('auto_crawler_url:onion')
    nb_auto_i2p = r_serv_onion.scard('auto_crawler_url:i2p')
    nb_auto_regular = r_serv_onion.scard('auto_crawler_url:regular')

    if nb_auto_onion > nb_auto_regular:
        if nb_auto_onion > nb_auto_i2p:
            nb_max = nb_auto_onion
        else:
            nb_max = nb_auto_i2p
    elif nb_auto_regular > nb_auto_i2p:
        nb_max = nb_auto_regular
    else:
        nb_max = nb_auto_i2p

    nb_page_max = nb_max/(nb_element_to_display)
    if isinstance(nb_page_max, float):
        nb_page_max = int(nb_page_max)+1
    if page > nb_page_max:
        page = nb_page_max
    start = nb_element_to_display*(page -1)
    stop = nb_element_to_display*page

    last_auto_crawled = get_last_domains_crawled('auto_crawled')
    last_domains = get_last_crawled_domains_metadata(last_auto_crawled, '')

    if start > nb_auto_onion:
        auto_crawler_domain_onions = []
    elif stop > nb_auto_onion:
        auto_crawler_domain_onions = list(r_serv_onion.smembers('auto_crawler_url:onion'))[start:nb_auto_onion]
    else:
        auto_crawler_domain_onions = list(r_serv_onion.smembers('auto_crawler_url:onion'))[start:stop]

    if start > nb_auto_i2p:
        auto_crawler_domain_i2p = []
    elif stop > nb_auto_i2p:
        auto_crawler_domain_i2p = list(r_serv_onion.smembers('auto_crawler_url:i2p'))[start:nb_auto_i2p]
    else:
        auto_crawler_domain_i2p = list(r_serv_onion.smembers('auto_crawler_url:i2p'))[start:stop]

    if start > nb_auto_regular:
        auto_crawler_domain_regular = []
    elif stop > nb_auto_regular:
        auto_crawler_domain_regular = list(r_serv_onion.smembers('auto_crawler_url:regular'))[start:nb_auto_regular]
    else:
        auto_crawler_domain_regular = list(r_serv_onion.smembers('auto_crawler_url:regular'))[start:stop]

    auto_crawler_domain_onions_metadata = get_last_crawled_domains_metadata(auto_crawler_domain_onions, '', type='onion', auto_mode=True)
    auto_crawler_domain_i2p_metadata = get_last_crawled_domains_metadata(auto_crawler_domain_i2p, '', type='i2p', auto_mode=True)
    auto_crawler_domain_regular_metadata = get_last_crawled_domains_metadata(auto_crawler_domain_regular, '', type='regular', auto_mode=True)

    return render_template("Crawler_auto.html", page=page, nb_page_max=nb_page_max,
                            last_domains=last_domains,
                            is_manager_connected=crawlers.get_splash_manager_connection_metadata(),
                            auto_crawler_domain_onions_metadata=auto_crawler_domain_onions_metadata,
                            auto_crawler_domain_i2p_metadata=auto_crawler_domain_i2p_metadata,
                            auto_crawler_domain_regular_metadata=auto_crawler_domain_regular_metadata)

@hiddenServices.route("/crawlers/remove_auto_crawler", methods=['GET'])
@login_required
@login_analyst
def remove_auto_crawler():
    url = request.args.get('url')
    page = request.args.get('page')

    if url:
        delete_auto_crawler(url)
    return redirect(url_for('hiddenServices.auto_crawler', page=page))

# # TODO: refactor
@hiddenServices.route("/hiddenServices/last_crawled_domains_with_stats_json", methods=['GET'])
@login_required
@login_read_only
def last_crawled_domains_with_stats_json():
    last_onions = r_serv_onion.lrange('last_onion', 0 ,-1)
    list_onion = []

    now = datetime.datetime.now()
    date = '{}{}{}'.format(now.strftime("%Y"), now.strftime("%m"), now.strftime("%d"))
    statDomains = {}
    statDomains['domains_up'] = r_serv_onion.scard('onion_up:{}'.format(date))
    statDomains['domains_down'] = r_serv_onion.scard('onion_down:{}'.format(date))
    statDomains['total'] = statDomains['domains_up'] + statDomains['domains_down']
    statDomains['domains_queue'] = r_serv_onion.scard('onion_domain_crawler_queue')

    for onion in last_onions:
        metadata_onion = {}
        metadata_onion['domain'] = onion
        metadata_onion['last_check'] = r_serv_onion.hget('onion_metadata:{}'.format(onion), 'last_check')
        if metadata_onion['last_check'] is None:
            metadata_onion['last_check'] = '********'
        metadata_onion['first_seen'] = r_serv_onion.hget('onion_metadata:{}'.format(onion), 'first_seen')
        if metadata_onion['first_seen'] is None:
            metadata_onion['first_seen'] = '********'
        if get_onion_status(onion, metadata_onion['last_check']):
            metadata_onion['status_text'] = 'UP'
            metadata_onion['status_color'] = 'Green'
            metadata_onion['status_icon'] = 'fa-check-circle'
        else:
            metadata_onion['status_text'] = 'DOWN'
            metadata_onion['status_color'] = 'Red'
            metadata_onion['status_icon'] = 'fa-times-circle'
        list_onion.append(metadata_onion)

    crawler_metadata = []
    all_onion_crawler = r_cache.smembers('all_crawler:onion')
    for crawler in all_onion_crawler:
        crawling_domain = r_cache.hget('metadata_crawler:{}'.format(crawler), 'crawling_domain')
        started_time = r_cache.hget('metadata_crawler:{}'.format(crawler), 'started_time')
        status_info = r_cache.hget('metadata_crawler:{}'.format(crawler), 'status')
        crawler_info = '{} - {}'.format(crawler, started_time)
        if status_info == 'Waiting' or status_info == 'Crawling':
            status = True
        else:
            status = False
        crawler_metadata.append({'crawler_info': crawler_info, 'crawling_domain': crawling_domain, 'status_info': status_info, 'status': status})

    date_string = '{}-{}-{}'.format(date[0:4], date[4:6], date[6:8])

    return jsonify({'last_onions': list_onion, 'statDomains': statDomains, 'crawler_metadata': crawler_metadata})

@hiddenServices.route("/hiddenServices/get_onions_by_daterange", methods=['POST'])
@login_required
@login_read_only
def get_onions_by_daterange():
    date_from = request.form.get('date_from')
    date_to = request.form.get('date_to')
    service_type = request.form.get('service_type')
    domains_up = request.form.get('domains_up')
    domains_down = request.form.get('domains_down')
    domains_tags = request.form.get('domains_tags')

    return redirect(url_for('hiddenServices.show_domains_by_daterange', date_from=date_from, date_to=date_to, service_type=service_type, domains_up=domains_up, domains_down=domains_down, domains_tags=domains_tags))

@hiddenServices.route("/hiddenServices/show_domains_by_daterange", methods=['GET'])
@login_required
@login_read_only
def show_domains_by_daterange():
    date_from = request.args.get('date_from')
    date_to = request.args.get('date_to')
    service_type = request.args.get('service_type')
    domains_up = request.args.get('domains_up')
    domains_down = request.args.get('domains_down')
    domains_tags = request.args.get('domains_tags')

    # incorrect service type
    if not is_valid_service_type(service_type):
        service_type = 'onion'

    type_name = dic_type_name[service_type]

    date_range = []
    if date_from is not None and date_to is not None:
        # change format
        try:
            if len(date_from) != 8:
                date_from = date_from[0:4] + date_from[5:7] + date_from[8:10]
                date_to = date_to[0:4] + date_to[5:7] + date_to[8:10]
            date_range = substract_date(date_from, date_to)
        except:
            pass

    if not date_range:
        date_range.append(datetime.date.today().strftime("%Y%m%d"))
        date_from = date_range[0][0:4] + '-' + date_range[0][4:6] + '-' + date_range[0][6:8]
        date_to = date_from

    else:
        date_from = date_from[0:4] + '-' + date_from[4:6] + '-' + date_from[6:8]
        date_to = date_to[0:4] + '-' + date_to[4:6] + '-' + date_to[6:8]

    statDomains = {}
    statDomains['domains_up'] = 0
    statDomains['domains_down'] = 0
    statDomains['total'] = 0
    statDomains['domains_queue'] = get_nb_domains_inqueue(service_type)

    domains_by_day = {}
    domain_metadata = {}
    stats_by_date = {}
    for date in date_range:
        stats_by_date[date] = {}
        stats_by_date[date]['domain_up'] = 0
        stats_by_date[date]['domain_down'] = 0
        if domains_up:
            domains_up = True
            domains_by_day[date] = list(r_serv_onion.smembers('{}_up:{}'.format(service_type, date)))
            for domain in domains_by_day[date]:
                h = HiddenServices(domain, 'onion')
                domain_metadata[domain] = {}
                if domains_tags:
                    domains_tags = True
                    domain_metadata[domain]['tags'] = h.get_domain_tags(update=True)

                domain_metadata[domain]['last_check'] = r_serv_onion.hget('{}_metadata:{}'.format(service_type, domain), 'last_check')
                if domain_metadata[domain]['last_check'] is None:
                    domain_metadata[domain]['last_check'] = '********'
                domain_metadata[domain]['first_seen'] = r_serv_onion.hget('{}_metadata:{}'.format(service_type, domain), 'first_seen')
                if domain_metadata[domain]['first_seen'] is None:
                    domain_metadata[domain]['first_seen'] = '********'
                domain_metadata[domain]['status_text'] = 'UP'
                domain_metadata[domain]['status_color'] = 'Green'
                domain_metadata[domain]['status_icon'] = 'fa-check-circle'
                statDomains['domains_up'] += 1
                stats_by_date[date]['domain_up'] += 1

        if domains_down:
            domains_down = True
            domains_by_day_down = list(r_serv_onion.smembers('{}_down:{}'.format(service_type, date)))
            if domains_up:
                domains_by_day[date].extend(domains_by_day_down)
            else:
                domains_by_day[date] = domains_by_day_down
            for domain in domains_by_day_down:
                #h = HiddenServices(onion_domain, 'onion')
                domain_metadata[domain] = {}
                #domain_metadata[domain]['tags'] = h.get_domain_tags()

                domain_metadata[domain]['last_check'] = r_serv_onion.hget('{}_metadata:{}'.format(service_type, domain), 'last_check')
                if domain_metadata[domain]['last_check'] is None:
                    domain_metadata[domain]['last_check'] = '********'
                domain_metadata[domain]['first_seen'] = r_serv_onion.hget('{}_metadata:{}'.format(service_type, domain), 'first_seen')
                if domain_metadata[domain]['first_seen'] is None:
                    domain_metadata[domain]['first_seen'] = '********'

                domain_metadata[domain]['status_text'] = 'DOWN'
                domain_metadata[domain]['status_color'] = 'Red'
                domain_metadata[domain]['status_icon'] = 'fa-times-circle'
                statDomains['domains_down'] += 1
                stats_by_date[date]['domain_down'] += 1

    statDomains['total'] = statDomains['domains_up'] + statDomains['domains_down']

    return render_template("domains.html", date_range=date_range, domains_by_day=domains_by_day,
                            statDomains=statDomains, type_name=type_name,
                            domain_metadata=domain_metadata,
                            stats_by_date=stats_by_date,
                            date_from=date_from, date_to=date_to, domains_up=domains_up, domains_down=domains_down,
                            domains_tags=domains_tags, type=service_type, bootstrap_label=bootstrap_label)

@hiddenServices.route("/crawlers/download_domain", methods=['GET'])
@login_required
@login_read_only
@no_cache
def download_domain():
    domain = request.args.get('domain')
    epoch = request.args.get('epoch')
    try:
        epoch = int(epoch)
    except:
        epoch = None
    port = request.args.get('port')
    faup.decode(domain)
    unpack_url = faup.get()

    ## TODO: # FIXME: remove me
    try:
        domain = unpack_url['domain'].decode()
    except:
        domain = unpack_url['domain']

    if not port:
        if unpack_url['port']:
            try:
                port = unpack_url['port'].decode()
            except:
                port = unpack_url['port']
        else:
            port = 80
    try:
        port = int(port)
    except:
        port = 80
    type = get_type_domain(domain)
    if domain is None or not r_serv_onion.exists('{}_metadata:{}'.format(type, domain)):
        return '404'
        # # TODO: FIXME return 404

    origin_paste = r_serv_onion.hget('{}_metadata:{}'.format(type, domain), 'paste_parent')

    h = HiddenServices(domain, type, port=port)
    item_core = h.get_domain_crawled_core_item(epoch=epoch)
    if item_core:
        l_pastes = h.get_last_crawled_pastes(item_root=item_core['root_item'])
    else:
        l_pastes = []
    #dict_links = h.get_all_links(l_pastes)

    zip_file = h.create_domain_basic_archive(l_pastes)
    filename = domain + '.zip'

    return send_file(zip_file, attachment_filename=filename, as_attachment=True)

@hiddenServices.route("/hiddenServices/onion_son", methods=['GET'])
@login_required
@login_analyst
def onion_son():
    onion_domain = request.args.get('onion_domain')

    h = HiddenServices(onion_domain, 'onion')
    l_pastes = h.get_last_crawled_pastes()
    l_son = h.get_domain_son(l_pastes)
    return 'l_son'

# ============= JSON ==============
@hiddenServices.route("/hiddenServices/domain_crawled_7days_json", methods=['GET'])
@login_required
@login_read_only
def domain_crawled_7days_json():
    type = 'onion'
    ## TODO: # FIXME: 404 error

    date_range = get_date_range(7)
    json_domain_stats = []
    #try:
    for date in date_range:
        nb_domain_up = r_serv_onion.scard('{}_up:{}'.format(type, date))
        nb_domain_down = r_serv_onion.scard('{}_down:{}'.format(type, date))
        date = date[0:4] + '-' + date[4:6] + '-' + date[6:8]
        json_domain_stats.append({'date': date, 'value': int(nb_domain_up), 'nb_domain_down': int(nb_domain_down)})
    #except:
    #    return jsonify()

    return jsonify(json_domain_stats)

@hiddenServices.route('/hiddenServices/domain_crawled_by_type_json')
@login_required
@login_read_only
def domain_crawled_by_type_json():
    current_date = request.args.get('date')
    type = request.args.get('type')
    if type in list_types:

        num_day_type = 7
        date_range = get_date_range(num_day_type)
        range_decoder = []
        for date in date_range:
            day_crawled = {}
            day_crawled['date'] = date[0:4] + '-' + date[4:6] + '-' + date[6:8]
            day_crawled['UP'] = r_serv_onion.scard('{}_up:{}'.format(type, date))
            day_crawled['DOWN'] = r_serv_onion.scard('{}_down:{}'.format(type, date))
            range_decoder.append(day_crawled)

        return jsonify(range_decoder)

    else:
        return jsonify('Incorrect Type')

# ========= REGISTRATION =========
app.register_blueprint(hiddenServices, url_prefix=baseUrl)