chg: [crawler] cookies migration + refactor

This commit is contained in:
Terrtia 2023-02-17 14:50:20 +01:00
parent 11cd571f44
commit c04bc7bb57
No known key found for this signature in database
GPG key ID: 1E1B1F50D84613D0
7 changed files with 535 additions and 352 deletions

View file

@ -64,11 +64,11 @@ spec.loader.exec_module(old_Investigations)
old_Investigations.r_tracking = r_serv_tracker
from lib import crawlers
spec = importlib.util.find_spec('lib.crawlers')
old_crawlers = importlib.util.module_from_spec(spec)
spec.loader.exec_module(old_crawlers)
old_crawlers.r_serv_onion = r_crawler
# spec = importlib.util.find_spec('lib.crawlers')
# old_crawlers = importlib.util.module_from_spec(spec)
# spec.loader.exec_module(old_crawlers)
#
# old_crawlers.r_serv_onion = r_crawler
# # TODO: disable features - credentials - stats ? - sentiment analysis
@ -411,6 +411,39 @@ def items_migration():
# #
###############################
def get_all_cookiejar():
    """Return the set of legacy cookiejar UUIDs registered under 'cookiejar:all'."""
    cookiejar_uuids = r_crawler.smembers('cookiejar:all')
    return cookiejar_uuids
def get_cookiejar_level(cookiejar_uuid):
    """Return the ACL level of a legacy cookiejar (0 = user-private, 1 = global).

    Falls back to 0 when the 'level' field is missing or not an integer.
    """
    level = r_crawler.hget(f'cookiejar_metadata:{cookiejar_uuid}', 'level')
    try:
        level = int(level)
    except (TypeError, ValueError):
        # was a bare `except:` which also swallowed KeyboardInterrupt/SystemExit;
        # None (missing field) raises TypeError, garbage raises ValueError
        level = 0
    return level
def get_cookiejar_metadata(cookiejar_uuid):
    """Fetch the legacy metadata of a cookiejar; empty dict when it is unknown."""
    meta_key = f'cookiejar_metadata:{cookiejar_uuid}'
    if not r_crawler.exists(meta_key):
        return {}
    return {
        'uuid': cookiejar_uuid,
        'description': r_crawler.hget(meta_key, 'description'),
        'date': r_crawler.hget(meta_key, 'date'),
        'user': r_crawler.hget(meta_key, 'user_id'),
        'level': get_cookiejar_level(cookiejar_uuid),
    }
def get_cookiejar_cookies_uuid(cookiejar_uuid):
    """Return the cookie UUIDs attached to a legacy cookiejar, [] when there are none."""
    return r_crawler.smembers(f'cookiejar:{cookiejar_uuid}:cookies:uuid') or []
def get_cookie_dict(cookie_uuid):
    """Return every field of a legacy cookie as a dict (empty dict when unknown).

    Uses a single HGETALL instead of HKEYS plus one HGET per field: one
    round trip and no window for the hash to change between reads.
    """
    return r_crawler.hgetall(f'cookiejar:cookie:{cookie_uuid}')
# Return last crawled domains by type
# domain;epoch
def get_last_crawled_domains(domain_type):
@ -422,9 +455,9 @@ def get_domains_blacklist(domain_type):
def crawler_migration():
print('CRAWLER MIGRATION...')
for domain_type in ['onion', 'regular']:
for domain in get_domains_blacklist(domain_type):
crawlers.add_domain_blacklist(domain_type, domain)
# for domain_type in ['onion', 'regular']:
# for domain in get_domains_blacklist(domain_type):
# crawlers.add_domain_blacklist(domain_type, domain)
# for domain_type in ['onion', 'regular']:
# for row in get_last_crawled_domains(domain_type):
@ -433,15 +466,21 @@ def crawler_migration():
# print(domain, port, epoch)
# #crawlers.add_last_crawled_domain(domain_type, domain, port, epoch)
# for cookiejar_uuid in old_crawlers.get_all_cookiejar():
# meta = old_crawlers.get_cookiejar_metadata(cookiejar_uuid, level=True)
# #print(meta)
# crawlers.create_cookiejar(meta['user_id'], level=meta['level'], description=meta['description'], cookiejar_uuid=cookiejar_uuid)
# crawlers._set_cookiejar_date(meta['date'])
#
# for meta_cookie, cookie_uuid in old_crawlers.get_cookiejar_cookies_list(cookiejar_uuid, add_cookie_uuid=True):
# print(cookie_uuid)
# crawlers.add_cookie_to_cookiejar(cookiejar_uuid, meta_cookie, cookie_uuid=cookie_uuid)
for cookiejar_uuid in get_all_cookiejar():
meta = get_cookiejar_metadata(cookiejar_uuid)
if meta:
# print(meta)
cookiejar = crawlers.Cookiejar(meta['uuid'])
if not cookiejar.exists():
crawlers.create_cookiejar(meta['user'], description=meta['description'], level=meta['level'],
cookiejar_uuid=meta['uuid'])
cookiejar._set_date(meta['date'])
for cookie_uuid in get_cookiejar_cookies_uuid(meta['uuid']):
cookie_dict = get_cookie_dict(cookie_uuid)
if cookie_dict:
# print(cookie_dict)
crawlers.api_create_cookie(meta['user'], cookiejar_uuid, cookie_dict)
# TODO: auto crawler -> to Fix / change
@ -876,11 +915,11 @@ def cves_migration():
if __name__ == '__main__':
core_migration()
user_migration()
# core_migration()
# user_migration()
#tags_migration()
# items_migration()
# crawler_migration()
crawler_migration()
# domain_migration() # TO TEST ###########################
# decodeds_migration()
# screenshots_migration()

View file

@ -38,7 +38,6 @@ from packages import git_status
from lib.ConfigLoader import ConfigLoader
from lib.objects.Domains import Domain
from lib.objects.Items import Item
from core import screen
config_loader = ConfigLoader()
r_db = config_loader.get_db_conn("Kvrocks_DB")
@ -210,115 +209,224 @@ def load_crawler_cookies(cookiejar_uuid, domain, crawler_type='web'):
return all_cookies
################################################################################
################################################################################
################################################################################
def get_all_cookiejar():
return r_serv_onion.smembers('cookiejar:all')
def get_cookiejars():
    """Return the set of all cookiejar UUIDs."""
    all_cookiejars = r_crawler.smembers('cookiejars:all')
    return all_cookiejars
def get_global_cookiejar():
cookiejars = r_serv_onion.smembers('cookiejar:global')
def get_cookiejars_global():
    """Return the UUIDs of global (shared) cookiejars, [] when there are none."""
    return r_crawler.smembers('cookiejars:global') or []
def get_user_cookiejar(user_id):
cookiejars = r_serv_onion.smembers('cookiejar:user:{}'.format(user_id))
def get_cookiejars_user(user_id):
    """Return the UUIDs of a user's private cookiejars, [] when there are none."""
    return r_crawler.smembers(f'cookiejars:user:{user_id}') or []
def exist_cookiejar(cookiejar_uuid):
return r_serv_onion.exists('cookiejar_metadata:{}'.format(cookiejar_uuid))
class Cookiejar:
def _set_cookiejar_date(cookiejar_uuid, date):
r_serv_onion.hset(f'cookiejar_metadata:{cookiejar_uuid}', 'date', date)
def __init__(self, cookiejar_uuid):
self.uuid = cookiejar_uuid
# # TODO: sanitize cookie_uuid
def create_cookiejar(user_id, level=1, description=None, cookiejar_uuid=None):
if not cookiejar_uuid:
cookiejar_uuid = str(uuid.uuid4())
def exists(self):
return r_crawler.exists(f'cookiejar:meta:{self.uuid}') # or cookiejar:uuid
r_serv_onion.sadd('cookiejar:all', cookiejar_uuid)
if level == 0:
r_serv_onion.sadd(f'cookiejar:user:{user_id}', cookiejar_uuid)
def get_date(self):
return r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'date')
def _set_date(self, date):
r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'date', date)
def get_description(self):
return r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'description')
def set_description(self, description):
r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'description', description)
def get_user(self):
return r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'user')
def _set_user(self, user_id):
return r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'user', user_id)
def get_level(self):
    """Return the cookiejar ACL level: 0 = user-private, 1 = global.

    Redis returns the stored level as a string, so it must be converted:
    the previous truthiness test treated the string '0' as true and
    reported every private jar as global (breaking delete()'s index cleanup).
    """
    level = r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'level')
    try:
        return int(level)
    except (TypeError, ValueError):
        # missing or corrupted field -> user-private, matching the old falsy path
        return 0
def _set_level(self, level):
    """Persist the ACL level, normalised to 1 (global) or 0 (user-private)."""
    normalized = 1 if level else 0
    r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'level', normalized)
def is_cookie_in_jar(self, cookie_uuid):
return r_crawler.sismember(f'cookiejar:cookies:{self.uuid}', cookie_uuid)
def get_cookies_uuid(self):
return r_crawler.smembers(f'cookiejar:cookies:{self.uuid}')
def get_cookies(self, r_json=False):
    """Return the meta dict of every cookie in this jar."""
    return [Cookie(cookie_uuid).get_meta(r_json=r_json)
            for cookie_uuid in self.get_cookies_uuid()]
def get_nb_cookies(self):
return r_crawler.scard(f'cookiejar:cookies:{self.uuid}')
def get_meta(self, level=False, nb_cookies=False, cookies=False, r_json=False):
    """Return this cookiejar's metadata as a dict.

    Args:
        level: also include the ACL level.
        nb_cookies: also include the number of cookies in the jar.
        cookies: also include each cookie's meta.
        r_json: forwarded to Cookie.get_meta when cookies are included.
    """
    meta = {'uuid': self.uuid,
            'date': self.get_date(),
            'description': self.get_description(),
            'user': self.get_user()}
    if level:
        meta['level'] = self.get_level()
    if nb_cookies:
        meta['nb_cookies'] = self.get_nb_cookies()
    if cookies:
        meta['cookies'] = self.get_cookies(r_json=r_json)
    return meta
def add_cookie(self, name, value, cookie_uuid=None, domain=None, httponly=None, path=None, secure=None, text=None):
    """Create a cookie in this jar and return its UUID.

    A caller-supplied cookie_uuid is kept unless it collides with an
    existing cookie, in which case a fresh UUID is generated instead of
    overwriting the existing one.
    """
    if cookie_uuid:
        cookie = Cookie(cookie_uuid)
        if cookie.exists():
            cookie_uuid = generate_uuid()
    else:
        cookie_uuid = generate_uuid()
    r_crawler.sadd(f'cookiejar:cookies:{self.uuid}', cookie_uuid)
    cookie = Cookie(cookie_uuid)
    cookie.set_cookiejar(self.uuid)
    cookie.set_field('name', name)
    cookie.set_field('value', value)
    if domain:
        cookie.set_field('domain', domain)
    if httponly:
        cookie.set_field('httpOnly', str(httponly))
    if path:
        cookie.set_field('path', path)
    if secure:
        cookie.set_field('secure', str(secure))
    if text:
        # was written to the 'path' field, clobbering any real path value
        cookie.set_field('text', text)
    return cookie_uuid
def delete_cookie(self, cookie_uuid):
    """Delete a cookie, but only if it actually belongs to this jar."""
    if not self.is_cookie_in_jar(cookie_uuid):
        return
    Cookie(cookie_uuid).delete()
# TODO INIT with COOKIES ?????
def create(self, user_id, description=None, level=1):
    """Register this cookiejar in Redis: indexes, owner, date, level, description.

    Raises:
        Exception: if a cookiejar with this UUID already exists.
    """
    if self.exists():
        raise Exception('Cookiejar already exists')
    r_crawler.sadd('cookiejars:all', self.uuid)
    # level 0 -> indexed under its owner only; anything else -> shared globally
    if level == 0:
        r_crawler.sadd(f'cookiejars:user:{user_id}', self.uuid)
    else:
        r_crawler.sadd('cookiejars:global', self.uuid)
    self._set_user(user_id)
    self._set_date(datetime.now().strftime("%Y%m%d"))
    self._set_level(level)
    if description:
        self.set_description(description)
def delete(self):
    """Remove this cookiejar: its index entries, its meta hash and all its cookies."""
    if self.get_level() == 0:
        user = self.get_user()
        if user:
            r_crawler.srem(f'cookiejars:user:{user}', self.uuid)
    else:
        # was srem on 'cookiejar:global' (singular), which never matches the
        # 'cookiejars:global' index written by create() -> global jars leaked
        r_crawler.srem('cookiejars:global', self.uuid)
    r_crawler.srem('cookiejars:all', self.uuid)
    r_crawler.delete(f'cookiejar:meta:{self.uuid}')
    for cookie_uuid in self.get_cookies_uuid():
        self.delete_cookie(cookie_uuid)
def create_cookiejar(user_id, description=None, level=1, cookiejar_uuid=None):
if cookiejar_uuid:
cookiejar = Cookiejar(cookiejar_uuid)
if cookiejar.exists():
cookiejar_uuid = generate_uuid()
else:
r_serv_onion.sadd('cookiejar:global', cookiejar_uuid)
# metadata
r_serv_onion.hset(f'cookiejar_metadata:{cookiejar_uuid}', 'user_id', user_id)
r_serv_onion.hset(f'cookiejar_metadata:{cookiejar_uuid}', 'level', level)
r_serv_onion.hset(f'cookiejar_metadata:{cookiejar_uuid}', 'description', description)
_set_cookiejar_date(cookiejar_uuid, datetime.now().strftime("%Y%m%d"))
cookiejar_uuid = generate_uuid()
cookiejar = Cookiejar(cookiejar_uuid)
cookiejar.create(user_id, description=description, level=level)
return cookiejar_uuid
# if json_cookies:
# json_cookies = json.loads(json_cookies) # # TODO: catch Exception
# r_serv_onion.set('cookies:json_cookies:{}'.format(cookies_uuid), json.dumps(json_cookies))
#
# for cookie_dict in l_cookies:
# r_serv_onion.hset('cookies:manual_cookies:{}'.format(cookies_uuid), cookie_dict['name'], cookie_dict['value'])
return cookiejar_uuid
def delete_cookie_jar(cookiejar_uuid):
level = get_cookiejar_level(cookiejar_uuid)
if level == 0:
user_id = get_cookiejar_owner(cookiejar_uuid)
r_serv_onion.srem('cookiejar:user:{}'.format(user_id), cookiejar_uuid)
else:
r_serv_onion.srem('cookiejar:global', cookiejar_uuid)
r_serv_onion.delete('cookiejar_metadata:{}'.format(cookiejar_uuid))
def get_cookiejar_cookies_uuid(cookiejar_uuid):
cookies = r_serv_onion.smembers('cookiejar:{}:cookies:uuid'.format(cookiejar_uuid))
if not cookies:
cookies = []
return cookies
def get_cookiejar_cookies_list(cookiejar_uuid, add_cookie_uuid=False):
l_cookiejar = []
for cookie_uuid in get_cookiejar_cookies_uuid(cookiejar_uuid):
if add_cookie_uuid:
l_cookiejar.append((get_cookie_dict(cookie_uuid), cookie_uuid))
else:
l_cookiejar.append(get_cookie_dict(cookie_uuid))
return l_cookiejar
## Cookiejar metadata ##
def get_cookiejar_description(cookiejar_uuid):
return r_serv_onion.hget('cookiejar_metadata:{}'.format(cookiejar_uuid), 'description')
def get_cookiejar_date(cookiejar_uuid):
return r_serv_onion.hget('cookiejar_metadata:{}'.format(cookiejar_uuid), 'date')
def get_cookiejar_owner(cookiejar_uuid):
return r_serv_onion.hget('cookiejar_metadata:{}'.format(cookiejar_uuid), 'user_id')
def get_cookiejar_date(cookiejar_uuid):
return r_serv_onion.hget('cookiejar_metadata:{}'.format(cookiejar_uuid), 'date')
def get_cookiejar_level(cookiejar_uuid):
level = r_serv_onion.hget('cookiejar_metadata:{}'.format(cookiejar_uuid), 'level')
if not level:
level = 1
return int(level)
def get_cookiejar_metadata(cookiejar_uuid, level=False):
dict_cookiejar = {}
if exist_cookiejar(cookiejar_uuid):
dict_cookiejar['cookiejar_uuid'] = cookiejar_uuid
dict_cookiejar['description'] = get_cookiejar_description(cookiejar_uuid)
dict_cookiejar['date'] = get_cookiejar_date(cookiejar_uuid)
dict_cookiejar['user_id'] = get_cookiejar_owner(cookiejar_uuid)
if level:
dict_cookiejar['level'] = get_cookiejar_level(cookiejar_uuid)
return dict_cookiejar
def get_cookiejar_metadata_by_iterator(iter_cookiejar_uuid):
l_cookiejar_metadata = []
def get_cookiejars_meta_by_iterator(iter_cookiejar_uuid):
cookiejars_meta = []
for cookiejar_uuid in iter_cookiejar_uuid:
l_cookiejar_metadata.append(get_cookiejar_metadata(cookiejar_uuid))
return l_cookiejar_metadata
cookiejar = Cookiejar(cookiejar_uuid)
cookiejars_meta.append(cookiejar.get_meta(nb_cookies=True))
return cookiejars_meta
def edit_cookiejar_description(cookiejar_uuid, description):
r_serv_onion.hset('cookiejar_metadata:{}'.format(cookiejar_uuid), 'description', description)
def get_cookiejars_by_user(user_id):
    """Return the cookiejars a user can use: their private jars first, then the globals."""
    return list(get_cookiejars_user(user_id)) + list(get_cookiejars_global())
## API ##
def api_get_cookiejars_selector(user_id):
    """Build the sorted '<description> : <uuid>' entries for the cookiejar select box."""
    return sorted(f'{Cookiejar(jar_uuid).get_description()} : {jar_uuid}'
                  for jar_uuid in get_cookiejars_by_user(user_id))
def api_verify_cookiejar_acl(cookiejar_uuid, user_id):
    """Check existence and access rights; return an (error, status) pair, or None when allowed."""
    cookiejar = Cookiejar(cookiejar_uuid)
    if not cookiejar.exists():
        return {'error': 'unknown cookiejar uuid', 'cookiejar_uuid': cookiejar_uuid}, 404
    # private jars are only visible to their owner  # TODO: check if user is admin
    if cookiejar.get_level() == 0 and cookiejar.get_user() != user_id:
        return {'error': 'The access to this cookiejar is restricted'}, 403
def api_edit_cookiejar_description(user_id, cookiejar_uuid, description):
    """Update a cookiejar description after an ACL check; returns (payload, status)."""
    error = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
    if error:
        return error
    Cookiejar(cookiejar_uuid).set_description(description)
    return {'cookiejar_uuid': cookiejar_uuid}, 200
def api_delete_cookiejar(user_id, cookiejar_uuid):
    """Delete a cookiejar after an ACL check; returns (payload, status)."""
    error = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
    if error:
        return error
    Cookiejar(cookiejar_uuid).delete()
    return {'cookiejar_uuid': cookiejar_uuid}, 200
def api_get_cookiejar(cookiejar_uuid, user_id):
    """Return the full cookiejar meta (level + cookies, JSON-ready) or an ACL error."""
    error = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
    if error:
        return error
    jar = Cookiejar(cookiejar_uuid)
    return jar.get_meta(level=True, cookies=True, r_json=True), 200
# # # # # # # #
# #
@ -336,85 +444,162 @@ def edit_cookiejar_description(cookiejar_uuid, description):
# - httpOnly (optional)
# - text (optional)
# # # #
def get_cookie_all_keys_name():
return ['name', 'value', 'domain', 'path', 'httpOnly', 'secure']
def exists_cookie(cookie_uuid):
if int(r_serv_onion.scard(f'cookies:map:cookiejar:{cookie_uuid}')) > 0:
return True
return False
# TODO MISP Import
def get_cookie_value(cookie_uuid, name):
return r_serv_onion.hget(f'cookiejar:cookie:{cookie_uuid}', name)
class Cookie:
def set_cookie_value(cookie_uuid, name, value):
r_serv_onion.hset(f'cookiejar:cookie:{cookie_uuid}', name, value)
def __init__(self, cookie_uuid):
self.uuid = cookie_uuid
def delete_cookie_value(cookie_uuid, name):
r_serv_onion.hdel(f'cookiejar:cookie:{cookie_uuid}', name)
def exists(self):
return r_crawler.exists(f'cookie:meta:{self.uuid}')
def get_cookie_dict(cookie_uuid):
cookie_dict = {}
for key_name in r_serv_onion.hkeys(f'cookiejar:cookie:{cookie_uuid}'):
cookie_dict[key_name] = get_cookie_value(cookie_uuid, key_name)
return cookie_dict
def get_cookiejar(self):
return r_crawler.hget(f'cookie:meta:{self.uuid}', 'cookiejar')
# name, value, path=None, httpOnly=None, secure=None, domain=None, text=None
def add_cookie_to_cookiejar(cookiejar_uuid, cookie_dict, cookie_uuid=None):
# # TODO: sanitize cookie_uuid
if not cookie_uuid:
cookie_uuid = generate_uuid()
r_serv_onion.sadd(f'cookiejar:{cookiejar_uuid}:cookies:uuid', cookie_uuid)
r_serv_onion.sadd(f'cookies:map:cookiejar:{cookie_uuid}', cookiejar_uuid)
def set_cookiejar(self, cookiejar_uuid):
r_crawler.hset(f'cookie:meta:{self.uuid}', 'cookiejar', cookiejar_uuid)
set_cookie_value(cookie_uuid, 'name', cookie_dict['name'])
set_cookie_value(cookie_uuid, 'value', cookie_dict['value'])
if 'path' in cookie_dict:
set_cookie_value(cookie_uuid, 'path', cookie_dict['path'])
if 'httpOnly' in cookie_dict:
set_cookie_value(cookie_uuid, 'httpOnly', cookie_dict['httpOnly'])
if 'secure' in cookie_dict:
set_cookie_value(cookie_uuid, 'secure', cookie_dict['secure'])
if 'domain' in cookie_dict:
set_cookie_value(cookie_uuid, 'domain', cookie_dict['domain'])
if 'text' in cookie_dict:
set_cookie_value(cookie_uuid, 'text', cookie_dict['text'])
return cookie_uuid
def get_name(self):
return r_crawler.hget(f'cookie:meta:{self.uuid}', 'name')
def add_cookies_to_cookiejar(cookiejar_uuid, l_cookies):
for cookie_dict in l_cookies:
add_cookie_to_cookiejar(cookiejar_uuid, cookie_dict)
def get_value(self):
return r_crawler.hget(f'cookie:meta:{self.uuid}', 'value')
def delete_all_cookies_from_cookiejar(cookiejar_uuid):
for cookie_uuid in get_cookiejar_cookies_uuid(cookiejar_uuid):
delete_cookie_from_cookiejar(cookiejar_uuid, cookie_uuid)
def _get_field(self, field):
return r_crawler.hget(f'cookie:meta:{self.uuid}', field)
def delete_cookie_from_cookiejar(cookiejar_uuid, cookie_uuid):
r_serv_onion.srem(f'cookiejar:{cookiejar_uuid}:cookies:uuid', cookie_uuid)
r_serv_onion.srem(f'cookies:map:cookiejar:{cookie_uuid}', cookiejar_uuid)
if not exists_cookie(cookie_uuid):
r_serv_onion.delete(f'cookiejar:cookie:{cookie_uuid}')
def set_field(self, field, value):
return r_crawler.hset(f'cookie:meta:{self.uuid}', field, value)
def edit_cookie(cookiejar_uuid, cookie_uuid, cookie_dict):
# delete old keys
for key_name in r_serv_onion.hkeys(f'cookiejar:cookie:{cookie_uuid}'):
if key_name not in cookie_dict:
delete_cookie_value(cookie_uuid, key_name)
# add new keys
cookie_all_keys_name = get_cookie_all_keys_name()
for key_name in cookie_dict:
if key_name in cookie_all_keys_name:
set_cookie_value(cookie_uuid, key_name, cookie_dict[key_name])
def remove_field(self, field):
return r_crawler.hdel(f'cookie:meta:{self.uuid}', field)
def get_fields(self):
    """Return the cookie's field names, minus the internal 'cookiejar' back-pointer."""
    return set(r_crawler.hkeys(f'cookie:meta:{self.uuid}')) - {'cookiejar'}
# def get_domain(self):
# return r_crawler.hget(f'cookie:meta:{self.uuid}', 'domain')
#
# def get_path(self):
# return r_crawler.hget(f'cookie:meta:{self.uuid}', 'path')
#
# def get_httpOnly(self):
# return r_crawler.hget(f'cookie:meta:{self.uuid}', 'httpOnly')
#
# def get_secure(self):
# return r_crawler.hget(f'cookie:meta:{self.uuid}', 'secure')
# TODO expire ????
def get_meta(self, r_json=False):
    """Return the cookie fields as a dict; with r_json, as a pretty JSON dump under 'data'."""
    meta = {}
    for field in self.get_fields():
        field_value = self._get_field(field)
        if field_value:
            meta[field] = field_value
    if r_json:
        # the flat dict is replaced by its JSON dump; only uuid stays top-level
        meta = {'data': json.dumps(meta, indent=4, sort_keys=True)}
    meta['uuid'] = self.uuid
    return meta
def edit(self, cookie_dict):
    """Replace this cookie's fields with those from cookie_dict (empty values dropped)."""
    # drop stored fields that are absent from the new dict
    for field in self.get_fields():
        if field not in cookie_dict:
            self.remove_field(field)
    # write the new fields; booleans are persisted as their string form
    for field in cookie_dict:
        new_value = cookie_dict[field]
        if not new_value:
            continue
        if field in ('secure', 'httpOnly'):
            new_value = str(new_value)
        self.set_field(field, new_value)
def delete(self):
    """Delete the cookie's meta hash and detach it from its owning cookiejar."""
    cookiejar_uuid = self.get_cookiejar()
    r_crawler.delete(f'cookie:meta:{self.uuid}')
    r_crawler.srem(f'cookiejar:cookies:{cookiejar_uuid}', self.uuid)
## API ##
def api_get_cookie(user_id, cookie_uuid):
    """Return a cookie's meta after existence and ACL checks; returns (payload, status)."""
    cookie = Cookie(cookie_uuid)
    if not cookie.exists():
        return {'error': 'unknown cookie uuid', 'cookie_uuid': cookie_uuid}, 404
    resp = api_verify_cookiejar_acl(cookie.get_cookiejar(), user_id)
    if resp:
        return resp
    # siblings (api_edit_cookie, api_delete_cookie) return (payload, status);
    # the bare `return cookie.get_meta()` made the success case inconsistent
    return cookie.get_meta(), 200
def api_edit_cookie(user_id, cookie_uuid, cookie_dict):
    """Validate and edit a cookie; returns (payload, status)."""
    cookie = Cookie(cookie_uuid)
    if not cookie.exists():
        return {'error': 'unknown cookie uuid', 'cookie_uuid': cookie_uuid}, 404
    error = api_verify_cookiejar_acl(cookie.get_cookiejar(), user_id)
    if error:
        return error
    # both name and value must be present and non-empty
    if not cookie_dict.get('name') or not cookie_dict.get('value'):
        return {'error': 'cookie name or value not provided'}, 400
    cookie.edit(cookie_dict)
    return cookie.get_meta(), 200
def api_create_cookie(user_id, cookiejar_uuid, cookie_dict):
    """Create a cookie in a cookiejar; returns (payload, status).

    Previously the success path returned (None, 200) because it reused the
    already-exhausted ACL result; it now reports the new cookie's UUID.
    """
    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
    if resp:
        return resp
    if 'name' not in cookie_dict or 'value' not in cookie_dict or not cookie_dict['name'] or not cookie_dict['value']:
        return {'error': 'cookie name or value not provided'}, 400
    cookiejar = Cookiejar(cookiejar_uuid)
    name = cookie_dict.get('name')
    value = cookie_dict.get('value')
    domain = cookie_dict.get('domain')
    path = cookie_dict.get('path')
    text = cookie_dict.get('text')
    httponly = bool(cookie_dict.get('httponly'))
    secure = bool(cookie_dict.get('secure'))
    cookie_uuid = cookiejar.add_cookie(name, value, domain=domain, httponly=httponly,
                                       path=path, secure=secure, text=text)
    return {'cookiejar_uuid': cookiejar_uuid, 'cookie_uuid': cookie_uuid}, 200
def api_delete_cookie(user_id, cookie_uuid):
    """Delete a cookie from its jar after existence, ACL and membership checks."""
    cookie = Cookie(cookie_uuid)
    if not cookie.exists():
        return {'error': 'unknown cookie uuid', 'cookie_uuid': cookie_uuid}, 404
    cookiejar_uuid = cookie.get_cookiejar()
    error = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
    if error:
        return error
    cookiejar = Cookiejar(cookiejar_uuid)
    if not cookiejar.is_cookie_in_jar(cookie_uuid):
        return {'error': "Cookie isn't in the jar", 'cookiejar_uuid': cookiejar_uuid}, 404
    cookiejar.delete_cookie(cookie_uuid)
    return {'cookiejar_uuid': cookiejar_uuid, 'cookie_uuid': cookie_uuid}, 200
# def get_cookie_all_keys_name():
# return ['name', 'value', 'domain', 'path', 'httpOnly', 'secure']
## - - ##
## Cookies import ## # TODO: add browser type ?
def import_cookies_from_json(json_cookies, cookiejar_uuid):
    """Import a browser-exported JSON cookie list into a cookiejar.

    Returns an (error, 400) pair on the first malformed cookie, None on success.

    Cleans up diff-merge residue in this block: a stale call to the removed
    `add_cookie_to_cookiejar` helper and two conflicting return statements.
    The try body is narrowed to the unpack call so a KeyError raised inside
    add_cookie would not be silently reported as a bad JSON cookie.
    """
    cookiejar = Cookiejar(cookiejar_uuid)
    for cookie in json_cookies:
        try:
            cookie_dict = unpack_imported_json_cookie(cookie)
        except KeyError:
            return {'error': 'Invalid cookie key, please submit a valid JSON', 'cookiejar_uuid': cookiejar_uuid}, 400
        name = cookie_dict.get('name')
        value = cookie_dict.get('value')
        domain = cookie_dict.get('domain')
        httponly = cookie_dict.get('httponly')
        path = cookie_dict.get('path')
        secure = cookie_dict.get('secure')
        text = cookie_dict.get('text')
        cookiejar.add_cookie(name, value, domain=domain, httponly=httponly, path=path,
                             secure=secure, text=text)
# # TODO: add text field
def unpack_imported_json_cookie(json_cookie):
@ -430,93 +615,21 @@ def unpack_imported_json_cookie(json_cookie):
cookie_dict['domain'] = url.netloc.split(':', 1)[0]
return cookie_dict
def misp_cookie_import(misp_object, cookiejar_uuid):
pass
## - - ##
#### COOKIEJAR API ####
def api_import_cookies_from_json(json_cookies_str, cookiejar_uuid): # # TODO: add catch
def api_import_cookies_from_json(user_id, cookiejar_uuid, json_cookies_str): # # TODO: add catch
resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
if resp:
return resp
json_cookies = json.loads(json_cookies_str)
resp = import_cookies_from_json(json_cookies, cookiejar_uuid)
if resp:
return resp, 400
#### ####
#### COOKIES API ####
def api_verify_basic_cookiejar(cookiejar_uuid, user_id):
    """Legacy ACL check: return an (error, status) pair, or None when access is allowed."""
    if not exist_cookiejar(cookiejar_uuid):
        return {'error': 'unknown cookiejar uuid', 'cookiejar_uuid': cookiejar_uuid}, 404
    level = get_cookiejar_level(cookiejar_uuid)
    # level 0 jars are private to their owner
    if level == 0:  # # TODO: check if user is admin
        cookie_owner = get_cookiejar_owner(cookiejar_uuid)
        if cookie_owner != user_id:
            return {'error': 'The access to this cookiejar is restricted'}, 403
def api_get_cookiejar_cookies(cookiejar_uuid, user_id):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
resp = get_cookiejar_cookies_list(cookiejar_uuid)
return resp, 200
def api_edit_cookiejar_description(user_id, cookiejar_uuid, description):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
edit_cookiejar_description(cookiejar_uuid, description)
return {'cookiejar_uuid': cookiejar_uuid}, 200
def api_get_cookiejar_cookies_with_uuid(cookiejar_uuid, user_id):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
resp = get_cookiejar_cookies_list(cookiejar_uuid, add_cookie_uuid=True)
return resp, 200
def api_get_cookies_list_select(user_id):
l_cookiejar = []
for cookies_uuid in get_global_cookiejar():
l_cookiejar.append(f'{get_cookiejar_description(cookies_uuid)} : {cookies_uuid}')
for cookies_uuid in get_user_cookiejar(user_id):
l_cookiejar.append(f'{get_cookiejar_description(cookies_uuid)} : {cookies_uuid}')
return sorted(l_cookiejar)
def api_delete_cookie_from_cookiejar(user_id, cookiejar_uuid, cookie_uuid):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
delete_cookie_from_cookiejar(cookiejar_uuid, cookie_uuid)
return {'cookiejar_uuid': cookiejar_uuid, 'cookie_uuid': cookie_uuid}, 200
def api_delete_cookie_jar(user_id, cookiejar_uuid):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
delete_cookie_jar(cookiejar_uuid)
return {'cookiejar_uuid': cookiejar_uuid}, 200
def api_edit_cookie(user_id, cookiejar_uuid, cookie_uuid, cookie_dict):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
if 'name' not in cookie_dict or 'value' not in cookie_dict or cookie_dict['name'] == '':
return {'error': 'cookie name or value not provided'}, 400
edit_cookie(cookiejar_uuid, cookie_uuid, cookie_dict)
return get_cookie_dict(cookie_uuid), 200
def api_create_cookie(user_id, cookiejar_uuid, cookie_dict):
resp = api_verify_basic_cookiejar(cookiejar_uuid, user_id)
if resp:
return resp
if 'name' not in cookie_dict or 'value' not in cookie_dict or cookie_dict['name'] == '':
return {'error': 'cookie name or value not provided'}, 400
resp = add_cookie_to_cookiejar(cookiejar_uuid, cookie_dict)
return resp, 200
#### ####
# # # # # # # #
# #
# # TODO CLASS CrawlerTask
# CRAWLER # ###################################################################################
# #
# # # # # # # #
@ -858,7 +971,7 @@ def api_add_crawler_task(data, user_id=None):
# cookiejar_uuid = data.get('cookiejar_uuid', None)
# if cookiejar_uuid:
# if not exist_cookiejar(cookiejar_uuid):
# return ({'error': 'unknow cookiejar uuid', 'cookiejar_uuid': cookiejar_uuid}, 404)
# return ({'error': 'unknown cookiejar uuid', 'cookiejar_uuid': cookiejar_uuid}, 404)
# level = get_cookiejar_level(cookiejar_uuid)
# if level == 0: # # TODO: check if user is admin ######################################################
# cookie_owner = get_cookiejar_owner(cookiejar_uuid)
@ -1196,14 +1309,6 @@ def get_nb_elem_to_crawl_by_type(queue_type): # # TODO: rename me
return nb
###################################################################################
def get_all_crawlers_queues_types():
    """Return the set of crawler queue types derived from the splash crawlers to launch."""
    all_queues_types = set()
    # dead `all_splash_name = list()` reassignment before the return removed;
    # the intermediate list itself is no longer needed either
    for splash_name in get_all_crawlers_to_launch_splash_name():
        all_queues_types.add(get_splash_crawler_type(splash_name))
    return all_queues_types
def get_crawler_queue_types_by_splash_name(splash_name):
all_domain_type = [splash_name]
crawler_type = get_splash_crawler_type(splash_name)

View file

@ -1,9 +1,9 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
'''
"""
Blueprint Flask: crawler splash endpoints: dashboard, onion crawler ...
'''
"""
import os
import json
@ -21,7 +21,6 @@ import Flask_config
# Import Role_Manager
from Role_Manager import login_admin, login_analyst, login_read_only
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
@ -38,20 +37,23 @@ from packages import Date
bootstrap_label = Flask_config.bootstrap_label
# ============ BLUEPRINT ============
crawler_splash = Blueprint('crawler_splash', __name__, template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/crawler/crawler_splash'))
crawler_splash = Blueprint('crawler_splash', __name__,
template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/crawler/crawler_splash'))
# ============ VARIABLES ============
# ============ FUNCTIONS ============
def api_validator(message, code):
    """Wrap an API (message, code) pair into a JSON Flask response; None when either is falsy."""
    if not (message and code):
        return None
    payload = json.dumps(message, indent=2, sort_keys=True)
    return Response(payload, mimetype='application/json'), code
def create_json_response(data, status_code):
    """Serialize data to pretty-printed JSON and return it as a (Response, status) tuple."""
    body = json.dumps(data, indent=2, sort_keys=True)
    return Response(body, mimetype='application/json'), status_code
# ============= ROUTES ==============
@crawler_splash.route("/crawlers/dashboard", methods=['GET'])
@login_required
@ -68,6 +70,7 @@ def crawlers_dashboard():
crawlers_status=crawlers_status,
crawlers_latest_stats=crawlers_latest_stats)
@crawler_splash.route("/crawlers/crawler_dashboard_json", methods=['GET'])
@login_required
@login_read_only
@ -78,19 +81,21 @@ def crawler_dashboard_json():
return jsonify({'crawlers_status': crawlers_status,
'stats': crawlers_latest_stats})
@crawler_splash.route("/crawlers/manual", methods=['GET'])
@login_required
@login_read_only
def manual():
user_id = current_user.get_id()
l_cookiejar = crawlers.api_get_cookies_list_select(user_id)
l_cookiejar = crawlers.api_get_cookiejars_selector(user_id)
crawlers_types = crawlers.get_crawler_all_types()
proxies = [] # TODO HANDLE PROXIES
return render_template("crawler_manual.html",
is_manager_connected=crawlers.get_lacus_connection_metadata(),
crawlers_types=crawlers_types,
proxies=proxies,
l_cookiejar=l_cookiejar)
is_manager_connected=crawlers.get_lacus_connection_metadata(),
crawlers_types=crawlers_types,
proxies=proxies,
l_cookiejar=l_cookiejar)
@crawler_splash.route("/crawlers/send_to_spider", methods=['POST'])
@login_required
@ -102,7 +107,7 @@ def send_to_spider():
url = request.form.get('url_to_crawl')
crawler_type = request.form.get('crawler_queue_type')
proxy = request.form.get('proxy_name')
auto_crawler = request.form.get('crawler_type') # TODO Auto Crawler
auto_crawler = request.form.get('crawler_type') # TODO Auto Crawler
crawler_delta = request.form.get('crawler_epoch') # TODO Auto Crawler
screenshot = request.form.get('screenshot')
har = request.form.get('har')
@ -130,6 +135,7 @@ def send_to_spider():
return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.manual'))
@crawler_splash.route("/crawlers/last/domains", methods=['GET'])
@login_required
@login_read_only
@ -157,6 +163,7 @@ def crawlers_last_domains():
date_from=date_string, date_to=date_string,
crawler_stats=crawler_stats)
@crawler_splash.route('/crawlers/last/domains/json')
@login_required
@login_read_only
@ -169,6 +176,7 @@ def crawlers_last_domains_json():
stats.append(crawlers.get_crawlers_stats_by_day(date, domain_type))
return jsonify(stats)
#### Domains ####
# add route : /crawlers/show_domain
@ -195,7 +203,8 @@ def showDomain():
dict_domain['domain'] = domain.id
if domain.was_up():
dict_domain = {**dict_domain, **domain.get_correlations()}
dict_domain['correlation_nb'] = len(dict_domain['decoded']) + len(dict_domain['username']) + len(dict_domain['pgp']) + len(dict_domain['cryptocurrency']) + len(dict_domain['screenshot'])
dict_domain['correlation_nb'] = len(dict_domain['decoded']) + len(dict_domain['username']) + len(
dict_domain['pgp']) + len(dict_domain['cryptocurrency']) + len(dict_domain['screenshot'])
dict_domain['tags_safe'] = Tag.is_tags_safe(dict_domain['tags'])
dict_domain['history'] = domain.get_history(status=True)
curr_epoch = None
@ -225,6 +234,7 @@ def showDomain():
dict_domain=dict_domain, bootstrap_label=bootstrap_label,
modal_add_tags=Tag.get_modal_add_tags(dict_domain['domain'], object_type="domain"))
@crawler_splash.route('/crawlers/domain/download', methods=['GET'])
@login_required
@login_read_only
@ -243,6 +253,7 @@ def crawlers_domain_download():
abort(404)
return send_file(zip_file, download_name=f'{dom.get_id()}.zip', as_attachment=True)
@crawler_splash.route('/domains/explorer/domain_type_post', methods=['POST'])
@login_required
@login_read_only
@ -276,6 +287,7 @@ def domains_explorer_post_filter():
else:
return redirect(url_for('crawler_splash.domains_explorer_onion'))
# TODO TEMP DISABLE
# @crawler_splash.route('/domains/explorer/all', methods=['GET'])
# @login_required
@ -306,7 +318,9 @@ def domains_explorer_onion():
page = 1
dict_data = Domains.get_domains_up_by_filers('onion', page=page, date_from=date_from, date_to=date_to)
return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label, domain_type='onion')
return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label,
domain_type='onion')
@crawler_splash.route('/domains/explorer/web', methods=['GET'])
@login_required
@ -321,7 +335,9 @@ def domains_explorer_web():
page = 1
dict_data = Domains.get_domains_up_by_filers('web', page=page, date_from=date_from, date_to=date_to)
return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label, domain_type='regular')
return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label,
domain_type='regular')
@crawler_splash.route('/domains/languages/all/json', methods=['GET'])
@login_required
@ -332,6 +348,7 @@ def domains_all_languages_json():
domain_types = request.args.getlist('domain_types')
return jsonify(Language.get_languages_from_iso(Domains.get_all_domains_languages(), sort=True))
@crawler_splash.route('/domains/languages/search_get', methods=['GET'])
@login_required
@login_read_only
@ -350,10 +367,12 @@ def domains_search_languages_get():
languages = request.args.getlist('languages')
if languages:
languages = languages[0].split(',')
l_dict_domains = Domains.api_get_domains_by_languages(domains_types, Language.get_iso_from_languages(languages), meta=True, page=page)
l_dict_domains = Domains.api_get_domains_by_languages(domains_types, Language.get_iso_from_languages(languages),
meta=True, page=page)
return render_template("domains/domains_filter_languages.html", template_folder='../../',
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
current_languages=languages, domains_types=domains_types)
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
current_languages=languages, domains_types=domains_types)
@crawler_splash.route('/domains/name/search', methods=['GET'])
@login_required
@ -373,8 +392,9 @@ def domains_search_name():
l_dict_domains = Domains.api_search_domains_by_name(name, domains_types, meta=True, page=page)
return render_template("domains/domains_result_list.html", template_folder='../../',
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
domains_types=domains_types)
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
domains_types=domains_types)
@crawler_splash.route('/domains/date', methods=['GET'])
@login_required
@ -398,6 +418,7 @@ def domains_search_date():
bootstrap_label=bootstrap_label,
dict_domains=dict_domains, type=domain_type)
@crawler_splash.route('/domains/date/post', methods=['POST'])
@login_required
@login_analyst
@ -418,6 +439,7 @@ def domains_search_date_post():
def crawler_cookiejar_add():
return render_template("add_cookiejar.html")
@crawler_splash.route('/crawler/cookiejar/add_post', methods=['POST'])
@login_required
@login_analyst
@ -443,107 +465,110 @@ def crawler_cookiejar_add_post():
for obj_tuple in list(request.form):
l_input = request.form.getlist(obj_tuple)
if len(l_input) == 2:
if l_input[0]: # cookie_name
if l_input[0]: # Cookie Name
cookie_dict = {'name': l_input[0], 'value': l_input[1]}
l_manual_cookie.append(cookie_dict)
elif l_input[1]: # cookie_value
l_invalid_cookie.append({'name': '', 'value': l_input[1]})
elif l_input[1]: # Cookie Value
l_invalid_cookie.append({'name': '', 'value': l_input[1]})
if l_invalid_cookie:
return create_json_response({'error': 'invalid cookie', 'invalid fileds': l_invalid_cookie}, 400)
return create_json_response({'error': 'invalid cookie', 'invalid fields': l_invalid_cookie}, 400)
# Create Cookiejar
cookiejar_uuid = crawlers.create_cookiejar(user_id, level=level, description=description)
if json_cookies:
res = crawlers.api_import_cookies_from_json(json_cookies, cookiejar_uuid)
# Create Cookies
if json_cookies: # TODO CHECK Import
res = crawlers.api_import_cookies_from_json(user_id, cookiejar_uuid, json_cookies)
if res:
return create_json_response(res[0], res[1])
if l_manual_cookie:
crawlers.add_cookies_to_cookiejar(cookiejar_uuid, l_manual_cookie)
for cookie_dict in l_manual_cookie:
crawlers.api_create_cookie(user_id, cookiejar_uuid, cookie_dict)
return redirect(url_for('crawler_splash.crawler_cookiejar_show', uuid=cookiejar_uuid))
return redirect(url_for('crawler_splash.crawler_cookiejar_show', cookiejar_uuid=cookiejar_uuid))
@crawler_splash.route('/crawler/cookiejar/all', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_all():
user_id = current_user.get_id()
user_cookiejar = crawlers.get_cookiejar_metadata_by_iterator(crawlers.get_user_cookiejar(user_id))
global_cookiejar = crawlers.get_cookiejar_metadata_by_iterator(crawlers.get_global_cookiejar())
return render_template("all_cookiejar.html", user_cookiejar=user_cookiejar, global_cookiejar=global_cookiejar)
user_cookiejars = crawlers.get_cookiejars_meta_by_iterator(crawlers.get_cookiejars_user(user_id))
global_cookiejars = crawlers.get_cookiejars_meta_by_iterator(crawlers.get_cookiejars_global())
return render_template("all_cookiejar.html", user_cookiejar=user_cookiejars, global_cookiejar=global_cookiejars)
@crawler_splash.route('/crawler/cookiejar/show', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_show():
user_id = current_user.get_id()
cookiejar_uuid = request.args.get('cookiejar_uuid')
cookiejar_uuid = request.args.get('uuid')
res = crawlers.api_get_cookiejar_cookies_with_uuid(cookiejar_uuid, user_id)
if res[1] !=200:
res = crawlers.api_get_cookiejar(cookiejar_uuid, user_id)
if res[1] != 200:
return create_json_response(res[0], res[1])
else:
cookiejar_meta = res[0]
cookiejar_metadata = crawlers.get_cookiejar_metadata(cookiejar_uuid, level=False)
return render_template("show_cookiejar.html", cookiejar_metadata=cookiejar_meta)
l_cookies = []
l_cookie_uuid = []
for cookie in res[0]:
l_cookies.append(json.dumps(cookie[0], indent=4, sort_keys=True))
l_cookie_uuid.append(cookie[1])
return render_template("show_cookiejar.html", cookiejar_uuid=cookiejar_uuid, cookiejar_metadata=cookiejar_metadata,
l_cookies=l_cookies, l_cookie_uuid=l_cookie_uuid)
@crawler_splash.route('/crawler/cookiejar/cookie/delete', methods=['GET'])
@crawler_splash.route('/crawler/cookie/delete', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_cookie_delete():
user_id = current_user.get_id()
cookiejar_uuid = request.args.get('cookiejar_uuid')
cookie_uuid = request.args.get('cookie_uuid')
cookie_uuid = request.args.get('uuid')
res = crawlers.api_delete_cookie_from_cookiejar(user_id, cookiejar_uuid, cookie_uuid)
if res[1] !=200:
res = crawlers.api_delete_cookie(user_id, cookie_uuid)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.crawler_cookiejar_show', cookiejar_uuid=cookiejar_uuid))
else:
cookiejar_uuid = res[0]['cookiejar_uuid']
return redirect(url_for('crawler_splash.crawler_cookiejar_show', uuid=cookiejar_uuid))
@crawler_splash.route('/crawler/cookiejar/delete', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_delete():
user_id = current_user.get_id()
cookiejar_uuid = request.args.get('cookiejar_uuid')
cookiejar_uuid = request.args.get('uuid')
res = crawlers.api_delete_cookie_jar(user_id, cookiejar_uuid)
if res[1] !=200:
res = crawlers.api_delete_cookiejar(user_id, cookiejar_uuid)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.crawler_cookiejar_all'))
@crawler_splash.route('/crawler/cookiejar/edit', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_edit():
user_id = current_user.get_id()
cookiejar_uuid = request.args.get('cookiejar_uuid')
cookiejar_uuid = request.args.get('uuid')
description = request.args.get('description')
res = crawlers.api_edit_cookiejar_description(user_id, cookiejar_uuid, description)
return create_json_response(res[0], res[1])
@crawler_splash.route('/crawler/cookiejar/cookie/edit', methods=['GET'])
@crawler_splash.route('/crawler/cookie/edit', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_cookie_edit():
user_id = current_user.get_id()
cookiejar_uuid = request.args.get('cookiejar_uuid')
cookie_uuid = request.args.get('cookie_uuid')
cookie_uuid = request.args.get('uuid')
cookie_dict = crawlers.get_cookie_dict(cookie_uuid)
return render_template("edit_cookie.html", cookiejar_uuid=cookiejar_uuid, cookie_uuid=cookie_uuid, cookie_dict=cookie_dict)
cookie_dict = crawlers.api_get_cookie(user_id, cookie_uuid)
return render_template("edit_cookie.html", cookie_uuid=cookie_uuid, cookie_dict=cookie_dict)
@crawler_splash.route('/crawler/cookiejar/cookie/edit_post', methods=['POST'])
@crawler_splash.route('/crawler/cookie/edit_post', methods=['POST'])
@login_required
@login_read_only
def crawler_cookiejar_cookie_edit_post():
user_id = current_user.get_id()
cookiejar_uuid = request.form.get('cookiejar_uuid')
cookie_uuid = request.form.get('cookie_uuid')
name = request.form.get('name')
value = request.form.get('value')
@ -562,19 +587,22 @@ def crawler_cookiejar_cookie_edit_post():
if secure:
cookie_dict['secure'] = True
res = crawlers.api_edit_cookie(user_id, cookiejar_uuid, cookie_uuid, cookie_dict)
res = crawlers.api_edit_cookie(user_id, cookie_uuid, cookie_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.crawler_cookiejar_show', cookiejar_uuid=cookiejar_uuid))
cookie = crawlers.Cookie(cookie_uuid)
cookiejar_uuid = cookie.get_cookiejar()
return redirect(url_for('crawler_splash.crawler_cookiejar_show', uuid=cookiejar_uuid))
@crawler_splash.route('/crawler/cookiejar/cookie/add', methods=['GET'])
@login_required
@login_read_only
def crawler_cookiejar_cookie_add():
user_id = current_user.get_id()
cookiejar_uuid = request.args.get('cookiejar_uuid')
return render_template("add_cookie.html", cookiejar_uuid=cookiejar_uuid)
@crawler_splash.route('/crawler/cookiejar/cookie/manual_add_post', methods=['POST'])
@login_required
@login_read_only
@ -598,8 +626,13 @@ def crawler_cookiejar_cookie_manual_add_post():
if secure:
cookie_dict['secure'] = True
res = crawlers.api_create_cookie(user_id, cookiejar_uuid, cookie_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.crawler_cookiejar_show', cookiejar_uuid=cookiejar_uuid))
@crawler_splash.route('/crawler/cookiejar/cookie/json_add_post', methods=['POST'])
@login_required
@login_read_only
@ -611,12 +644,16 @@ def crawler_cookiejar_cookie_json_add_post():
file = request.files['file']
json_cookies = file.read().decode()
if json_cookies:
res = crawlers.api_import_cookies_from_json(json_cookies, cookiejar_uuid)
res = crawlers.api_import_cookies_from_json(user_id, cookiejar_uuid, json_cookies)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.crawler_cookiejar_show', cookiejar_uuid=cookiejar_uuid))
return redirect(url_for('crawler_splash.crawler_cookiejar_cookie_add', cookiejar_uuid=cookiejar_uuid))
#--- Cookiejar ---#
# --- Cookiejar ---#
@crawler_splash.route('/crawler/settings/crawlers_to_lauch', methods=['GET', 'POST'])
@ -626,7 +663,7 @@ def crawler_splash_setings_crawlers_to_lauch():
if request.method == 'POST':
dict_splash_name = {}
for crawler_name in list(request.form):
dict_splash_name[crawler_name]= request.form.get(crawler_name)
dict_splash_name[crawler_name] = request.form.get(crawler_name)
res = crawlers.api_set_nb_crawlers_to_launch(dict_splash_name)
if res[1] != 200:
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
@ -635,7 +672,8 @@ def crawler_splash_setings_crawlers_to_lauch():
else:
nb_crawlers_to_launch = crawlers.get_nb_crawlers_to_launch_ui()
return render_template("settings_edit_crawlers_to_launch.html",
nb_crawlers_to_launch=nb_crawlers_to_launch)
nb_crawlers_to_launch=nb_crawlers_to_launch)
@crawler_splash.route('/crawler/settings/relaunch_crawler', methods=['GET'])
@login_required
@ -644,6 +682,7 @@ def crawler_splash_setings_relaunch_crawler():
crawlers.relaunch_crawlers()
return redirect(url_for('crawler_splash.crawler_splash_setings'))
## - - ##
#### LACUS ####
@ -666,13 +705,14 @@ def crawler_settings():
# crawler_full_config = Config_DB.get_full_config_by_section('crawler')
return render_template("settings_crawler.html",
is_manager_connected=is_manager_connected,
lacus_url=lacus_url, api_key=api_key,
#all_proxies=all_proxies,
#nb_crawlers_to_launch=nb_crawlers_to_launch,
is_crawler_working=is_crawler_working,
crawler_error_mess=crawler_error_mess,
)
is_manager_connected=is_manager_connected,
lacus_url=lacus_url, api_key=api_key,
# all_proxies=all_proxies,
# nb_crawlers_to_launch=nb_crawlers_to_launch,
is_crawler_working=is_crawler_working,
crawler_error_mess=crawler_error_mess,
)
@crawler_splash.route('/crawler/settings/crawler/manager', methods=['GET', 'POST'])
@login_required
@ -693,6 +733,7 @@ def crawler_lacus_settings_crawler_manager():
api_key = crawlers.get_lacus_api_key()
return render_template("settings_edit_lacus_crawler.html", lacus_url=lacus_url, api_key=api_key)
@crawler_splash.route('/crawler/settings/crawler/test', methods=['GET'])
@login_required
@login_admin
@ -700,4 +741,4 @@ def crawler_settings_crawler_test():
crawlers.test_ail_crawlers()
return redirect(url_for('crawler_splash.crawler_settings'))
#--- LACUS ---#
# --- LACUS ---#

View file

@ -8,12 +8,12 @@
<div class="card-header py-0">
<div class="d-flex flex-row-reverse">
<div>
<a class="btn btn-light" href="{{ url_for('crawler_splash.crawler_cookiejar_cookie_edit') }}?cookiejar_uuid={{cookiejar_uuid}}&cookie_uuid={{l_cookie_uuid[loop.index0]}}" style="font-size: 15px">
<a class="btn btn-light" href="{{ url_for('crawler_splash.crawler_cookiejar_cookie_edit') }}?uuid={{dict_cookie['uuid']}}" style="font-size: 15px">
<i class="text-secondary fas fa-pencil-alt"></i>
</a>
</div>
<div>
<a class="btn btn-light" href="{{ url_for('crawler_splash.crawler_cookiejar_cookie_delete') }}?cookiejar_uuid={{cookiejar_uuid}}&cookie_uuid={{l_cookie_uuid[loop.index0]}}" style="font-size: 15px">
<a class="btn btn-light" href="{{ url_for('crawler_splash.crawler_cookiejar_cookie_delete') }}?uuid={{dict_cookie['uuid']}}" style="font-size: 15px">
<i class="text-danger fas fa-trash-alt"></i>
</a>
</div>
@ -21,7 +21,7 @@
</div>
<div class="card-body">
<pre>{{dict_cookie}}</pre>
<pre>{{dict_cookie['data']}}</pre>
</div>
</div>

View file

@ -42,7 +42,6 @@
<div class="card-body">
<form action="{{ url_for('crawler_splash.crawler_cookiejar_cookie_edit_post') }}" method="post" enctype="multipart/form-data">
<input type="text" name="cookiejar_uuid" value="{{cookiejar_uuid}}" hidden>
<input type="text" name="cookie_uuid" value="{{cookie_uuid}}" hidden>
{% include 'crawler/crawler_splash/cookie_edit_block.html' %}
<div class="form-group">

View file

@ -28,36 +28,36 @@
<div class="col-12 col-lg-10" id="core_content">
<div class="card mb-3 mt-1">
<div class="card-header text-white bg-dark">
<div class="card-header text-white bg-dark">
<div class="row">
<div class="col-8">
<h5 class="card-title"><i class="fas fa-cookie-bite"></i> Edit Cookiejar</h5>
<h5 class="card-title"><i class="fas fa-cookie-bite"></i> Edit Cookiejar</h5>
</div>
<div class="col-4">
<a class="btn btn-danger float-right" href="{{ url_for('crawler_splash.crawler_cookiejar_delete') }}?cookiejar_uuid={{cookiejar_uuid}}">
<i class="fas fa-trash-alt"></i>
</a>
<a class="btn btn-danger float-right" href="{{ url_for('crawler_splash.crawler_cookiejar_delete') }}?uuid={{cookiejar_metadata['uuid']}}">
<i class="fas fa-trash-alt"></i>
</a>
</div>
</div>
</div>
<div class="card-body">
</div>
<div class="card-body">
{% with all_cookiejar=[cookiejar_metadata], table_id='table_cookiejar'%}
{% include 'crawler/crawler_splash/table_cookiejar.html' %}
{% endwith %}
{% include 'crawler/crawler_splash/table_cookiejar.html' %}
{% endwith %}
<button class="btn btn-info" onclick="show_edit_description();">
Edit Description <i class="fas fa-pencil-alt"></i>
</button>
<a href="{{ url_for('crawler_splash.crawler_cookiejar_cookie_add')}}?cookiejar_uuid={{cookiejar_uuid}}">
<a href="{{ url_for('crawler_splash.crawler_cookiejar_cookie_add')}}?uuid={{cookiejar_metadata['uuid']}}">
<button class="btn btn-primary">
Add Cookies <i class="fas fa-cookie"></i>
</button>
</a>
</a>
<span class="mt-1" id="description-edit-block">
<div class="input-group">
<input class="form-control" type="text" id="input-description" value="{{cookiejar_metadata['description']}}"></input>
<input class="form-control" type="text" id="input-description" value="{{cookiejar_metadata['description']}}">
<div class="input-group-append">
<button class="btn btn-info" onclick="edit_description();">
<i class="fas fa-pencil-alt"></i> Edit
@ -69,9 +69,9 @@
</div>
</div>
{% with l_elem=l_cookies, l_cookie_uuid=l_cookie_uuid, cookiejar_uuid=cookiejar_uuid %}
{% include 'crawler/crawler_splash/cookies_card_block.html' %}
{% endwith %}
{% with l_elem=cookiejar_metadata['cookies'] %}
{% include 'crawler/crawler_splash/cookies_card_block.html' %}
{% endwith %}
</div>
</div>
@ -103,16 +103,15 @@ function toggle_sidebar(){
}
function show_edit_description(){
console.log('edit');
$('#description-edit-block').show();
}
function edit_description(){
var new_description = $('#input-description').val()
var data_to_send = { cookiejar_uuid: "{{cookiejar_uuid}}", "description": new_description}
var data_to_send = { uuid: "{{cookiejar_metadata['uuid']}}", "description": new_description}
$.get("{{ url_for('crawler_splash.crawler_cookiejar_edit') }}", data_to_send, function(data, status){
if(status == "success") {
if(status === "success") {
$('#description-text').text(new_description)
$('#description-edit-block').hide();
}

View file

@ -17,11 +17,11 @@
{%endif%}
</td>
<td>
<a target="_blank" href="{{ url_for('crawler_splash.crawler_cookiejar_show') }}?cookiejar_uuid={{ dict_cookiejar['cookiejar_uuid'] }}">
{{ dict_cookiejar['cookiejar_uuid']}}
<a target="_blank" href="{{ url_for('crawler_splash.crawler_cookiejar_show') }}?uuid={{ dict_cookiejar['uuid'] }}">
{{ dict_cookiejar['uuid']}}
</a>
</td>
<td>{{dict_cookiejar['user_id']}}</td>
<td>{{dict_cookiejar['user']}}</td>
</tr>
{% endfor %}
</tbody>