diff --git a/bin/lib/Investigations.py b/bin/lib/Investigations.py
index c827fbee..6190f43e 100755
--- a/bin/lib/Investigations.py
+++ b/bin/lib/Investigations.py
@@ -450,7 +450,7 @@ def check_access_acl(inv, user_org, is_admin=False):
     level = inv.get_level()
     if level == 1:
         return True
-    if level == 2:
+    elif level == 2:
         return ail_orgs.check_access_acl(inv, user_org, is_admin=is_admin)
     else:
         return False
diff --git a/bin/lib/crawlers.py b/bin/lib/crawlers.py
index 1142163c..b7c2bc41 100755
--- a/bin/lib/crawlers.py
+++ b/bin/lib/crawlers.py
@@ -37,6 +37,7 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 from packages import git_status
 from packages import Date
+from lib import ail_orgs
 from lib.ConfigLoader import ConfigLoader
 from lib.objects.Domains import Domain
 from lib.objects import HHHashs
@@ -477,6 +478,14 @@ def create_cookie_crawler(cookie_dict, domain, crawler_type='web'):
 ################################################################################
 ################################################################################

+# # # # # # # # #
+#               #
+#  COOKIEJARS   #
+#               #
+# # # # # # # # #
+
+# TODO EDIT COOKIEJAR
+
 def get_cookiejars():
     return r_crawler.smembers('cookiejars:all')

@@ -486,6 +495,12 @@ def get_cookiejars_global():
         cookiejars = []
     return cookiejars

+def get_cookiejars_org(org_uuid):
+    cookiejars = ail_orgs.get_org_objs_by_type(org_uuid, 'cookiejar')
+    if not cookiejars:
+        cookiejars = []
+    return cookiejars
+
 def get_cookiejars_user(user_id):
     cookiejars = r_crawler.smembers(f'cookiejars:user:{user_id}')
     if not cookiejars:
@@ -518,20 +533,46 @@ class Cookiejar:
     def _set_user(self, user_id):
         return r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'user', user_id)

-    def get_level(self):
-        level = r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'level')
-        if level:
-            level = 1
-        else:
-            level = 0
-        return level
+    ## LEVEL ##

-    def _set_level(self, level):
-        if level:
-            level = 1
-        else:
-            level = 0
+    def get_level(self):
+        return int(r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'level'))
+
+    def set_level(self, level, org_uuid):
+        level = int(level)
         r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'level', level)
+        if level == 0:
+            r_crawler.sadd(f'cookiejars:user:{self.get_user()}', self.uuid)
+        elif level == 1:
+            r_crawler.sadd('cookiejars:global', self.uuid)
+        elif level == 2:
+            self.add_to_org(org_uuid)
+
+    def reset_level(self, old_level, new_level, new_org_uuid):
+        if old_level == 0:
+            r_crawler.srem(f'cookiejars:user:{self.get_user()}', self.uuid)
+        elif old_level == 1:
+            r_crawler.srem('cookiejars:global', self.uuid)
+        # Org
+        elif old_level == 2:
+            ail_orgs.remove_obj_to_org(self.get_org(), 'cookiejar', self.uuid)
+        self.set_level(new_level, new_org_uuid)
+
+    ## --LEVEL-- ##
+
+    ## ORG ##
+
+    def get_creator_org(self):
+        return r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'creator_org')
+
+    def get_org(self):
+        return r_crawler.hget(f'cookiejar:meta:{self.uuid}', 'org')
+
+    def add_to_org(self, org_uuid):
+        r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'org', org_uuid)
+        ail_orgs.add_obj_to_org(org_uuid, 'cookiejar', self.uuid)
+
+    ## -ORG- ##

     def is_cookie_in_jar(self, cookie_uuid):
         # kvrocks sismember TEMP fix
@@ -597,19 +638,18 @@ class Cookiejar:
             cookie = Cookie(cookie_uuid)
             cookie.delete()

-    def create(self, user_id, description=None, level=1):
+    # TODO Last EDIT
+    def create(self, user_org, user_id, level, description=None):
         if self.exists():
             raise Exception('Cookiejar already exists')
         r_crawler.sadd('cookiejars:all', self.uuid)
-        if level == 0:
-            r_crawler.sadd(f'cookiejars:user:{user_id}', self.uuid)
-        else:
-            r_crawler.sadd('cookiejars:global', self.uuid)
+        r_crawler.hset(f'cookiejar:meta:{self.uuid}', 'creator_org', user_org)
         self._set_user(user_id)
-        self._set_date(datetime.now().strftime("%Y%m%d"))
-        self._set_level(level)
+        self.set_level(level, user_org)
+
+        self._set_date(datetime.now().strftime("%Y%m%d"))  # TODO improve DATE
         if description:
             self.set_description(description)
@@ -619,10 +659,13 @@ class Cookiejar:
         r_crawler.srem(f'cookiejars:user:{self.get_user()}', self.uuid)
         r_crawler.srem('cookiejars:global', self.uuid)
         r_crawler.srem('cookiejars:all', self.uuid)
+        level = self.get_level()
+        if level == 2:
+            ail_orgs.remove_obj_to_org(self.get_org(), 'cookiejar', self.uuid)
         r_crawler.delete(f'cookiejar:meta:{self.uuid}')


-def create_cookiejar(user_id, description=None, level=1, cookiejar_uuid=None):
+def create_cookiejar(user_org, user_id, description=None, level=1, cookiejar_uuid=None):
     if cookiejar_uuid:
         cookiejar = Cookiejar(cookiejar_uuid)
         if cookiejar.exists():
@@ -630,7 +673,7 @@ def create_cookiejar(user_id, description=None, level=1, cookiejar_uuid=None):
     else:
         cookiejar_uuid = generate_uuid()
     cookiejar = Cookiejar(cookiejar_uuid)
-    cookiejar.create(user_id, description=description, level=level)
+    cookiejar.create(user_org, user_id, level, description=description)
     return cookiejar_uuid

 def get_cookiejars_meta_by_iterator(iter_cookiejar_uuid):
@@ -640,16 +683,17 @@
         cookiejars_meta.append(cookiejar.get_meta(nb_cookies=True))
     return cookiejars_meta

-def get_cookiejars_by_user(user_id):
+def get_cookiejars_by_user(user_org, user_id):
     cookiejars_global = get_cookiejars_global()
+    cookiejars_org = get_cookiejars_org(user_org)
     cookiejars_user = get_cookiejars_user(user_id)
-    return [*cookiejars_user, *cookiejars_global]
+    return [*cookiejars_user, *cookiejars_org, *cookiejars_global]

 ## API ##

-def api_get_cookiejars_selector(user_id):
+def api_get_cookiejars_selector(user_org, user_id):
     cookiejars = []
-    for cookiejar_uuid in get_cookiejars_by_user(user_id):
+    for cookiejar_uuid in get_cookiejars_by_user(user_org, user_id):
         cookiejar = Cookiejar(cookiejar_uuid)
         description = cookiejar.get_description()
         if not description:
@@ -657,38 +701,55 @@
         cookiejars.append(f'{description} : {cookiejar.uuid}')
     return sorted(cookiejars)

-def api_verify_cookiejar_acl(cookiejar_uuid, user_id):
-    cookiejar = Cookiejar(cookiejar_uuid)
-    if not cookiejar.exists():
-        return {'error': 'unknown cookiejar uuid', 'cookiejar_uuid': cookiejar_uuid}, 404
-    if cookiejar.get_level() == 0:  # TODO: check if user is admin
-        if cookiejar.get_user() != user_id:
-            return {'error': 'The access to this cookiejar is restricted'}, 403
-
-def api_edit_cookiejar_description(user_id, cookiejar_uuid, description):
-    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
+def api_edit_cookiejar_description(user_org, user_id, is_admin, cookiejar_uuid, description):
+    resp = api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
     if resp:
         return resp
     cookiejar = Cookiejar(cookiejar_uuid)
     cookiejar.set_description(description)
     return {'cookiejar_uuid': cookiejar_uuid}, 200

-def api_delete_cookiejar(user_id, cookiejar_uuid):
-    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
+def api_delete_cookiejar(user_org, user_id, is_admin, cookiejar_uuid):
+    resp = api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
     if resp:
         return resp
     cookiejar = Cookiejar(cookiejar_uuid)
     cookiejar.delete()
     return {'cookiejar_uuid': cookiejar_uuid}, 200

-def api_get_cookiejar(cookiejar_uuid, user_id):
-    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
+def api_get_cookiejar(user_org, user_id, is_admin, cookiejar_uuid):
+    resp = api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
     if resp:
         return resp
     cookiejar = Cookiejar(cookiejar_uuid)
     meta = cookiejar.get_meta(level=True, cookies=True, r_json=True)
     return meta, 200

+#### ACL ####
+
+def check_cookiejar_access_acl(cookiejar, user_org, user_id, is_admin=False):
+    if is_admin:
+        return True
+
+    level = cookiejar.get_level()
+    if level == 0:
+        return user_id == cookiejar.get_user()
+    elif level == 1:
+        return True
+    elif level == 2:
+        return ail_orgs.check_access_acl(cookiejar, user_org, is_admin=is_admin)
+    else:
+        return False
+
+def api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin=False):
+    cookiejar = Cookiejar(cookiejar_uuid)
+    if not cookiejar.exists():
+        return {'error': 'unknown cookiejar uuid', 'cookiejar_uuid': cookiejar_uuid}, 404
+    if not check_cookiejar_access_acl(cookiejar, user_org, user_id, is_admin=is_admin):
+        return {"status": "error", "reason": "Access Denied"}, 403
+
+#### API ####
+
 # # # # # # # #
 #             #
 #   COOKIES   #
@@ -788,20 +849,20 @@ class Cookie:

 ## API ##

-def api_get_cookie(user_id, cookie_uuid):
+def api_get_cookie(user_org, user_id, is_admin, cookie_uuid):
     cookie = Cookie(cookie_uuid)
     if not cookie.exists():
         return {'error': 'unknown cookie uuid', 'cookie_uuid': cookie_uuid}, 404
-    resp = api_verify_cookiejar_acl(cookie.get_cookiejar(), user_id)
+    resp = api_check_cookiejar_access_acl(cookie.get_cookiejar(), user_org, user_id, is_admin)
     if resp:
         return resp
     return cookie.get_meta()

-def api_edit_cookie(user_id, cookie_uuid, cookie_dict):
+def api_edit_cookie(user_org, user_id, is_admin, cookie_uuid, cookie_dict):
     cookie = Cookie(cookie_uuid)
     if not cookie.exists():
         return {'error': 'unknown cookie uuid', 'cookie_uuid': cookie_uuid}, 404
-    resp = api_verify_cookiejar_acl(cookie.get_cookiejar(), user_id)
+    resp = api_check_cookiejar_access_acl(cookie.get_cookiejar(), user_org, user_id, is_admin)
     if resp:
         return resp
     if 'name' not in cookie_dict or 'value' not in cookie_dict or not cookie_dict['name'] or not cookie_dict['value']:
@@ -809,8 +870,8 @@
     cookie.edit(cookie_dict)
     return cookie.get_meta(), 200

-def api_create_cookie(user_id, cookiejar_uuid, cookie_dict):
-    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
+def api_create_cookie(user_org, user_id, is_admin, cookiejar_uuid, cookie_dict):
+    resp = api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
     if resp:
         return resp
     if 'name' not in cookie_dict or 'value' not in cookie_dict or not cookie_dict['name'] or not cookie_dict['value']:
@@ -826,12 +887,12 @@
     cookiejar.add_cookie(name, value, domain=domain, httponly=httponly, path=path, secure=secure, text=text)
     return resp, 200

-def api_delete_cookie(user_id, cookie_uuid):
+def api_delete_cookie(user_org, user_id, is_admin, cookie_uuid):
     cookie = Cookie(cookie_uuid)
     if not cookie.exists():
         return {'error': 'unknown cookie uuid', 'cookie_uuid': cookie_uuid}, 404
     cookiejar_uuid = cookie.get_cookiejar()
-    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
+    resp = api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
     if resp:
         return resp
     cookiejar = Cookiejar(cookiejar_uuid)
@@ -877,8 +938,8 @@ def unpack_imported_json_cookie(json_cookie):
 ## - - ##

 #### COOKIEJAR API ####
-def api_import_cookies_from_json(user_id, cookiejar_uuid, json_cookies_str):  # # TODO: add catch
-    resp = api_verify_cookiejar_acl(cookiejar_uuid, user_id)
+def api_import_cookies_from_json(user_org, user_id, is_admin, cookiejar_uuid, json_cookies_str):  # # TODO: add catch
+    resp = api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
     if resp:
         return resp
     json_cookies = json.loads(json_cookies_str)
@@ -1724,7 +1785,7 @@
     return {'url': url, 'depth_limit': depth_limit, 'har': har, 'screenshot': screenshot,
             'proxy': proxy, 'tags': tags}, 200

-def api_add_crawler_task(data, user_id=None):
+def api_add_crawler_task(data, user_org, user_id=None):
     task, resp = api_parse_task_dict_basic(data, user_id)
     if resp != 200:
         return task, resp
@@ -1750,7 +1811,7 @@
     cookies = data.get('cookies', None)
     if not cookiejar_uuid and cookies:
         # Create new cookiejar
-        cookiejar_uuid = create_cookiejar(user_id, "single-shot cookiejar", 1, None)
+        cookiejar_uuid = create_cookiejar(user_org, user_id, "single-shot cookiejar", 1, None)  # TODO REVIEW DEFAULT LEVEL
         cookiejar = Cookiejar(cookiejar_uuid)
         for cookie in cookies:
             try:
diff --git a/update/v5.0/DB_KVROCKS_MIGRATION.py b/update/v5.0/DB_KVROCKS_MIGRATION.py
index f3c8e4e4..d8d43a48 100755
--- a/update/v5.0/DB_KVROCKS_MIGRATION.py
+++ b/update/v5.0/DB_KVROCKS_MIGRATION.py
@@ -444,7 +444,7 @@ def crawler_migration():
         # print(meta)
         cookiejar = crawlers.Cookiejar(meta['uuid'])
         if not cookiejar.exists():
-            crawlers.create_cookiejar(meta['user'], description=meta['description'], level=meta['level'],
+            crawlers.create_cookiejar(get_ail_uuid(), meta['user'], description=meta['description'], level=meta['level'],
                                       cookiejar_uuid=meta['uuid'])
            cookiejar._set_date(meta['date'])

@@ -452,7 +452,7 @@
            cookie_dict = get_cookie_dict(cookie_uuid)
            if cookie_dict:
                # print(cookie_dict)
-               crawlers.api_create_cookie(meta['user'], cookiejar_uuid, cookie_dict)
+               crawlers.api_create_cookie(get_ail_uuid(), meta['user'], True, cookiejar_uuid, cookie_dict)

     auto_crawler_web = r_crawler.smembers('auto_crawler_url:regular')
     auto_crawler_onion = r_crawler.smembers('auto_crawler_url:onion')
diff --git a/var/www/blueprints/api_rest.py b/var/www/blueprints/api_rest.py
index cf2f9293..b39c9ec5 100644
--- a/var/www/blueprints/api_rest.py
+++ b/var/www/blueprints/api_rest.py
@@ -124,8 +124,8 @@ def v1_pyail_version():
 def add_crawler_task():
     data = request.get_json()
     user_token = get_auth_from_header()
-    user_id = ail_api.get_user_from_token(user_token)
-    res = crawlers.api_add_crawler_task(data, user_id=user_id)
+    user_org, user_id, _ = get_basic_user_meta(user_token)
+    res = crawlers.api_add_crawler_task(data, user_org, user_id=user_id)
     if res:
         return create_json_response(res[0], res[1])
diff --git a/var/www/blueprints/crawler_splash.py b/var/www/blueprints/crawler_splash.py
index 160da23d..c4011181 100644
--- a/var/www/blueprints/crawler_splash.py
+++ b/var/www/blueprints/crawler_splash.py
@@ -95,8 +95,9 @@ def crawlers_dashboard_captures_delete():
 @login_required
 @login_read_only
 def manual():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
-    l_cookiejar = crawlers.api_get_cookiejars_selector(user_id)
+    l_cookiejar = crawlers.api_get_cookiejars_selector(user_org, user_id)
     crawlers_types = crawlers.get_crawler_all_types()
     proxies = []  # TODO HANDLE PROXIES
     return render_template("crawler_manual.html",
@@ -111,6 +112,7 @@ def manual():
 @login_required
 @login_analyst
 def send_to_spider():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()

     # POST val
@@ -186,7 +188,7 @@ def send_to_spider():
     if tags:
         data['tags'] = tags
     # print(data)
-    res = crawlers.api_add_crawler_task(data, user_id=user_id)
+    res = crawlers.api_add_crawler_task(data, user_org, user_id=user_id)
     if res[1] != 200:
         return create_json_response(res[0], res[1])

@@ -656,14 +658,19 @@
 @login_required
 @login_analyst
 def crawler_cookiejar_add_post():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     description = request.form.get('description')
     level = request.form.get('level')
-    if level:
+
+    try:
+        level = int(level)
+    except (TypeError, ValueError):
+        level = 1
+    if level not in range(0, 3):
         level = 1
-    else:
-        level = 0

     if 'file' in request.files:
         file = request.files['file']
@@ -686,15 +693,15 @@
         return create_json_response({'error': 'invalid cookie', 'invalid fields': l_invalid_cookie}, 400)

     # Create Cookiejar
-    cookiejar_uuid = crawlers.create_cookiejar(user_id, level=level, description=description)
+    cookiejar_uuid = crawlers.create_cookiejar(user_org, user_id, level=level, description=description)

     # Create Cookies
     if json_cookies:  # TODO CHECK Import
-        res = crawlers.api_import_cookies_from_json(user_id, cookiejar_uuid, json_cookies)
+        res = crawlers.api_import_cookies_from_json(user_org, user_id, is_admin, cookiejar_uuid, json_cookies)
         if res:
             return create_json_response(res[0], res[1])
     for cookie_dict in l_manual_cookie:
-        crawlers.api_create_cookie(user_id, cookiejar_uuid, cookie_dict)
+        crawlers.api_create_cookie(user_org, user_id, is_admin, cookiejar_uuid, cookie_dict)

     return redirect(url_for('crawler_splash.crawler_cookiejar_show', uuid=cookiejar_uuid))
@@ -703,20 +710,25 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_all():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
     user_cookiejars = crawlers.get_cookiejars_meta_by_iterator(crawlers.get_cookiejars_user(user_id))
+    org_cookiejars = crawlers.get_cookiejars_meta_by_iterator(crawlers.get_cookiejars_org(user_org))
     global_cookiejars = crawlers.get_cookiejars_meta_by_iterator(crawlers.get_cookiejars_global())
-    return render_template("all_cookiejar.html", user_cookiejar=user_cookiejars, global_cookiejar=global_cookiejars)
+    return render_template("all_cookiejar.html", user_cookiejar=user_cookiejars,
+                           org_cookiejar=org_cookiejars, global_cookiejar=global_cookiejars)


 @crawler_splash.route('/crawler/cookiejar/show', methods=['GET'])
 @login_required
 @login_read_only
 def crawler_cookiejar_show():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookiejar_uuid = request.args.get('uuid')

-    res = crawlers.api_get_cookiejar(cookiejar_uuid, user_id)
+    res = crawlers.api_get_cookiejar(user_org, user_id, is_admin, cookiejar_uuid)
     if res[1] != 200:
         return create_json_response(res[0], res[1])
     else:
@@ -729,10 +741,12 @@
 @login_required
 @login_analyst
 def crawler_cookiejar_cookie_delete():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookie_uuid = request.args.get('uuid')

-    res = crawlers.api_delete_cookie(user_id, cookie_uuid)
+    res = crawlers.api_delete_cookie(user_org, user_id, is_admin, cookie_uuid)
     if res[1] != 200:
         return create_json_response(res[0], res[1])
     else:
@@ -744,10 +758,12 @@
 @login_required
 @login_analyst
 def crawler_cookiejar_delete():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookiejar_uuid = request.args.get('uuid')

-    res = crawlers.api_delete_cookiejar(user_id, cookiejar_uuid)
+    res = crawlers.api_delete_cookiejar(user_org, user_id, is_admin, cookiejar_uuid)
     if res[1] != 200:
         return create_json_response(res[0], res[1])
     return redirect(url_for('crawler_splash.crawler_cookiejar_all'))
@@ -757,11 +773,13 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_edit():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookiejar_uuid = request.args.get('uuid')
     description = request.args.get('description')

-    res = crawlers.api_edit_cookiejar_description(user_id, cookiejar_uuid, description)
+    res = crawlers.api_edit_cookiejar_description(user_org, user_id, is_admin, cookiejar_uuid, description)
     return create_json_response(res[0], res[1])

@@ -769,10 +787,12 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_cookie_edit():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookie_uuid = request.args.get('uuid')

-    cookie_dict = crawlers.api_get_cookie(user_id, cookie_uuid)
+    cookie_dict = crawlers.api_get_cookie(user_org, user_id, is_admin, cookie_uuid)
     return render_template("edit_cookie.html", cookie_uuid=cookie_uuid, cookie_dict=cookie_dict)

@@ -780,7 +800,9 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_cookie_edit_post():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookie_uuid = request.form.get('cookie_uuid')
     name = request.form.get('name')
     value = request.form.get('value')
@@ -799,7 +821,7 @@
     if secure:
         cookie_dict['secure'] = True

-    res = crawlers.api_edit_cookie(user_id, cookie_uuid, cookie_dict)
+    res = crawlers.api_edit_cookie(user_org, user_id, is_admin, cookie_uuid, cookie_dict)
     if res[1] != 200:
         return create_json_response(res[0], res[1])
     cookie = crawlers.Cookie(cookie_uuid)
@@ -811,7 +833,13 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_cookie_add():
+    user_org = current_user.get_org()
+    user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookiejar_uuid = request.args.get('uuid')
+    res = crawlers.api_check_cookiejar_access_acl(cookiejar_uuid, user_org, user_id, is_admin)
+    if res:
+        return create_json_response(res[0], res[1])
     return render_template("add_cookie.html", cookiejar_uuid=cookiejar_uuid)

@@ -819,7 +847,9 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_cookie_manual_add_post():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookiejar_uuid = request.form.get('cookiejar_uuid')
     name = request.form.get('name')
     value = request.form.get('value')
@@ -838,7 +868,7 @@
     if secure:
         cookie_dict['secure'] = True

-    res = crawlers.api_create_cookie(user_id, cookiejar_uuid, cookie_dict)
+    res = crawlers.api_create_cookie(user_org, user_id, is_admin, cookiejar_uuid, cookie_dict)
     if res[1] != 200:
         return create_json_response(res[0], res[1])

@@ -849,14 +879,16 @@
 @login_required
 @login_read_only
 def crawler_cookiejar_cookie_json_add_post():
+    user_org = current_user.get_org()
     user_id = current_user.get_user_id()
+    is_admin = current_user.is_admin()
     cookiejar_uuid = request.form.get('cookiejar_uuid')

     if 'file' in request.files:
         file = request.files['file']
         json_cookies = file.read().decode()
         if json_cookies:
-            res = crawlers.api_import_cookies_from_json(user_id, cookiejar_uuid, json_cookies)
+            res = crawlers.api_import_cookies_from_json(user_org, user_id, is_admin, cookiejar_uuid, json_cookies)
             if res[1] != 200:
                 return create_json_response(res[0], res[1])

diff --git a/var/www/templates/crawler/crawler_splash/add_cookiejar.html b/var/www/templates/crawler/crawler_splash/add_cookiejar.html
index 1f2a45d5..9817d691 100644
--- a/var/www/templates/crawler/crawler_splash/add_cookiejar.html
+++ b/var/www/templates/crawler/crawler_splash/add_cookiejar.html
@@ -44,12 +44,14 @@
-
-
-
-
+
+
+
+
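
Reviewer note (not part of the patch): judging from crawler_cookiejar_add_post() above, the add_cookiejar form now submits a numeric level field in place of the old user/global checkbox. Below is a minimal sketch of how such a value is normalised and what each level selects under this patch's semantics; the helper name and the example values are illustrative only.

# Sketch only -- mirrors the level handling added in crawler_splash.py / crawlers.py.
#   0 -> private jar, indexed in  cookiejars:user:<user_id>
#   1 -> global jar,  indexed in  cookiejars:global   (default / fallback)
#   2 -> org jar,     indexed via ail_orgs.add_obj_to_org(org_uuid, 'cookiejar', uuid)
def normalise_cookiejar_level(raw_level):
    try:
        level = int(raw_level)   # request.form.get('level') returns a string or None
    except (TypeError, ValueError):
        return 1
    return level if level in (0, 1, 2) else 1

assert normalise_cookiejar_level('2') == 2
assert normalise_cookiejar_level(None) == 1       # field missing from the form
assert normalise_cookiejar_level('bogus') == 1    # malformed value falls back to global
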
diff --git a/var/www/templates/crawler/crawler_splash/all_cookiejar.html b/var/www/templates/crawler/crawler_splash/all_cookiejar.html
index ccd8ba82..ba3b6724 100644
--- a/var/www/templates/crawler/crawler_splash/all_cookiejar.html
+++ b/var/www/templates/crawler/crawler_splash/all_cookiejar.html
@@ -40,6 +40,17 @@
+
+
+        Organisation Cookiejar
+
+
+        {% with all_cookiejar=org_cookiejar, table_id='table_org'%}
+          {% include 'crawler/crawler_splash/table_cookiejar.html' %}
+        {% endwith %}
+
+
Global Cookiejar
@@ -71,6 +82,11 @@ $(document).ready(function(){
       "iDisplayLength": 10,
       "order": [[ 0, "desc" ]]
     });
+    $('#table_org').DataTable({
+      "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
+      "iDisplayLength": 10,
+      "order": [[ 0, "desc" ]]
+    });
     $('#table_global').DataTable({
       "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
       "iDisplayLength": 10,
diff --git a/var/www/templates/crawler/crawler_splash/table_cookiejar.html b/var/www/templates/crawler/crawler_splash/table_cookiejar.html
index bec68c29..13bd3d86 100644
--- a/var/www/templates/crawler/crawler_splash/table_cookiejar.html
+++ b/var/www/templates/crawler/crawler_splash/table_cookiejar.html
@@ -4,6 +4,7 @@
       Description
       Date
       UUID
+      Level
       User
@@ -21,6 +22,17 @@
         {{ dict_cookiejar['uuid']}}
+
+      {% if 'level' in dict_cookiejar %}
+        {% if dict_cookiejar['level'] == 0 %}
+          User
+        {% elif dict_cookiejar['level'] == 1 %}
+          Global
+        {% elif dict_cookiejar['level'] == 2 %}
+          Community
+        {% endif %}
+      {% endif %}
+
       {{dict_cookiejar['user']}}
   {% endfor %}
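
Reviewer note (not part of the patch): a self-contained sketch of the access rules that check_cookiejar_access_acl() introduces in bin/lib/crawlers.py. The organisation comparison is a simplified stand-in for ail_orgs.check_access_acl(), which is outside this diff; all identifiers and example values are illustrative.

# Sketch of the ACL decision added by this patch:
#   admins bypass everything; level 0 = creator only; level 1 = everyone;
#   level 2 = members of the owning organisation; anything else is denied.
def can_access_cookiejar(level, owner_id, owner_org, user_id, user_org, is_admin=False):
    if is_admin:
        return True
    if level == 0:
        return user_id == owner_id
    if level == 1:
        return True
    if level == 2:
        return user_org == owner_org    # stand-in for ail_orgs.check_access_acl()
    return False

# A level-2 jar created by 'alice' from 'org-a':
assert can_access_cookiejar(2, 'alice', 'org-a', 'bob', 'org-a')        # same org -> allowed
assert not can_access_cookiejar(2, 'alice', 'org-a', 'eve', 'org-b')    # other org -> denied
assert not can_access_cookiejar(0, 'alice', 'org-a', 'bob', 'org-a')    # private jar -> creator only
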