diff --git a/bin/lib/crawlers.py b/bin/lib/crawlers.py
index 9ed5fdf3..e56293ab 100755
--- a/bin/lib/crawlers.py
+++ b/bin/lib/crawlers.py
@@ -103,6 +103,10 @@ def api_get_onion_lookup(domain):
             # TODO check if object process done ???
             meta['titles'].append(t.get_content())
     return meta
+def api_get_domain_from_url(url):
+    url = url.lower()
+    url_unpack = unpack_url(url)
+    return url_unpack['domain']
 
 
 # # # # # # # # # #
diff --git a/var/www/blueprints/crawler_splash.py b/var/www/blueprints/crawler_splash.py
index 86dd8b98..e553a097 100644
--- a/var/www/blueprints/crawler_splash.py
+++ b/var/www/blueprints/crawler_splash.py
@@ -587,12 +587,17 @@ def domains_search_name():
     if not name:
         return create_json_response({'error': 'Mandatory args name not provided'}, 400)
+    name = crawlers.api_get_domain_from_url(name)
     domains_types = request.args.getlist('domain_types')
     if domains_types:
         domains_types = domains_types[0].split(',')
     domains_types = Domains.sanitize_domains_types(domains_types)
 
+    dom = Domains.Domain(name)
+    if dom.exists():
+        return redirect(url_for('crawler_splash.showDomain', domain=dom.get_id()))
+
     l_dict_domains = Domains.api_search_domains_by_name(name, domains_types, meta=True, page=page)
     return render_template("domains/domains_result_list.html", template_folder='../../',
                            l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
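
For illustration, a minimal standalone sketch of the behaviour the new api_get_domain_from_url helper gives the domain search: a pasted URL is lowercased and reduced to its domain before lookup. The unpack_url stand-in below is a hypothetical substitute built on urllib.parse; the real helper already exists in bin/lib/crawlers.py and may return additional keys.

# Minimal sketch, not the AIL implementation: unpack_url() below is a
# hypothetical stand-in for the existing helper in bin/lib/crawlers.py.
from urllib.parse import urlparse

def unpack_url(url):
    # Assumed to return a dict with at least a 'domain' key.
    parsed = urlparse(url if '://' in url else f'http://{url}')
    return {'domain': parsed.hostname or ''}

def api_get_domain_from_url(url):
    # Same logic as the new helper in the diff: normalise case, then unpack.
    url = url.lower()
    url_unpack = unpack_url(url)
    return url_unpack['domain']

# With this stand-in, a full URL and a bare domain both resolve to the domain:
print(api_get_domain_from_url('HTTP://Example.Onion/some/page'))  # example.onion
print(api_get_domain_from_url('example.com'))                     # example.com

With the redirect added in domains_search_name(), a search for an already-crawled domain (or any URL on it) jumps straight to its crawler_splash.showDomain page instead of the result list.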