diff --git a/bin/crawlers/Crawler.py b/bin/crawlers/Crawler.py
index 7ddcc5e6..b87bd911 100755
--- a/bin/crawlers/Crawler.py
+++ b/bin/crawlers/Crawler.py
@@ -262,6 +262,7 @@ class Crawler(AbstractModule):
         # Origin + History + tags
         if self.root_item:
             self.domain.set_last_origin(parent_id)
+            self.domain.update_vanity_cluster()
             # Tags
             for tag in task.get_tags():
                 self.domain.add_tag(tag)
@@ -277,7 +278,6 @@ class Crawler(AbstractModule):
             self.original_domain.add_history(epoch, root_item=self.root_item)
             # crawlers.update_last_crawled_domain(self.original_domain.get_domain_type(), self.original_domain.id, epoch)
 
-        self.domain.update_vanity_cluster()
         crawlers.update_last_crawled_domain(self.domain.get_domain_type(), self.domain.id, epoch)
         print('capture:', capture.uuid, 'completed')
         print('task: ', task.uuid, 'completed')
diff --git a/var/www/blueprints/crawler_splash.py b/var/www/blueprints/crawler_splash.py
index 31a1e647..697d2acb 100644
--- a/var/www/blueprints/crawler_splash.py
+++ b/var/www/blueprints/crawler_splash.py
@@ -580,7 +580,7 @@ def domains_search_date_post():
 @login_required
 @login_analyst
 def domains_explorer_vanity_clusters():
-    nb_min = request.args.get('min', 0)
+    nb_min = request.args.get('min', 4)
     if int(nb_min) < 0:
         nb_min = 4
     vanity_clusters = Domains.get_vanity_clusters(nb_min=nb_min)
@@ -592,7 +592,7 @@
 @login_analyst
 def domains_explorer_vanity_explore():
     vanity = request.args.get('vanity')
-    nb_min = request.args.get('min', 0)  # TODO SHOW DOMAINS OPTIONS + HARD CODED DOMAINS LIMIT FOR RENDER
+    nb_min = request.args.get('min', 4)  # TODO SHOW DOMAINS OPTIONS + HARD CODED DOMAINS LIMIT FOR RENDER
     length = len(vanity)
     if int(nb_min) < 0:
         nb_min = 4