mirror of
https://github.com/ail-project/ail-framework.git
synced 2024-11-10 08:38:28 +00:00
Added support for browsing the concerned paste in the dashboard for all modules + show on the page which module was concerned
This commit is contained in:
parent
1826b170ec
commit
ac254e0e63
9 changed files with 18 additions and 18 deletions
|
@ -48,7 +48,7 @@ if __name__ == "__main__":
|
|||
if sites_set:
|
||||
message += ' Related websites: {}'.format(', '.join(sites_set))
|
||||
|
||||
to_print = 'Credential;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, message)
|
||||
to_print = 'Credential;{};{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, message, paste.p_path)
|
||||
|
||||
print('\n '.join(creds))
|
||||
|
||||
|
|
|
@ -63,14 +63,14 @@ if __name__ == "__main__":
|
|||
to_print = 'CreditCard;{};{};{};'.format(
|
||||
paste.p_source, paste.p_date, paste.p_name)
|
||||
if (len(creditcard_set) > 0):
|
||||
publisher.warning('{}Checked {} valid number(s)'.format(
|
||||
to_print, len(creditcard_set)))
|
||||
publisher.warning('{}Checked {} valid number(s);{}'.format(
|
||||
to_print, len(creditcard_set), paste.p_path))
|
||||
#Send to duplicate
|
||||
p.populate_set_out(filename, 'Duplicate')
|
||||
#send to Browse_warning_paste
|
||||
p.populate_set_out('creditcard;{}'.format(filename), 'BrowseWarningPaste')
|
||||
else:
|
||||
publisher.info('{}CreditCard related'.format(to_print))
|
||||
publisher.info('{}CreditCard related;{}'.format(to_print, paste.p_path))
|
||||
else:
|
||||
publisher.debug("Script creditcard is idling 1m")
|
||||
time.sleep(10)
|
||||
|
|
|
@ -51,13 +51,13 @@ def main():
|
|||
localizeddomains = c.include(expression=cc_tld)
|
||||
if localizeddomains:
|
||||
print(localizeddomains)
|
||||
publisher.warning('DomainC;{};{};{};Checked {} located in {}'.format(
|
||||
PST.p_source, PST.p_date, PST.p_name, localizeddomains, cc_tld))
|
||||
publisher.warning('DomainC;{};{};{};Checked {} located in {};{}'.format(
|
||||
PST.p_source, PST.p_date, PST.p_name, localizeddomains, cc_tld, PST.p_path))
|
||||
localizeddomains = c.localizedomain(cc=cc)
|
||||
if localizeddomains:
|
||||
print(localizeddomains)
|
||||
publisher.warning('DomainC;{};{};{};Checked {} located in {}'.format(
|
||||
PST.p_source, PST.p_date, PST.p_name, localizeddomains, cc))
|
||||
publisher.warning('DomainC;{};{};{};Checked {} located in {};{}'.format(
|
||||
PST.p_source, PST.p_date, PST.p_name, localizeddomains, cc, PST.p_path))
|
||||
except IOError:
|
||||
print "CRC Checksum Failed on :", PST.p_path
|
||||
publisher.error('Duplicate;{};{};{};CRC Checksum Failed'.format(
|
||||
|
|
|
@ -162,7 +162,7 @@ if __name__ == "__main__":
|
|||
if dupl != []:
|
||||
PST.__setattr__("p_duplicate", dupl)
|
||||
PST.save_attribute_redis("p_duplicate", dupl)
|
||||
publisher.info('{}Detected {}'.format(to_print, len(dupl)))
|
||||
publisher.info('{}Detected {};{}'.format(to_print, len(dupl), PST.p_path))
|
||||
print '{}Detected {}'.format(to_print, len(dupl))
|
||||
|
||||
y = time.time()
|
||||
|
|
|
@ -133,8 +133,8 @@ if __name__ == "__main__":
|
|||
PST.p_name)
|
||||
if len(domains_list) > 0:
|
||||
|
||||
publisher.warning('{}Detected {} .onion(s)'.format(
|
||||
to_print, len(domains_list)))
|
||||
publisher.warning('{}Detected {} .onion(s);{}'.format(
|
||||
to_print, len(domains_list),PST.p_path))
|
||||
now = datetime.datetime.now()
|
||||
path = os.path.join('onions', str(now.year).zfill(4),
|
||||
str(now.month).zfill(2),
|
||||
|
@ -144,9 +144,9 @@ if __name__ == "__main__":
|
|||
PST.p_date,
|
||||
PST.p_name)
|
||||
for url in fetch(p, r_cache, urls, domains_list, path):
|
||||
publisher.warning('{}Checked {}'.format(to_print, url))
|
||||
publisher.warning('{}Checked {};{}'.format(to_print, url, PST.p_path))
|
||||
else:
|
||||
publisher.info('{}Onion related'.format(to_print))
|
||||
publisher.info('{}Onion related;{}'.format(to_print, PST.p_path))
|
||||
|
||||
prec_filename = filename
|
||||
else:
|
||||
|
|
|
@ -34,7 +34,7 @@ if __name__ == "__main__":
|
|||
if len(releases) == 0:
|
||||
continue
|
||||
|
||||
to_print = 'Release;{};{};{};{} releases'.format(paste.p_source, paste.p_date, paste.p_name, len(releases))
|
||||
to_print = 'Release;{};{};{};{} releases;{}'.format(paste.p_source, paste.p_date, paste.p_name, len(releases), paste.p_path)
|
||||
if len(releases) > 30:
|
||||
publisher.warning(to_print)
|
||||
else:
|
||||
|
|
|
@ -69,7 +69,7 @@ def analyse(url, path):
|
|||
if (result_path > 1) or (result_query > 1):
|
||||
print "Detected SQL in URL: "
|
||||
print urllib2.unquote(url)
|
||||
to_print = 'SQLInjection;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, "Detected SQL in URL")
|
||||
to_print = 'SQLInjection;{};{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, "Detected SQL in URL", paste.p_path)
|
||||
publisher.warning(to_print)
|
||||
#Send to duplicate
|
||||
p.populate_set_out(path, 'Duplicate')
|
||||
|
|
|
@ -131,8 +131,8 @@ if __name__ == "__main__":
|
|||
list(A_values[1])))
|
||||
|
||||
pprint.pprint(A_values)
|
||||
publisher.info('Url;{};{};{};Checked {} URL'.format(
|
||||
PST.p_source, PST.p_date, PST.p_name, A_values[0]))
|
||||
publisher.info('Url;{};{};{};Checked {} URL;{}'.format(
|
||||
PST.p_source, PST.p_date, PST.p_name, A_values[0], PST.p_path))
|
||||
prec_filename = filename
|
||||
|
||||
else:
|
||||
|
|
|
@ -170,7 +170,7 @@ function create_log_table(obj_json) {
|
|||
msage.appendChild(document.createTextNode(message.join(" ")));
|
||||
|
||||
var paste_path = parsedmess[5];
|
||||
var url_to_saved_paste = url_showSavedPath+"?paste="+paste_path+"&num=0";
|
||||
var url_to_saved_paste = url_showSavedPath+"?paste="+paste_path+"&num="+parsedmess[0];
|
||||
|
||||
var action_icon_a = document.createElement("A");
|
||||
action_icon_a.setAttribute("TARGET", "_blank");
|
||||
|
|
Loading…
Reference in a new issue