#!/usr/bin/env python3
# -*-coding:UTF-8 -*

'''
    Flask functions and routes for the trending modules page

    note: candidate credentials are matched against the supplied credential using the Levenshtein similarity ratio
'''
import json
import redis
import datetime
import calendar
import flask
from flask import Flask, render_template, jsonify, request, Blueprint, url_for, redirect, Response

from Role_Manager import login_admin, login_analyst
from flask_login import login_required, current_user

import re
from pprint import pprint
import Levenshtein

# ---------------------------------------------------------------

import Paste
import Term

# ============ VARIABLES ============
import Flask_config

app = Flask_config.app
cfg = Flask_config.cfg
baseUrl = Flask_config.baseUrl
r_serv_term = Flask_config.r_serv_term
r_serv_cred = Flask_config.r_serv_cred
r_serv_db = Flask_config.r_serv_db
bootstrap_label = Flask_config.bootstrap_label

terms = Blueprint('terms', __name__, template_folder='templates')

'''TERM'''
DEFAULT_MATCH_PERCENT = 50

#tracked
TrackedTermsSet_Name = "TrackedSetTermSet"
TrackedTermsDate_Name = "TrackedTermDate"
#black
BlackListTermsDate_Name = "BlackListTermDate"
BlackListTermsSet_Name = "BlackListSetTermSet"
#regex
TrackedRegexSet_Name = "TrackedRegexSet"
TrackedRegexDate_Name = "TrackedRegexDate"
#set
TrackedSetSet_Name = "TrackedSetSet"
TrackedSetDate_Name = "TrackedSetDate"

# notifications enabled/disabled
# same value as in `bin/NotificationHelper.py`
TrackedTermsNotificationEnabled_Name = "TrackedNotifications"

# associated notification email addresses for a specific term
# same value as in `bin/NotificationHelper.py`
# Keys will be e.g. TrackedNotificationEmails_<TERMNAME>
TrackedTermsNotificationEmailsPrefix_Name = "TrackedNotificationEmails_"
TrackedTermsNotificationTagsPrefix_Name = "TrackedNotificationTags_"

'''CRED'''
REGEX_CRED = '[a-z]+|[A-Z]{3,}|[A-Z]{1,2}[a-z]+|[0-9]+'
REDIS_KEY_NUM_USERNAME = 'uniqNumForUsername'
REDIS_KEY_NUM_PATH = 'uniqNumForUsername'
REDIS_KEY_ALL_CRED_SET = 'AllCredentials'
REDIS_KEY_ALL_CRED_SET_REV = 'AllCredentialsRev'
REDIS_KEY_ALL_PATH_SET = 'AllPath'
REDIS_KEY_ALL_PATH_SET_REV = 'AllPathRev'
REDIS_KEY_MAP_CRED_TO_PATH = 'CredToPathMapping'
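
# Assumed layout of the credential keys above, inferred only from the hget/smembers
# calls further down in this file (the producer side lives in the Credential module):
#   AllCredentials          hash   username   -> numeric credential id
#   AllCredentialsRev       hash   numeric id -> username
#   AllPath / AllPathRev    hashes mapping paste path <-> numeric path id
#   CredToPathMapping_<id>  set    numeric path ids where credential <id> was seen
#
# Illustrative only: REGEX_CRED splits a credential into word-like tokens, e.g.
#   re.findall(REGEX_CRED, 'JohnSmith2019')  ->  ['John', 'Smith', '2019']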

# ============ FUNCTIONS ============

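# Sketch of what the helper below returns (derived from its loop logic): walking back
# day by day from startDate, it accumulates the per-day counts stored in the Redis
# hashes "<per_paste><timestamp>" and emits the running total each time the number of
# elapsed days reaches one of the values in num_day. For example, num_day=[1, 7, 31]
# yields [count over 1 day, count over 7 days, count over 31 days].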
def Term_getValueOverRange(word, startDate, num_day, per_paste=""):
    passed_days = 0
    oneDay = 60*60*24
    to_return = []
    curr_to_return = 0
    for timestamp in range(startDate, startDate - max(num_day)*oneDay, -oneDay):
        value = r_serv_term.hget(per_paste+str(timestamp), word)
        curr_to_return += int(value) if value is not None else 0
        for i in num_day:
            if passed_days == i-1:
                to_return.append(curr_to_return)
        passed_days += 1
    return to_return

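# Illustrative only (not an exhaustive list): for supplied="John Smith" the function
# below yields case variants such as 'john', 'SMITH', 'JohnSmith', 'smithJOHN' and
# 'Jsmith'; with extensive=True it additionally adds every substring window of
# 'JohnSmith' of length 3 or more, then filters the list to entries longer than 2 characters.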
#Mix supplied username, if extensive is set, slice username(s) with different windows
def mixUserName(supplied, extensive=False):
    #e.g.: John Smith
    terms = supplied.split()[:2]
    usernames = []
    if len(terms) == 1:
        terms.append(' ')

    #john, smith, John, Smith, JOHN, SMITH
    usernames += [terms[0].lower()]
    usernames += [terms[1].lower()]
    usernames += [terms[0][0].upper() + terms[0][1:].lower()]
    usernames += [terms[1][0].upper() + terms[1][1:].lower()]
    usernames += [terms[0].upper()]
    usernames += [terms[1].upper()]

    #johnsmith, smithjohn, JOHNsmith, johnSMITH, SMITHjohn, smithJOHN
    usernames += [(terms[0].lower() + terms[1].lower()).strip()]
    usernames += [(terms[1].lower() + terms[0].lower()).strip()]
    usernames += [(terms[0].upper() + terms[1].lower()).strip()]
    usernames += [(terms[0].lower() + terms[1].upper()).strip()]
    usernames += [(terms[1].upper() + terms[0].lower()).strip()]
    usernames += [(terms[1].lower() + terms[0].upper()).strip()]

    #Jsmith, JSmith, jsmith, jSmith, johnS, Js, JohnSmith, Johnsmith, johnSmith
    usernames += [(terms[0][0].upper() + terms[1][0].lower() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].upper() + terms[1][0].upper() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].lower() + terms[1][0].lower() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].lower() + terms[1][0].upper() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0].lower() + terms[1][0].upper()).strip()]
    usernames += [(terms[0].upper() + terms[1][0].lower()).strip()]
    usernames += [(terms[0][0].upper() + terms[0][1:].lower() + terms[1][0].upper() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].upper() + terms[0][1:].lower() + terms[1][0].lower() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].lower() + terms[0][1:].lower() + terms[1][0].upper() + terms[1][1:].lower()).strip()]

    if not extensive:
        return usernames

    #Slice the supplied username(s)
    mixedSupplied = supplied.replace(' ','')
    # minWindow is computed but not used by the window loop below (kept as-is)
    minWindow = 3 if len(mixedSupplied)/2 < 4 else len(mixedSupplied)/2
    for winSize in range(3, len(mixedSupplied)):
        for startIndex in range(0, len(mixedSupplied)-winSize):
            usernames += [mixedSupplied[startIndex:startIndex+winSize]]

    filtered_usernames = []
    for usr in usernames:
        if len(usr) > 2:
            filtered_usernames.append(usr)
    return filtered_usernames

def save_tag_to_auto_push(list_tag):
    for tag in set(list_tag):
        #limit tag length
        if len(tag) > 49:
            tag = tag[0:48]
        r_serv_db.sadd('list_export_tags', tag)

# ============ ROUTES ============

@terms.route("/tracker_term")
|
|
|
|
def tracked_term_menu():
|
|
|
|
user_id = current_user.get_id()
|
|
|
|
user_term = Term.get_all_user_tracked_terms(user_id)
|
|
|
|
global_term = Term.get_all_global_tracked_terms()
|
|
|
|
return render_template("tracker_term_management.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label)
|
2018-02-28 08:19:27 +00:00
|
|
|
|
|
|
|
|
2019-08-14 07:44:49 +00:00
|
|
|
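# The POST handler below reads the form fields "term", "tracker_type", "nb_word",
# "level", "tags" and "mails" (tags and mails are whitespace-separated strings) and
# forwards them to Term.parse_json_term_to_add() as a dict, e.g. (illustrative values):
#   {"term": "ail leak", "type": "word", "nb_words": 1,
#    "tags": ["infoleak"], "mails": ["user@example.com"], "level": 1}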
@terms.route("/tracker/add", methods=['GET', 'POST'])
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2019-08-14 07:44:49 +00:00
|
|
|
def add_tracked_term_menu():
|
|
|
|
if request.method == 'POST':
|
|
|
|
term = request.form.get("term")
|
|
|
|
term_type = request.form.get("tracker_type")
|
|
|
|
nb_words = request.form.get("nb_word", 1)
|
|
|
|
level = request.form.get("level", 1)
|
|
|
|
tags = request.form.get("tags", [])
|
|
|
|
mails = request.form.get("mails", [])
|
|
|
|
|
|
|
|
if mails:
|
|
|
|
mails = mails.split()
|
|
|
|
if tags:
|
|
|
|
tags = tags.split()
|
|
|
|
input_dict = {"term": term, "type": term_type, "nb_words": nb_words, "tags": tags, "mails": mails}
|
|
|
|
user_id = current_user.get_id()
|
|
|
|
res = Term.parse_json_term_to_add(input_dict, user_id)
|
|
|
|
if res[1] == 200:
|
|
|
|
return redirect(url_for('terms.tracked_term_menu'))
|
|
|
|
else:
|
|
|
|
## TODO: use modal
|
|
|
|
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
2017-03-28 15:42:44 +00:00
|
|
|
else:
|
2019-08-14 07:44:49 +00:00
|
|
|
return render_template("Add_tracker.html")
|
2016-12-09 07:46:37 +00:00
|
|
|
|
2019-08-14 07:44:49 +00:00
|
|
|
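# Query parameters read by the route below: 'uuid' (tracked term uuid, access-checked
# against the current user) and optional 'date_from' / 'date_to' supplied as
# YYYY-MM-DD; the dashes are stripped before the dates are passed to Term.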
@terms.route("/tracker/show_term_tracker")
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2019-08-14 07:44:49 +00:00
|
|
|
def show_term_tracker():
|
|
|
|
user_id = current_user.get_id()
|
|
|
|
term_uuid = request.args.get('uuid', None)
|
|
|
|
res = Term.check_term_uuid_valid_access(term_uuid, user_id)
|
|
|
|
if res: # invalid access
|
|
|
|
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
|
|
|
|
|
|
|
date_from = request.args.get('date_from')
|
|
|
|
date_to = request.args.get('date_to')
|
|
|
|
|
|
|
|
if date_from:
|
|
|
|
date_from = date_from.replace('-', '')
|
|
|
|
if date_to:
|
|
|
|
date_to = date_to.replace('-', '')
|
|
|
|
|
|
|
|
term_metadata = Term.get_term_metedata(term_uuid, user_id=True, level=True, tags=True, mails=True, sparkline=True)
|
|
|
|
|
|
|
|
if date_from:
|
|
|
|
res = Term.parse_get_tracker_term_item({'uuid': term_uuid, 'date_from': date_from, 'date_to': date_to}, user_id)
|
|
|
|
if res[1] !=200:
|
|
|
|
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
|
|
|
term_metadata['items'] = res[0]['items']
|
|
|
|
term_metadata['date_from'] = res[0]['date_from']
|
|
|
|
term_metadata['date_to'] = res[0]['date_to']
|
2016-12-09 07:46:37 +00:00
|
|
|
else:
|
2019-08-14 07:44:49 +00:00
|
|
|
term_metadata['items'] = []
|
|
|
|
term_metadata['date_from'] = ''
|
|
|
|
term_metadata['date_to'] = ''
|
2016-12-09 07:46:37 +00:00
|
|
|
|
2019-08-14 07:44:49 +00:00
|
|
|
return render_template("showTrackerTerm.html", term_metadata=term_metadata, bootstrap_label=bootstrap_label)
|
2016-12-09 07:46:37 +00:00
|
|
|
|
2019-08-14 07:44:49 +00:00
|
|
|
@terms.route("/tracker/update_tracker_tags", methods=['POST'])
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2019-08-14 07:44:49 +00:00
|
|
|
def update_tracker_tags():
|
|
|
|
user_id = current_user.get_id()
|
|
|
|
term_uuid = request.form.get('uuid')
|
|
|
|
res = Term.check_term_uuid_valid_access(term_uuid, user_id)
|
|
|
|
if res: # invalid access
|
|
|
|
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
|
|
|
tags = request.form.get('tags')
|
|
|
|
if tags:
|
|
|
|
tags = tags.split()
|
2018-11-07 14:37:25 +00:00
|
|
|
else:
|
2019-08-14 07:44:49 +00:00
|
|
|
tags = []
|
|
|
|
Term.replace_tracked_term_tags(term_uuid, tags)
|
|
|
|
return redirect(url_for('terms.show_term_tracker', uuid=term_uuid))
|
2018-11-07 14:37:25 +00:00
|
|
|
|
2019-08-14 07:44:49 +00:00
|
|
|
@terms.route("/tracker/update_tracker_mails", methods=['POST'])
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2019-08-14 07:44:49 +00:00
|
|
|
def update_tracker_mails():
|
|
|
|
user_id = current_user.get_id()
|
|
|
|
term_uuid = request.form.get('uuid')
|
|
|
|
res = Term.check_term_uuid_valid_access(term_uuid, user_id)
|
|
|
|
if res: # invalid access
|
|
|
|
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
|
|
|
mails = request.form.get('mails')
|
|
|
|
if mails:
|
|
|
|
mails = mails.split()
|
2018-11-07 10:42:31 +00:00
|
|
|
else:
|
2019-08-14 07:44:49 +00:00
|
|
|
mails = []
|
|
|
|
res = Term.replace_tracked_term_mails(term_uuid, mails)
|
|
|
|
if res: # invalid mail
|
|
|
|
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
|
|
|
return redirect(url_for('terms.show_term_tracker', uuid=term_uuid))
|
2016-12-09 07:46:37 +00:00
|
|
|
|
|
|
|
|
2017-04-19 09:02:03 +00:00
|
|
|
@terms.route("/terms_plot_tool/")
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2016-12-09 07:46:37 +00:00
|
|
|
def terms_plot_tool():
|
|
|
|
term = request.args.get('term')
|
|
|
|
if term is not None:
|
|
|
|
return render_template("terms_plot_tool.html", term=term)
|
|
|
|
else:
|
|
|
|
return render_template("terms_plot_tool.html", term="")
|
|
|
|
|
|
|
|
|
2017-04-19 09:02:03 +00:00
|
|
|
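# From the hget calls below, per-day term counts appear to be stored in Redis hashes
# keyed "<day timestamp>" (or "per_paste_<day timestamp>" for the per-paste variant),
# mapping term -> count; the data routes simply walk those hashes day by day. This is
# inferred from the reads in this file, not from the writer side.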
@terms.route("/terms_plot_tool_data/")
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2016-12-09 07:46:37 +00:00
|
|
|
def terms_plot_tool_data():
|
|
|
|
oneDay = 60*60*24
|
|
|
|
range_start = datetime.datetime.utcfromtimestamp(int(float(request.args.get('range_start')))) if request.args.get('range_start') is not None else 0;
|
|
|
|
range_start = range_start.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
|
|
range_start = calendar.timegm(range_start.timetuple())
|
|
|
|
range_end = datetime.datetime.utcfromtimestamp(int(float(request.args.get('range_end')))) if request.args.get('range_end') is not None else 0;
|
|
|
|
range_end = range_end.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
|
|
range_end = calendar.timegm(range_end.timetuple())
|
|
|
|
term = request.args.get('term')
|
|
|
|
|
2017-02-28 14:54:39 +00:00
|
|
|
per_paste = request.args.get('per_paste')
|
|
|
|
if per_paste == "1" or per_paste is None:
|
|
|
|
per_paste = "per_paste_"
|
|
|
|
else:
|
|
|
|
per_paste = ""
|
|
|
|
|
2016-12-09 07:46:37 +00:00
|
|
|
if term is None:
|
|
|
|
return "None"
|
2017-07-20 08:04:30 +00:00
|
|
|
|
2016-12-09 07:46:37 +00:00
|
|
|
else:
|
|
|
|
value_range = []
|
|
|
|
for timestamp in range(range_start, range_end+oneDay, oneDay):
|
2017-02-28 14:54:39 +00:00
|
|
|
value = r_serv_term.hget(per_paste+str(timestamp), term)
|
2016-12-09 07:46:37 +00:00
|
|
|
curr_value_range = int(value) if value is not None else 0
|
|
|
|
value_range.append([timestamp, curr_value_range])
|
|
|
|
value_range.insert(0,term)
|
|
|
|
return jsonify(value_range)
|
|
|
|
|
|
|
|
|
2017-04-19 09:02:03 +00:00
|
|
|
@terms.route("/terms_plot_top/")
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2016-12-09 07:46:37 +00:00
|
|
|
def terms_plot_top():
|
2017-02-15 15:29:02 +00:00
|
|
|
per_paste = request.args.get('per_paste')
|
|
|
|
per_paste = per_paste if per_paste is not None else 1
|
|
|
|
return render_template("terms_plot_top.html", per_paste=per_paste)
|
2016-12-09 07:46:37 +00:00
|
|
|
|
|
|
|
|
2017-04-19 09:02:03 +00:00
|
|
|
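# The route below reads the top-20 terms from the sorted sets
# "TopTermFreq_set_day_<timestamp>", "TopTermFreq_set_week" and "TopTermFreq_set_month"
# (optionally prefixed with "per_paste_") and, for each term, also returns its rank in
# the day/week/month sets plus a per-day history covering 'num_day' days.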
@terms.route("/terms_plot_top_data/")
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2016-12-09 07:46:37 +00:00
|
|
|
def terms_plot_top_data():
|
|
|
|
oneDay = 60*60*24
|
|
|
|
today = datetime.datetime.now()
|
|
|
|
today = today.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
|
|
today_timestamp = calendar.timegm(today.timetuple())
|
|
|
|
|
2017-02-28 14:54:39 +00:00
|
|
|
per_paste = request.args.get('per_paste')
|
|
|
|
if per_paste == "1" or per_paste is None:
|
2017-02-28 14:01:48 +00:00
|
|
|
per_paste = "per_paste_"
|
|
|
|
else:
|
|
|
|
per_paste = ""
|
|
|
|
|
|
|
|
set_day = per_paste + "TopTermFreq_set_day_" + str(today_timestamp)
|
|
|
|
set_week = per_paste + "TopTermFreq_set_week";
|
|
|
|
set_month = per_paste + "TopTermFreq_set_month";
|
|
|
|
|
|
|
|
the_set = per_paste + request.args.get('set')
|
|
|
|
num_day = int(request.args.get('num_day'))
|
2017-02-15 15:29:02 +00:00
|
|
|
|
2016-12-09 07:46:37 +00:00
|
|
|
if the_set is None:
|
|
|
|
return "None"
|
|
|
|
else:
|
|
|
|
to_return = []
|
2017-02-15 15:29:02 +00:00
|
|
|
if "TopTermFreq_set_day" in the_set:
|
2016-12-09 07:46:37 +00:00
|
|
|
the_set += "_" + str(today_timestamp)
|
|
|
|
|
|
|
|
for term, tot_value in r_serv_term.zrevrangebyscore(the_set, '+inf', '-inf', withscores=True, start=0, num=20):
|
|
|
|
position = {}
|
|
|
|
position['day'] = r_serv_term.zrevrank(set_day, term)
|
|
|
|
position['day'] = position['day']+1 if position['day'] is not None else "<20"
|
|
|
|
position['week'] = r_serv_term.zrevrank(set_week, term)
|
|
|
|
position['week'] = position['week']+1 if position['week'] is not None else "<20"
|
|
|
|
position['month'] = r_serv_term.zrevrank(set_month, term)
|
|
|
|
position['month'] = position['month']+1 if position['month'] is not None else "<20"
|
|
|
|
value_range = []
|
|
|
|
for timestamp in range(today_timestamp, today_timestamp - num_day*oneDay, -oneDay):
|
2017-02-28 14:01:48 +00:00
|
|
|
value = r_serv_term.hget(per_paste+str(timestamp), term)
|
2016-12-09 07:46:37 +00:00
|
|
|
curr_value_range = int(value) if value is not None else 0
|
|
|
|
value_range.append([timestamp, curr_value_range])
|
2018-02-28 08:19:27 +00:00
|
|
|
|
2018-05-04 11:53:29 +00:00
|
|
|
to_return.append([term, value_range, tot_value, position])
|
2018-02-28 08:19:27 +00:00
|
|
|
|
2016-12-09 07:46:37 +00:00
|
|
|
return jsonify(to_return)
|
|
|
|
|
|
|
|
|
2017-07-17 15:26:19 +00:00
|
|
|
@terms.route("/credentials_tracker/")
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2017-07-17 15:26:19 +00:00
|
|
|
def credentials_tracker():
|
|
|
|
return render_template("credentials_tracker.html")
|
|
|
|
|
2017-07-20 08:04:30 +00:00
|
|
|
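# The route below expects a JSON body with an 'allPath' list of numeric path ids
# (as returned by the /credentials_management_action/ seek action); each id is
# resolved to a paste path via the AllPathRev hash and a short preview of the paste
# (date, source, encoding, size, mime, line info, first 400 characters) is returned.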
@terms.route("/credentials_management_query_paste/", methods=['GET', 'POST'])
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2017-07-17 15:26:19 +00:00
|
|
|
def credentials_management_query_paste():
|
|
|
|
cred = request.args.get('cred')
|
2017-07-20 08:04:30 +00:00
|
|
|
allPath = request.json['allPath']
|
2017-07-17 15:26:19 +00:00
|
|
|
|
2017-07-20 08:04:30 +00:00
|
|
|
paste_info = []
|
|
|
|
for pathNum in allPath:
|
|
|
|
path = r_serv_cred.hget(REDIS_KEY_ALL_PATH_SET_REV, pathNum)
|
|
|
|
paste = Paste.Paste(path)
|
|
|
|
p_date = str(paste._get_p_date())
|
2018-07-05 12:45:34 +00:00
|
|
|
p_date = p_date[0:4]+'/'+p_date[4:6]+'/'+p_date[6:8]
|
2017-07-20 08:04:30 +00:00
|
|
|
p_source = paste.p_source
|
|
|
|
p_encoding = paste._get_p_encoding()
|
|
|
|
p_size = paste.p_size
|
|
|
|
p_mime = paste.p_mime
|
|
|
|
p_lineinfo = paste.get_lines_info()
|
2018-04-24 14:44:37 +00:00
|
|
|
p_content = paste.get_p_content()
|
2017-07-20 08:04:30 +00:00
|
|
|
if p_content != 0:
|
|
|
|
p_content = p_content[0:400]
|
|
|
|
paste_info.append({"path": path, "date": p_date, "source": p_source, "encoding": p_encoding, "size": p_size, "mime": p_mime, "lineinfo": p_lineinfo, "content": p_content})
|
2017-07-18 14:57:15 +00:00
|
|
|
|
2017-07-20 08:04:30 +00:00
|
|
|
return jsonify(paste_info)
|
2017-07-18 14:57:15 +00:00
|
|
|
|
2017-07-17 15:26:19 +00:00
|
|
|
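# Query parameters read by the route below: 'term' (the supplied username),
# 'action' (only "seek" is handled), 'section' (echoed back) and 'extensive'
# ("true" to also substring-match against every stored username, which is slow).
# The response is a JSON object of the form (illustrative values):
#   {"section": ..., "action": "seek", "term": "john smith",
#    "data": {"usr": [...], "path": [[pathNum, ...], ...], "numPaste": [...], "simil": ["87.5%", ...]}}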
@terms.route("/credentials_management_action/", methods=['GET'])
|
2019-05-02 15:31:14 +00:00
|
|
|
@login_required
|
2019-06-19 15:02:09 +00:00
|
|
|
@login_analyst
|
2017-07-17 15:26:19 +00:00
|
|
|
def cred_management_action():
|
2017-07-18 14:57:15 +00:00
|
|
|
|
2018-04-17 14:06:32 +00:00
|
|
|
supplied = request.args.get('term')
|
2017-07-17 15:26:19 +00:00
|
|
|
action = request.args.get('action')
|
2017-07-18 14:57:15 +00:00
|
|
|
section = request.args.get('section')
|
2017-07-20 08:04:30 +00:00
|
|
|
extensive = request.args.get('extensive')
|
|
|
|
extensive = True if extensive == "true" else False
|
2017-07-18 14:57:15 +00:00
|
|
|
|
2017-07-20 08:04:30 +00:00
|
|
|
if extensive:
|
|
|
|
#collectDico
|
|
|
|
AllUsernameInRedis = r_serv_cred.hgetall(REDIS_KEY_ALL_CRED_SET).keys()
|
2017-07-18 14:57:15 +00:00
|
|
|
uniq_num_set = set()
|
|
|
|
if action == "seek":
|
2017-07-20 08:04:30 +00:00
|
|
|
possibilities = mixUserName(supplied, extensive)
|
2017-07-18 14:57:15 +00:00
|
|
|
for poss in possibilities:
|
2017-07-20 08:04:30 +00:00
|
|
|
num = r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, poss)
|
|
|
|
if num is not None:
|
|
|
|
uniq_num_set.add(num)
|
2017-07-18 14:57:15 +00:00
|
|
|
for num in r_serv_cred.smembers(poss):
|
|
|
|
uniq_num_set.add(num)
|
2017-07-20 08:04:30 +00:00
|
|
|
#Extensive /!\
|
|
|
|
if extensive:
|
2017-07-20 08:50:24 +00:00
|
|
|
iter_num = 0
|
|
|
|
tot_iter = len(AllUsernameInRedis)*len(possibilities)
|
2017-07-20 08:04:30 +00:00
|
|
|
for tempUsername in AllUsernameInRedis:
|
|
|
|
for poss in possibilities:
|
2017-07-20 08:50:24 +00:00
|
|
|
#FIXME print progress
|
|
|
|
if(iter_num % int(tot_iter/20) == 0):
|
|
|
|
#print("searching: {}% done".format(int(iter_num/tot_iter*100)), sep=' ', end='\r', flush=True)
|
|
|
|
print("searching: {}% done".format(float(iter_num)/float(tot_iter)*100))
|
|
|
|
iter_num += 1
|
|
|
|
|
2017-07-20 08:04:30 +00:00
|
|
|
if poss in tempUsername:
|
2018-05-04 11:53:29 +00:00
|
|
|
num = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, tempUsername))
|
2017-07-20 08:04:30 +00:00
|
|
|
if num is not None:
|
|
|
|
uniq_num_set.add(num)
|
|
|
|
for num in r_serv_cred.smembers(tempUsername):
|
|
|
|
uniq_num_set.add(num)
|
2017-07-18 14:57:15 +00:00
|
|
|
|
2017-07-19 09:52:06 +00:00
|
|
|
data = {'usr': [], 'path': [], 'numPaste': [], 'simil': []}
|
2017-07-18 14:57:15 +00:00
|
|
|
for Unum in uniq_num_set:
|
2017-07-20 08:04:30 +00:00
|
|
|
levenRatio = 2.0
|
2018-05-04 11:53:29 +00:00
|
|
|
username = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET_REV, Unum))
|
2018-02-28 08:19:27 +00:00
|
|
|
|
2017-07-19 09:52:06 +00:00
|
|
|
# Calculate Levenshtein distance, ignore negative ratio
|
2017-07-20 08:04:30 +00:00
|
|
|
supp_splitted = supplied.split()
|
|
|
|
supp_mixed = supplied.replace(' ','')
|
|
|
|
supp_splitted.append(supp_mixed)
|
|
|
|
for indiv_supplied in supp_splitted:
|
|
|
|
levenRatio = float(Levenshtein.ratio(indiv_supplied, username))
|
|
|
|
levenRatioStr = "{:.1%}".format(levenRatio)
|
2017-07-19 09:52:06 +00:00
|
|
|
|
|
|
|
data['usr'].append(username)
|
2018-04-17 14:06:32 +00:00
|
|
|
|
|
|
|
|
2017-07-20 08:04:30 +00:00
|
|
|
allPathNum = list(r_serv_cred.smembers(REDIS_KEY_MAP_CRED_TO_PATH+'_'+Unum))
|
2018-04-17 14:06:32 +00:00
|
|
|
|
2018-05-04 11:53:29 +00:00
|
|
|
data['path'].append(allPathNum)
|
|
|
|
data['numPaste'].append(len(allPathNum))
|
2017-07-19 09:52:06 +00:00
|
|
|
data['simil'].append(levenRatioStr)
|
2017-07-18 14:57:15 +00:00
|
|
|
|
|
|
|
to_return = {}
|
|
|
|
to_return["section"] = section
|
|
|
|
to_return["action"] = action
|
|
|
|
to_return["term"] = supplied
|
|
|
|
to_return["data"] = data
|
|
|
|
|
|
|
|
return jsonify(to_return)
|
|
|
|
|
2017-07-17 15:26:19 +00:00
|
|
|
|
2017-04-19 09:02:03 +00:00
|
|
|
# ========= REGISTRATION =========
app.register_blueprint(terms, url_prefix=baseUrl)