Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-10 08:38:28 +00:00)

decode with redis connection

This commit is contained in:
parent 7ae0f3c837
commit 96a3df42bc

72 changed files with 222 additions and 448 deletions
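The change repeated across most of these 72 files: instead of calling .decode('utf8') on every value read back from Redis, each redis.StrictRedis(...) connection now passes decode_responses=True so the client returns str instead of bytes. A minimal sketch of the before/after behaviour — the localhost connection parameters are placeholders, not the framework's config:

import redis

# Default client: replies are bytes, so every read needs a manual decode.
r_bytes = redis.StrictRedis(host='localhost', port=6379, db=0)
r_bytes.set('k', 'value')
assert r_bytes.get('k') == b'value'
assert r_bytes.get('k').decode('utf8') == 'value'

# With decode_responses=True the client decodes replies itself.
r_str = redis.StrictRedis(host='localhost', port=6379, db=0,
                          decode_responses=True)
assert r_str.get('k') == 'value'  # already str, no .decode() needed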
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """
@@ -65,12 +65,6 @@ if __name__ == "__main__":

     publisher.info("ApiKey started")

-    # REDIS #
-    r_serv2 = redis.StrictRedis(
-        host=p.config.get("Redis_Cache", "host"),
-        port=p.config.getint("Redis_Cache", "port"),
-        db=p.config.getint("Redis_Cache", "db"))
-
     message = p.get_from_set()

     # TODO improve REGEX
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 Base64 module

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The Bitcoin Module

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The ZMQ_PubSub_Categ Module

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """
@@ -55,7 +55,8 @@ if __name__ == "__main__":
    server_cred = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_TermCred", "host"),
        port=p.config.get("Redis_Level_DB_TermCred", "port"),
-       db=p.config.get("Redis_Level_DB_TermCred", "db"))
+       db=p.config.get("Redis_Level_DB_TermCred", "db"),
+       decode_responses=True)

    criticalNumberToAlert = p.config.getint("Credential", "criticalNumberToAlert")
    minTopPassList = p.config.getint("Credential", "minTopPassList")
@@ -68,8 +69,8 @@ if __name__ == "__main__":
        message = p.get_from_set()
        if message is None:
            publisher.debug("Script Credential is Idling 10s")
-           print('sleeping 10s')
-           time.sleep(1)
+           #print('sleeping 10s')
+           time.sleep(10)
            continue

        filepath, count = message.split(' ')
@@ -109,7 +110,7 @@ if __name__ == "__main__":
        site_occurence = re.findall(regex_site_for_stats, content)
        for site in site_occurence:
            site_domain = site[1:-1]
-           if site_domain.encode('utf8') in creds_sites.keys():
+           if site_domain in creds_sites.keys():
                creds_sites[site_domain] += 1
            else:
                creds_sites[site_domain] = 1
@@ -123,10 +124,6 @@ if __name__ == "__main__":
                creds_sites[domain] = 1

        for site, num in creds_sites.items(): # Send for each different site to moduleStats
-           try:
-               site = site.decode('utf8')
-           except:
-               pass

            mssg = 'credential;{};{};{}'.format(num, site, paste.p_date)
            print(mssg)
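With the connection decoding replies, strings read from Redis and strings built locally share one type, so bookkeeping dicts like creds_sites above need no encode/decode at the boundary. A small illustrative sketch — the domains are made-up sample input:

creds_sites = {}
for site_domain in ['example.com', 'example.com', 'test.org']:  # sample data
    # str keys throughout: no site_domain.encode('utf8') mismatch possible
    creds_sites[site_domain] = creds_sites.get(site_domain, 0) + 1
assert creds_sites == {'example.com': 2, 'test.org': 1}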
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

bin/Curve.py (12 changed lines)

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 This module is consuming the Redis-list created by the ZMQ_Sub_Curve_Q Module.
@@ -49,7 +49,7 @@ top_termFreq_setName_month = ["TopTermFreq_set_month", 31]
 top_termFreq_set_array = [top_termFreq_setName_day,top_termFreq_setName_week, top_termFreq_setName_month]

 def check_if_tracked_term(term, path):
-    if term.encode('utf8') in server_term.smembers(TrackedTermsSet_Name):
+    if term in server_term.smembers(TrackedTermsSet_Name):
        #add_paste to tracked_word_set
        set_name = "tracked_" + term
        server_term.sadd(set_name, path)
@@ -84,12 +84,14 @@ if __name__ == "__main__":
    r_serv1 = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_Curve", "host"),
        port=p.config.get("Redis_Level_DB_Curve", "port"),
-       db=p.config.get("Redis_Level_DB_Curve", "db"))
+       db=p.config.get("Redis_Level_DB_Curve", "db"),
+       decode_responses=True)

    server_term = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
-       db=p.config.get("Redis_Level_DB_TermFreq", "db"))
+       db=p.config.get("Redis_Level_DB_TermFreq", "db"),
+       decode_responses=True)

    # FUNCTIONS #
    publisher.info("Script Curve started")
@@ -132,7 +134,7 @@ if __name__ == "__main__":
                curr_word_value_perPaste = int(server_term.hincrby("per_paste_" + str(timestamp), low_word, int(1)))

                # Add in set only if term is not in the blacklist
-               if low_word.encode('utf8') not in server_term.smembers(BlackListTermsSet_Name):
+               if low_word not in server_term.smembers(BlackListTermsSet_Name):
                    #consider the num of occurence of this term
                    server_term.zincrby(curr_set, low_word, float(score))
                    #1 term per paste
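The two membership tests above work because SMEMBERS now returns a set of str, so a plain Python string can be compared directly against it. A minimal sketch — the connection parameters and set name are placeholders:

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
r.sadd('TrackedTerms', 'password')        # hypothetical tracked-terms set
members = r.smembers('TrackedTerms')      # {'password'}: a set of str
assert 'password' in members              # no .encode('utf8') needed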
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """

@@ -139,7 +139,8 @@ if __name__ == '__main__':
    r_temp = redis.StrictRedis(
        host=cfg.get('RedisPubSub', 'host'),
        port=cfg.getint('RedisPubSub', 'port'),
-       db=cfg.getint('RedisPubSub', 'db'))
+       db=cfg.getint('RedisPubSub', 'db'),
+       decode_responses=True)

    timestamp = int(time.mktime(datetime.datetime.now().timetuple()))
    value = str(timestamp) + ", " + "-"
@@ -149,7 +150,8 @@ if __name__ == '__main__':
    server_term = redis.StrictRedis(
        host=cfg.get("Redis_Level_DB_TermFreq", "host"),
        port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
-       db=cfg.getint("Redis_Level_DB_TermFreq", "db"))
+       db=cfg.getint("Redis_Level_DB_TermFreq", "db"),
+       decode_responses=True)

    publisher.info("Script Curve_manage_top_set started")

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The CVE Module

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 import argparse

@@ -36,7 +36,8 @@ def main():

    r_serv = redis.StrictRedis(host=cfg.get("Redis_Queues", "host"),
                               port=cfg.getint("Redis_Queues", "port"),
-                              db=cfg.getint("Redis_Queues", "db"))
+                              db=cfg.getint("Redis_Queues", "db"),
+                              decode_responses=True)

    publisher.port = 6380
    publisher.channel = "Script"
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 from pubsublogger import publisher

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """
@@ -53,7 +53,8 @@ if __name__ == "__main__":
        for month in range(0, 13):
            dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
                host=p.config.get("Redis_Level_DB", "host"), port=year,
-               db=month)
+               db=month,
+               decode_responses=True)

    # FUNCTIONS #
    publisher.info("Script duplicate started")
@@ -102,7 +103,7 @@ if __name__ == "__main__":
            yearly_index = str(date_today.year)+'00'
            r_serv0 = dico_redis[yearly_index]
            r_serv0.incr("current_index")
-           index = (r_serv0.get("current_index")).decode('utf8') + str(PST.p_date)
+           index = (r_serv0.get("current_index")) + str(PST.p_date)

            # Open selected dico range
            opened_dico = []
@@ -116,7 +117,7 @@ if __name__ == "__main__":
            for curr_dico_name, curr_dico_redis in opened_dico:
                for hash_type, paste_hash in paste_hashes.items():
                    for dico_hash in curr_dico_redis.smembers('HASHS_'+hash_type):
-                       dico_hash = dico_hash.decode('utf8')
+                       dico_hash = dico_hash

                        try:
                            if hash_type == 'ssdeep':
@@ -134,11 +135,11 @@ if __name__ == "__main__":

                # index of paste
                index_current = r_serv_dico.get(dico_hash)
-               index_current = index_current.decode('utf8')
+               index_current = index_current
                paste_path = r_serv_dico.get(index_current)
-               paste_path = paste_path.decode('utf8')
+               paste_path = paste_path
                paste_date = r_serv_dico.get(index_current+'_date')
-               paste_date = paste_date.decode('utf8')
+               paste_date = paste_date
                paste_date = paste_date if paste_date != None else "No date available"
                if paste_path != None:
                    if paste_path != PST.p_path:
@@ -1,166 +0,0 @@
-#!/usr/bin/env python3.5
-# -*-coding:UTF-8 -*
-
-"""
-The Duplicate module
-====================
-
-This huge module is, in short term, checking duplicates.
-
-Requirements:
--------------
-
-
-"""
-import redis
-import os
-import time
-from packages import Paste
-from pubsublogger import publisher
-from pybloomfilter import BloomFilter
-
-from Helper import Process
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-
-    config_section = 'Duplicates'
-
-    p = Process(config_section)
-
-    # REDIS #
-    # DB OBJECT & HASHS ( DISK )
-    # FIXME increase flexibility
-    dico_redis = {}
-    for year in range(2013, 2017):
-        for month in range(0, 16):
-            dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
-                host=p.config.get("Redis_Level_DB", "host"), port=year,
-                db=month)
-            #print("dup: "+str(year)+str(month).zfill(2)+"\n")
-
-    # FUNCTIONS #
-    publisher.info("Script duplicate started")
-
-    set_limit = 100
-    bloompath = os.path.join(os.environ['AIL_HOME'],
-                             p.config.get("Directories", "bloomfilters"))
-
-    bloop_path_set = set()
-    while True:
-        try:
-            super_dico = {}
-            hash_dico = {}
-            dupl = []
-            nb_hash_current = 0
-
-            x = time.time()
-
-            message = p.get_from_set()
-            if message is not None:
-                path = message
-                PST = Paste.Paste(path)
-            else:
-                publisher.debug("Script Attribute is idling 10s")
-                time.sleep(10)
-                continue
-
-            PST._set_p_hash_kind("md5")
-
-            # Assignate the correct redis connexion
-            r_serv1 = dico_redis[PST.p_date.year + PST.p_date.month]
-
-            # Creating the bloom filter name: bloomyyyymm
-            filebloompath = os.path.join(bloompath, 'bloom' + PST.p_date.year +
-                                         PST.p_date.month)
-            if os.path.exists(filebloompath):
-                bloom = BloomFilter.open(filebloompath)
-                bloop_path_set.add(filebloompath)
-            else:
-                bloom = BloomFilter(100000000, 0.01, filebloompath)
-                bloop_path_set.add(filebloompath)
-
-            # UNIQUE INDEX HASHS TABLE
-            r_serv0 = dico_redis["201600"]
-            r_serv0.incr("current_index")
-            index = r_serv0.get("current_index")+str(PST.p_date)
-            # HASHTABLES PER MONTH (because of r_serv1 changing db)
-            r_serv1.set(index, PST.p_path)
-            r_serv1.sadd("INDEX", index)
-
-            # For each bloom filter
-            opened_bloom = []
-            for bloo in bloop_path_set:
-                # Opening blooms
-                opened_bloom.append(BloomFilter.open(bloo))
-            # For each hash of the paste
-            for line_hash in PST._get_hash_lines(min=5, start=1, jump=0):
-                nb_hash_current += 1
-
-                # Adding the hash in Redis & limiting the set
-                if r_serv1.scard(line_hash) <= set_limit:
-                    r_serv1.sadd(line_hash, index)
-                    r_serv1.sadd("HASHS", line_hash)
-                # Adding the hash in the bloom of the month
-                bloom.add(line_hash)
-                # Go throught the Database of the bloom filter (of the month)
-                for bloo in opened_bloom:
-                    if line_hash in bloo:
-                        db = bloo.name[-6:]
-                        # Go throught the Database of the bloom filter (month)
-                        r_serv_bloom = dico_redis[db]
-
-                        # set of index paste: set([1,2,4,65])
-                        hash_current = r_serv_bloom.smembers(line_hash)
-                        # removing itself from the list
-                        hash_current = hash_current - set([index])
-
-                        # if the hash is present at least in 1 files
-                        # (already processed)
-                        if len(hash_current) != 0:
-                            hash_dico[line_hash] = hash_current
-
-                        # if there is data in this dictionnary
-                        if len(hash_dico) != 0:
-                            super_dico[index] = hash_dico
-
-            ###########################################################################
-
-            # if there is data in this dictionnary
-            if len(super_dico) != 0:
-                # current = current paste, phash_dico = {hash: set, ...}
-                occur_dico = {}
-                for current, phash_dico in super_dico.items():
-                    # phash = hash, pset = set([ pastes ...])
-                    for phash, pset in hash_dico.items():
-
-                        for p_fname in pset:
-                            occur_dico.setdefault(p_fname, 0)
-                            # Count how much hash is similar per file occuring
-                            # in the dictionnary
-                            if occur_dico[p_fname] >= 0:
-                                occur_dico[p_fname] = occur_dico[p_fname] + 1
-
-            for paste, count in occur_dico.items():
-                percentage = round((count/float(nb_hash_current))*100, 2)
-                if percentage >= 50:
-                    dupl.append((paste, percentage))
-                else:
-                    print('percentage: ' + str(percentage))
-
-            # Creating the object attribute and save it.
-            to_print = 'Duplicate;{};{};{};'.format(
-                PST.p_source, PST.p_date, PST.p_name)
-            if dupl != []:
-                PST.__setattr__("p_duplicate", dupl)
-                PST.save_attribute_redis("p_duplicate", dupl)
-                publisher.info('{}Detected {}'.format(to_print, len(dupl)))
-                print('{}Detected {}'.format(to_print, len(dupl)))
-
-            y = time.time()
-
-            publisher.debug('{}Processed in {} sec'.format(to_print, y-x))
-        except IOError:
-            print("CRC Checksum Failed on :", PST.p_path)
-            publisher.error('{}CRC Checksum Failed'.format(to_print))
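For context, the module deleted above detected near-duplicate pastes by hashing groups of lines and probing per-month, mmap-backed bloom filters. A stripped-down sketch of that idea, using only the pybloomfilter calls visible in the removed code (the capacity and error rate are the module's own values; the file name is illustrative):

from pybloomfilter import BloomFilter

bloom = BloomFilter(100000000, 0.01, 'bloom201601')  # capacity, error rate, file

def probably_seen(line_hash):
    if line_hash in bloom:   # bloom filters can yield rare false positives
        return True
    bloom.add(line_hash)
    return False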
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The ZMQ_Feed_Q Module

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 Queue helper module
@@ -45,7 +45,8 @@ class PubSub(object):
            r = redis.StrictRedis(
                host=self.config.get('RedisPubSub', 'host'),
                port=self.config.get('RedisPubSub', 'port'),
-               db=self.config.get('RedisPubSub', 'db'))
+               db=self.config.get('RedisPubSub', 'db'),
+               decode_responses=True)
            self.subscribers = r.pubsub(ignore_subscribe_messages=True)
            self.subscribers.psubscribe(channel)
        elif conn_name.startswith('ZMQ'):
@@ -69,7 +70,8 @@ class PubSub(object):
        if conn_name.startswith('Redis'):
            r = redis.StrictRedis(host=self.config.get('RedisPubSub', 'host'),
                                  port=self.config.get('RedisPubSub', 'port'),
-                                 db=self.config.get('RedisPubSub', 'db'))
+                                 db=self.config.get('RedisPubSub', 'db'),
+                                 decode_responses=True)
            self.publishers['Redis'].append((r, channel))
        elif conn_name.startswith('ZMQ'):
            context = zmq.Context()
@@ -99,8 +101,7 @@ class PubSub(object):
            for sub in self.subscribers:
                try:
                    msg = sub.recv(zmq.NOBLOCK)
-                   msg = msg.decode('utf8')
-                   yield msg.split(" ", 1)[1]
+                   yield msg.split(b" ", 1)[1]
                except zmq.error.Again as e:
                    time.sleep(0.2)
                    pass
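Note the asymmetry in the hunk above: decode_responses=True only affects the Redis client, while pyzmq's recv() still hands back bytes — hence the subscriber now splits on b" " instead of decoding first. A minimal sketch, assuming the "topic payload" framing the feed uses:

# msg as pyzmq delivers it: bytes
msg = b'102 SGVsbG8='                 # hypothetical topic + payload
payload = msg.split(b' ', 1)[1]       # still bytes: b'SGVsbG8='
text = payload.decode('utf8')         # decode only where str is required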
@@ -131,7 +132,8 @@ class Process(object):
        self.r_temp = redis.StrictRedis(
            host=self.config.get('RedisPubSub', 'host'),
            port=self.config.get('RedisPubSub', 'port'),
-           db=self.config.get('RedisPubSub', 'db'))
+           db=self.config.get('RedisPubSub', 'db'),
+           decode_responses=True)

        self.moduleNum = os.getpid()

@@ -152,11 +154,6 @@ class Process(object):
                         int(self.r_temp.scard(in_set)))
            message = self.r_temp.spop(in_set)
-
-           try:
-               message = message.decode('utf8')
-           except AttributeError:
-               pass

            timestamp = int(time.mktime(datetime.datetime.now().timetuple()))
            dir_name = os.environ['AIL_HOME']+self.config.get('Directories', 'pastes')

@@ -216,11 +213,6 @@ class Process(object):
        while True:
            message = self.r_temp.spop(self.subscriber_name + 'out')
-
-           try:
-               message = message.decode('utf8')
-           except AttributeError:
-               pass

            if message is None:
                time.sleep(1)
                continue
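With the connection doing the decoding, SPOP returns either a str or None, so the try/except AttributeError dance removed above reduces to a plain None check. A minimal sketch — the queue name is a placeholder:

import time
import redis

r = redis.StrictRedis(host='localhost', port=6379, db=1, decode_responses=True)
message = r.spop('Module_out')   # hypothetical queue: str or None
if message is None:
    time.sleep(1)                # idle and retry, as the modules do
else:
    fields = message.split(';')  # message is already str here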
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """
@@ -34,7 +34,8 @@ if __name__ == "__main__":
    r_serv2 = redis.StrictRedis(
        host=p.config.get("Redis_Cache", "host"),
        port=p.config.getint("Redis_Cache", "port"),
-       db=p.config.getint("Redis_Cache", "db"))
+       db=p.config.getint("Redis_Cache", "db"),
+       decode_responses=True)

    # FUNCTIONS #
    publisher.info("Suscribed to channel mails_categ")
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The Mixer Module

@@ -65,7 +65,8 @@ if __name__ == '__main__':
    server = redis.StrictRedis(
        host=cfg.get("Redis_Mixer_Cache", "host"),
        port=cfg.getint("Redis_Mixer_Cache", "port"),
-       db=cfg.getint("Redis_Mixer_Cache", "db"))
+       db=cfg.getint("Redis_Mixer_Cache", "db"),
+       decode_responses=True)

    # LOGGING #
    publisher.info("Feed Script started to receive & publish.")
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 '''

@@ -199,7 +199,8 @@ if __name__ == "__main__":
    server = redis.StrictRedis(
        host=cfg.get("Redis_Queues", "host"),
        port=cfg.getint("Redis_Queues", "port"),
-       db=cfg.getint("Redis_Queues", "db"))
+       db=cfg.getint("Redis_Queues", "db"),
+       decode_responses=True)

    if args.clear == 1:
        clearRedisModuleInfo()
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 This module makes statistics for some modules and providers

@@ -52,7 +52,7 @@ def compute_most_posted(server, message):
    # Member set is a list of (value, score) pairs
    if int(member_set[0][1]) < keyword_total_sum:
        #remove min from set and add the new one
-       print(module + ': adding ' +keyword+ '(' +str(keyword_total_sum)+') in set and removing '+member_set[0][0].decode('utf8')+'('+str(member_set[0][1])+')')
+       print(module + ': adding ' +keyword+ '(' +str(keyword_total_sum)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')')
        server.zrem(redis_progression_name_set, member_set[0][0])
        server.zadd(redis_progression_name_set, float(keyword_total_sum), keyword)
        print(redis_progression_name_set)
@@ -135,12 +135,14 @@ if __name__ == '__main__':
    r_serv_trend = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_Trending", "host"),
        port=p.config.get("Redis_Level_DB_Trending", "port"),
-       db=p.config.get("Redis_Level_DB_Trending", "db"))
+       db=p.config.get("Redis_Level_DB_Trending", "db"),
+       decode_responses=True)

    r_serv_pasteName = redis.StrictRedis(
        host=p.config.get("Redis_Paste_Name", "host"),
        port=p.config.get("Redis_Paste_Name", "port"),
-       db=p.config.get("Redis_Paste_Name", "db"))
+       db=p.config.get("Redis_Paste_Name", "db"),
+       decode_responses=True)

    # Endless loop getting messages from the input queue
    while True:
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 from asciimatics.widgets import Frame, ListBox, Layout, Divider, Text, \

@@ -510,9 +510,8 @@ def clearRedisModuleInfo():

 def cleanRedis():
    for k in server.keys("MODULE_TYPE_*"):
-       moduleName = (k[12:].decode('utf8')).split('_')[0]
+       moduleName = k[12:].split('_')[0]
        for pid in server.smembers(k):
-           pid = pid.decode('utf8')
            flag_pid_valid = False
            proc = Popen([command_search_name.format(pid)], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
            try:
@@ -530,7 +529,7 @@ def cleanRedis():
                #print flag_pid_valid, 'cleaning', pid, 'in', k
                server.srem(k, pid)
                inst_time = datetime.datetime.fromtimestamp(int(time.time()))
-               log(([str(inst_time).split(' ')[1], moduleName, pid, "Cleared invalid pid in " + (k).decode('utf8')], 0))
+               log(([str(inst_time).split(' ')[1], moduleName, pid, "Cleared invalid pid in " + (k)], 0))

        #Error due to resize, interrupted sys call
        except IOError as e:
@@ -607,19 +606,16 @@ def fetchQueueData():
    printarray_idle = []
    printarray_notrunning = []
    for queue, card in iter(server.hgetall("queues").items()):
-       queue = queue.decode('utf8')
-       card = card.decode('utf8')
        all_queue.add(queue)
        key = "MODULE_" + queue + "_"
        keySet = "MODULE_TYPE_" + queue
        array_module_type = []

        for moduleNum in server.smembers(keySet):
-           moduleNum = moduleNum.decode('utf8')
-           value = ( server.get(key + str(moduleNum)) ).decode('utf8')
+           value = server.get(key + str(moduleNum))
            complete_paste_path = ( server.get(key + str(moduleNum) + "_PATH") )
            if(complete_paste_path is not None):
-               complete_paste_path = complete_paste_path.decode('utf8')
+               complete_paste_path = complete_paste_path
            COMPLETE_PASTE_PATH_PER_PID[moduleNum] = complete_paste_path

            if value is not None:
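The hgetall loop above benefits the same way: with decode_responses=True, HGETALL returns a dict mapping str to str, so both the field and the value arrive ready to use. A minimal sketch — the hash name and fields are placeholders:

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
r.hset('queues', 'Mixer', '3')                # hypothetical queue stats
for queue, card in r.hgetall('queues').items():
    print(queue, int(card))                   # both already str: 'Mixer' 3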
@@ -814,7 +810,8 @@ if __name__ == "__main__":
    server = redis.StrictRedis(
        host=cfg.get("Redis_Queues", "host"),
        port=cfg.getint("Redis_Queues", "port"),
-       db=cfg.getint("Redis_Queues", "db"))
+       db=cfg.getint("Redis_Queues", "db"),
+       decode_responses=True)

    if args.clear == 1:
        clearRedisModuleInfo()
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 import configparser

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The ZMQ_Sub_Onion Module
@@ -94,7 +94,8 @@ if __name__ == "__main__":
    r_cache = redis.StrictRedis(
        host=p.config.get("Redis_Cache", "host"),
        port=p.config.getint("Redis_Cache", "port"),
-       db=p.config.getint("Redis_Cache", "db"))
+       db=p.config.getint("Redis_Cache", "db"),
+       decode_responses=True)

    # FUNCTIONS #
    publisher.info("Script subscribed to channel onion_categ")
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*


@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 from pubsublogger import publisher

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 import redis
@@ -30,7 +30,8 @@ def main():
    r_serv = redis.StrictRedis(
        host=cfg.get("Redis_Queues", "host"),
        port=cfg.getint("Redis_Queues", "port"),
-       db=cfg.getint("Redis_Queues", "db"))
+       db=cfg.getint("Redis_Queues", "db"),
+       decode_responses=True)

    # LOGGING #
    publisher.port = 6380
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 This Module is used for term frequency.

@@ -56,7 +56,8 @@ if __name__ == "__main__":
    server_term = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
-       db=p.config.get("Redis_Level_DB_TermFreq", "db"))
+       db=p.config.get("Redis_Level_DB_TermFreq", "db"),
+       decode_responses=True)

    # FUNCTIONS #
    publisher.info("RegexForTermsFrequency script started")
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 import time
 from packages import Paste

@@ -1,4 +1,4 @@
-#!/usr/bin/python3.5
+#!/usr/bin/python3
 # -*-coding:UTF-8 -*

 import redis
@@ -35,7 +35,8 @@ def main():
    r_serv = redis.StrictRedis(
        host=cfg.get("Redis_Level_DB_Hashs", "host"),
        port=curYear,
-       db=cfg.getint("Redis_Level_DB_Hashs", "db"))
+       db=cfg.getint("Redis_Level_DB_Hashs", "db"),
+       decode_responses=True)

    # LOGGING #
    publisher.port = 6380
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 Sentiment analyser module.
@@ -109,7 +109,7 @@ def Analyse(message, server):
        provider_timestamp = provider + '_' + str(timestamp)
        server.incr('UniqID')
        UniqID = server.get('UniqID')
-       print(provider_timestamp, '->', UniqID.decode('utf8'), 'dropped', num_line_removed, 'lines')
+       print(provider_timestamp, '->', UniqID, 'dropped', num_line_removed, 'lines')
        server.sadd(provider_timestamp, UniqID)
        server.set(UniqID, avg_score)
    else:
@@ -154,7 +154,8 @@ if __name__ == '__main__':
    server = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_Sentiment", "host"),
        port=p.config.get("Redis_Level_DB_Sentiment", "port"),
-       db=p.config.get("Redis_Level_DB_Sentiment", "db"))
+       db=p.config.get("Redis_Level_DB_Sentiment", "db"),
+       decode_responses=True)

    while True:
        message = p.get_from_set()
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 This Module is used for term frequency.

@@ -54,7 +54,8 @@ if __name__ == "__main__":
    server_term = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
-       db=p.config.get("Redis_Level_DB_TermFreq", "db"))
+       db=p.config.get("Redis_Level_DB_TermFreq", "db"),
+       decode_responses=True)

    # FUNCTIONS #
    publisher.info("RegexForTermsFrequency script started")
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The ZMQ_Feed_Q Module

@@ -37,7 +37,8 @@ def main():
    # REDIS
    r_serv = redis.StrictRedis(host=cfg.get("Redis_Queues", "host"),
                               port=cfg.getint("Redis_Queues", "port"),
-                              db=cfg.getint("Redis_Queues", "db"))
+                              db=cfg.getint("Redis_Queues", "db"),
+                              decode_responses=True)

    # FIXME: automatic based on the queue name.
    # ### SCRIPTS ####
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 import time
 from packages import Paste

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 The Tokenize Module

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 import configparser
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -47,7 +47,8 @@ if __name__ == "__main__":
    r_serv2 = redis.StrictRedis(
        host=p.config.get("Redis_Cache", "host"),
        port=p.config.getint("Redis_Cache", "port"),
-       db=p.config.getint("Redis_Cache", "db"))
+       db=p.config.getint("Redis_Cache", "db"),
+       decode_responses=True)

    # Protocol file path
    protocolsfile_path = os.path.join(os.environ['AIL_HOME'],
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -116,7 +116,8 @@ if __name__ == '__main__':
    r_serv_trend = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_Trending", "host"),
        port=p.config.get("Redis_Level_DB_Trending", "port"),
-       db=p.config.get("Redis_Level_DB_Trending", "db"))
+       db=p.config.get("Redis_Level_DB_Trending", "db"),
+       decode_responses=True)

    # FILE CURVE SECTION #
    csv_path_proto = os.path.join(os.environ['AIL_HOME'],
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 from pymisp.tools.abstractgenerator import AbstractMISPObjectGenerator

@@ -15,7 +15,7 @@ class AilleakObject(AbstractMISPObjectGenerator):
        self._p_source = p_source.split('/')[-5:]
        self._p_source = '/'.join(self._p_source)[:-3] # -3 removes .gz
        self._p_date = p_date
-       self._p_content = p_content.encode('utf8')
+       self._p_content = p_content
        self._p_duplicate = p_duplicate
        self._p_duplicate_number = p_duplicate_number
        self.generate_attributes()
@@ -37,7 +37,7 @@ class ObjectWrapper:
        self.eventID_to_push = self.get_daily_event_id()
        cfg = configparser.ConfigParser()
        cfg.read('./packages/config.cfg')
        self.maxDuplicateToPushToMISP = cfg.getint("ailleakObject", "maxDuplicateToPushToMISP")

    def add_new_object(self, moduleName, path):
        self.moduleName = moduleName
@@ -45,13 +45,10 @@ class ObjectWrapper:
        self.paste = Paste.Paste(path)
        self.p_date = self.date_to_str(self.paste.p_date)
        self.p_source = self.paste.p_path
-       self.p_content = self.paste.get_p_content().decode('utf8')
+       self.p_content = self.paste.get_p_content()

        temp = self.paste._get_p_duplicate()
-       try:
-           temp = temp.decode('utf8')
-       except AttributeError:
-           pass
        #beautifier
        temp = json.loads(temp)
        self.p_duplicate_number = len(temp) if len(temp) >= 0 else 0
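Since _get_p_duplicate() now returns str, its value can go straight into json.loads, which accepts str directly (and bytes too, since Python 3.6). A minimal sketch with a made-up duplicate record:

import json

temp = '[["ssdeep", "/path/to/paste", 87.5, "20161103"]]'  # str, as Redis returns it
duplicates = json.loads(temp)    # no intermediate .decode('utf8') step
assert duplicates[0][2] == 87.5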
@@ -108,8 +105,8 @@ class ObjectWrapper:
        orgc_id = None
        sharing_group_id = None
        date = None
        event = self.pymisp.new_event(distribution, threat,
                analysis, info, date,
                published, orgc_id, org_id, sharing_group_id)
        return event

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -54,7 +54,8 @@ if __name__ == "__main__":
    server = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB", "host"),
        port=curYear,
-       db=p.config.get("Redis_Level_DB", "db"))
+       db=p.config.get("Redis_Level_DB", "db"),
+       decode_responses=True)

    # FUNCTIONS #
    publisher.info("Script duplicate started")
@@ -62,7 +63,6 @@ if __name__ == "__main__":
    while True:
        message = p.get_from_set()
        if message is not None:
-           #decode because of pyhton3
            module_name, p_path = message.split(';')
            print("new alert : {}".format(module_name))
            #PST = Paste.Paste(p_path)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*

 """

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 #
 # This file is part of AIL framework - Analysis Information Leak framework

@@ -50,7 +50,7 @@ socket = context.socket(zmq.PUB)
 socket.bind(zmq_url)

 # check https://github.com/cvandeplas/pystemon/blob/master/pystemon.yaml#L16
-r = redis.StrictRedis(host='localhost', db=10)
+r = redis.StrictRedis(host='localhost', db=10, decode_responses=True)

 # 101 pastes processed feed
 # 102 raw pastes feed
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 #
 # This file is part of AIL framework - Analysis Information Leak framework

@@ -19,14 +19,14 @@ socket.connect ("tcp://crf.circl.lu:%s" % port)
 # 101 Name of the pastes only
 # 102 Full pastes in raw base64(gz)

-topicfilter = b"102"
+topicfilter = "102"
 socket.setsockopt(zmq.SUBSCRIBE, topicfilter)
-print('b0')
+
 while True:
     message = socket.recv()
     print('b1')
     print (message)
-    if topicfilter == b"102":
+    if topicfilter == "102":
         topic, paste, messagedata = message.split()
         print(paste, messagedata)
     else:
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-

 import zmq

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 #
 # This file is part of AIL framework - Analysis Information Leak framework

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*


@@ -1,3 +1,5 @@
+#!/usr/bin/python3
+
 class Date(object):
     """docstring for Date"""
     def __init__(self, *args):
@@ -30,7 +32,7 @@ class Date(object):

    def _set_day(self, day):
        self.day = day

    def substract_day(self, numDay):
        import datetime
        computed_date = datetime.date(int(self.year), int(self.month), int(self.day)) - datetime.timedelta(numDay)
@@ -38,4 +40,3 @@ class Date(object):
        comp_month = str(computed_date.month).zfill(2)
        comp_day = str(computed_date.day).zfill(2)
        return comp_year + comp_month + comp_day
-
@@ -1,3 +1,5 @@
+#!/usr/bin/python3
+
 import hashlib
 import crcmod
 import mmh3
@@ -1,4 +1,4 @@
-#!/usr/bin/python3.5
+#!/usr/bin/python3

 """
 The ``Paste Class``

@@ -69,11 +69,13 @@ class Paste(object):
        self.cache = redis.StrictRedis(
            host=cfg.get("Redis_Queues", "host"),
            port=cfg.getint("Redis_Queues", "port"),
-           db=cfg.getint("Redis_Queues", "db"))
+           db=cfg.getint("Redis_Queues", "db"),
+           decode_responses=True)
        self.store = redis.StrictRedis(
            host=cfg.get("Redis_Data_Merging", "host"),
            port=cfg.getint("Redis_Data_Merging", "port"),
-           db=cfg.getint("Redis_Data_Merging", "db"))
+           db=cfg.getint("Redis_Data_Merging", "db"),
+           decode_responses=True)

        self.p_path = p_path
        self.p_name = os.path.basename(self.p_path)
@@ -112,21 +114,17 @@ class Paste(object):
        paste = self.cache.get(self.p_path)
        if paste is None:
            try:
-               with gzip.open(self.p_path, 'rb') as f:
+               with gzip.open(self.p_path, 'r') as f:
                    paste = f.read()
                    self.cache.set(self.p_path, paste)
                    self.cache.expire(self.p_path, 300)
            except:
-               paste = b''
+               paste = ''

-       return paste.decode('utf8')
+       return paste

    def get_p_content_as_file(self):
-       try:
-           message = StringIO( (self.get_p_content()).decode('utf8') )
-       except AttributeError:
-           message = StringIO( (self.get_p_content()) )
+       message = StringIO(self.get_p_content())

        return message

    def get_p_content_with_removed_lines(self, threshold):
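One subtlety worth flagging in the hunk above: gzip.open() defaults to binary mode, so 'r' is equivalent to 'rb' and f.read() still yields bytes on a cache miss; an explicit text-mode read would use 'rt'. A minimal sketch of the distinction — the file name is a placeholder:

import gzip

with gzip.open('paste.gz', 'r') as f:     # 'r' == 'rb' for gzip: bytes
    raw = f.read()

with gzip.open('paste.gz', 'rt', encoding='utf8') as f:  # text mode: str
    text = f.read()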
@@ -204,7 +202,7 @@ class Paste(object):

        """
        for hash_name, the_hash in self.p_hash_kind.items():
-           self.p_hash[hash_name] = the_hash.Calculate(self.get_p_content().encode('utf8'))
+           self.p_hash[hash_name] = the_hash.Calculate(self.get_p_content().encode())
        return self.p_hash

    def _get_p_language(self):
@@ -276,7 +274,6 @@ class Paste(object):
    def _get_p_duplicate(self):
        self.p_duplicate = self.store.hget(self.p_path, "p_duplicate")
        if self.p_duplicate is not None:
-           self.p_duplicate = self.p_duplicate.decode('utf8')
            return self.p_duplicate
        else:
            return '[]'
@@ -335,7 +332,7 @@ class Paste(object):
            json_duplicate = self.store.hget(path, attr_name)
            #json save on redis
            if json_duplicate is not None:
-               list_duplicate = json.loads(json_duplicate.decode('utf8'))
+               list_duplicate = json.loads(json_duplicate)
                # add new duplicate
                list_duplicate.append([hash_type, self.p_path, percent, date])
                self.store.hset(path, attr_name, json.dumps(list_duplicate))
@@ -1,3 +1,5 @@
+#!/usr/bin/python3
+
 import re
 import dns.resolver

@@ -1,3 +1,5 @@
+#!/usr/bin/python3
+
 import os
 import string
@ -110,7 +112,7 @@ def create_curve_with_word_file(r_serv, csvfilename, feederfilename, year, month
|
||||||
# if the word have a value for the day
|
# if the word have a value for the day
|
||||||
# FIXME Due to performance issues (too many tlds, leads to more than 7s to perform this procedure), I added a threshold
|
# FIXME Due to performance issues (too many tlds, leads to more than 7s to perform this procedure), I added a threshold
|
||||||
value = r_serv.hget(word, curdate)
|
value = r_serv.hget(word, curdate)
|
||||||
value = int(value.decode('utf8'))
|
value = int(value)
|
||||||
if value >= threshold:
|
if value >= threshold:
|
||||||
row.append(value)
|
row.append(value)
|
||||||
writer.writerow(row)
|
writer.writerow(row)
|
||||||
|
@ -135,7 +137,7 @@ def create_curve_from_redis_set(server, csvfilename, set_to_plot, year, month):
|
||||||
|
|
||||||
redis_set_name = set_to_plot + "_set_" + str(year) + str(month).zfill(2)
|
redis_set_name = set_to_plot + "_set_" + str(year) + str(month).zfill(2)
|
||||||
words = list(server.smembers(redis_set_name))
|
words = list(server.smembers(redis_set_name))
|
||||||
words = [x.decode('utf-8') for x in words]
|
#words = [x.decode('utf-8') for x in words]
|
||||||
|
|
||||||
headers = ['Date'] + words
|
headers = ['Date'] + words
|
||||||
with open(csvfilename+'.csv', 'w') as f:
|
with open(csvfilename+'.csv', 'w') as f:
|
||||||
|
@ -154,5 +156,5 @@ def create_curve_from_redis_set(server, csvfilename, set_to_plot, year, month):
|
||||||
row.append(0)
|
row.append(0)
|
||||||
else:
|
else:
|
||||||
# if the word have a value for the day
|
# if the word have a value for the day
|
||||||
row.append(value.decode('utf8'))
|
row.append(value)
|
||||||
writer.writerow(row)
|
writer.writerow(row)
|
||||||
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 """
 Template for new modules

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 import socks

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 import redis

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 "Hepler to create a new webpage associated with a module."

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''
@@ -26,37 +26,44 @@ cfg.read(configfile)
 r_serv = redis.StrictRedis(
     host=cfg.get("Redis_Queues", "host"),
     port=cfg.getint("Redis_Queues", "port"),
-    db=cfg.getint("Redis_Queues", "db"))
+    db=cfg.getint("Redis_Queues", "db"),
+    decode_responses=True)
 
 r_serv_log = redis.StrictRedis(
     host=cfg.get("Redis_Log", "host"),
     port=cfg.getint("Redis_Log", "port"),
-    db=cfg.getint("Redis_Log", "db"))
+    db=cfg.getint("Redis_Log", "db"),
+    decode_responses=True)
 
 r_serv_charts = redis.StrictRedis(
     host=cfg.get("Redis_Level_DB_Trending", "host"),
     port=cfg.getint("Redis_Level_DB_Trending", "port"),
-    db=cfg.getint("Redis_Level_DB_Trending", "db"))
+    db=cfg.getint("Redis_Level_DB_Trending", "db"),
+    decode_responses=True)
 
 r_serv_sentiment = redis.StrictRedis(
     host=cfg.get("Redis_Level_DB_Sentiment", "host"),
     port=cfg.getint("Redis_Level_DB_Sentiment", "port"),
-    db=cfg.getint("Redis_Level_DB_Sentiment", "db"))
+    db=cfg.getint("Redis_Level_DB_Sentiment", "db"),
+    decode_responses=True)
 
 r_serv_term = redis.StrictRedis(
     host=cfg.get("Redis_Level_DB_TermFreq", "host"),
     port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
-    db=cfg.getint("Redis_Level_DB_TermFreq", "db"))
+    db=cfg.getint("Redis_Level_DB_TermFreq", "db"),
+    decode_responses=True)
 
 r_serv_cred = redis.StrictRedis(
     host=cfg.get("Redis_Level_DB_TermCred", "host"),
     port=cfg.getint("Redis_Level_DB_TermCred", "port"),
-    db=cfg.getint("Redis_Level_DB_TermCred", "db"))
+    db=cfg.getint("Redis_Level_DB_TermCred", "db"),
+    decode_responses=True)
 
 r_serv_pasteName = redis.StrictRedis(
     host=cfg.get("Redis_Paste_Name", "host"),
     port=cfg.getint("Redis_Paste_Name", "port"),
-    db=cfg.getint("Redis_Paste_Name", "db"))
+    db=cfg.getint("Redis_Paste_Name", "db"),
+    decode_responses=True)
 
 # VARIABLES #
 max_preview_char = int(cfg.get("Flask", "max_preview_char")) # Maximum number of character to display in the tooltip
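Every Flask-side connection above now passes decode_responses=True, which moves UTF-8 decoding into the redis client itself: any reply that used to come back as bytes arrives as str. A minimal sketch of the difference, assuming a local server; connection details and key names are illustrative:

import redis

r_raw = redis.StrictRedis(host='localhost', port=6379, db=0)
r_dec = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)

r_dec.set('k', 'v')
print(r_raw.get('k'))  # b'v'  -> every caller had to .decode('utf8')
print(r_dec.get('k'))  # 'v'   -> decoded once, at the connection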
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -37,7 +37,8 @@ for year in os.listdir(lvdbdir):
     r_serv_db[intYear] = redis.StrictRedis(
         host=cfg.get("Redis_Level_DB", "host"),
         port=intYear,
-        db=cfg.getint("Redis_Level_DB", "db"))
+        db=cfg.getint("Redis_Level_DB", "db"),
+        decode_responses=True)
 yearList.sort(reverse=True)
 
 browsepastes = Blueprint('browsepastes', __name__, template_folder='templates')

@@ -55,8 +56,8 @@ def getPastebyType(server, module_name):
 def event_stream_getImportantPasteByModule(module_name, year):
     index = 0
     all_pastes_list = getPastebyType(r_serv_db[year], module_name)
 
     for path in all_pastes_list:
-        path = path.decode('utf8')
         index += 1
         paste = Paste.Paste(path)
         content = paste.get_p_content()

@@ -94,7 +95,6 @@ def importantPasteByModule():
     allPastes = getPastebyType(r_serv_db[currentSelectYear], module_name)
 
     for path in allPastes[0:10]:
-        path = path.decode('utf8')
         all_path.append(path)
         paste = Paste.Paste(path)
         content = paste.get_p_content()
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -26,23 +26,11 @@ def event_stream():
     pubsub = r_serv_log.pubsub()
     pubsub.psubscribe("Script" + '.*')
     for msg in pubsub.listen():
-        # bytes conversion
-        try:
-            type = msg['type'].decode('utf8')
-        except:
-            type = msg['type']
-        try:
-            pattern = msg['pattern'].decode('utf8')
-        except:
-            pattern = msg['pattern']
-        try:
-            channel = msg['channel'].decode('utf8')
-        except:
-            channel = msg['channel']
-        try:
-            data = msg['data'].decode('utf8')
-        except:
-            data = msg['data']
+        type = msg['type']
+        pattern = msg['pattern']
+        channel = msg['channel']
+        data = msg['data']
 
         msg = {'channel': channel, 'type': type, 'pattern': pattern, 'data': data}
 
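On a decoded connection the pub/sub message dictionaries also carry str fields, which is why the per-field try/except dance above collapses into plain assignments. A sketch of the behaviour, assuming a local server; the channel pattern is illustrative, and note that subscription acknowledgements carry an int in 'data':

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
pubsub = r.pubsub()
pubsub.psubscribe('Script.*')
for msg in pubsub.listen():
    # skip the psubscribe acknowledgement, whose 'data' field is an int
    if msg['type'] == 'pmessage':
        print(msg['channel'], msg['pattern'], msg['data'])  # all str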
@@ -54,15 +42,13 @@ def get_queues(r):
     # We may want to put the llen in a pipeline to do only one query.
     newData = []
     for queue, card in r.hgetall("queues").items():
-        queue = queue.decode('utf8')
-        card = card.decode('utf8')
         key = "MODULE_" + queue + "_"
         keySet = "MODULE_TYPE_" + queue
 
         for moduleNum in r.smembers(keySet):
-            moduleNum = moduleNum.decode('utf8')
 
-            value = ( r.get(key + str(moduleNum)) ).decode('utf8')
+            value = r.get(key + str(moduleNum))
 
             if value is not None:
                 timestamp, path = value.split(", ")
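The comment at the top of get_queues suggests batching the length queries into one round trip. A hypothetical sketch of that idea with a redis pipeline; the key layout (one list per queue name) is an assumption for illustration, not taken from this codebase:

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
queues = list(r.hgetall('queues').keys())

pipe = r.pipeline()
for queue in queues:
    pipe.llen(queue)  # assumption: queue name doubles as the list key
lengths = dict(zip(queues, pipe.execute()))  # one network round trip total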
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -58,11 +58,7 @@ def sentiment_analysis_getplotdata():
     if allProvider == "True":
         range_providers = r_serv_charts.smembers('all_provider_set')
 
-        range_providers_str = []
-        for domain in range_providers:
-            m = domain.decode('utf8')
-            range_providers_str.append(m)
-        return jsonify(list(range_providers_str))
+        return jsonify(list(range_providers))
     else:
         range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8)
         # if empty, get yesterday top providers

@@ -74,13 +70,7 @@ def sentiment_analysis_getplotdata():
             print('today provider empty')
             range_providers = r_serv_charts.smembers('all_provider_set')
 
-            # decode bytes
-            range_providers_str = []
-            for domain in range_providers:
-                m = domain.decode('utf8')
-                range_providers_str.append(m)
-
-            return jsonify(list(range_providers_str))
+            return jsonify(list(range_providers))
 
     elif provider is not None:
         to_return = {}

@@ -92,7 +82,7 @@ def sentiment_analysis_getplotdata():
 
             list_value = []
             for cur_id in r_serv_sentiment.smembers(cur_set_name):
-                cur_value = (r_serv_sentiment.get(cur_id)).decode('utf8')
+                cur_value = (r_serv_sentiment.get(cur_id))
                 list_value.append(cur_value)
             list_date[cur_timestamp] = list_value
         to_return[provider] = list_date

@@ -115,7 +105,7 @@ def sentiment_analysis_plot_tool_getdata():
     if getProviders == 'True':
         providers = []
         for cur_provider in r_serv_charts.smembers('all_provider_set'):
-            providers.append(cur_provider.decode('utf8'))
+            providers.append(cur_provider)
         return jsonify(providers)
 
     else:

@@ -144,7 +134,7 @@ def sentiment_analysis_plot_tool_getdata():
 
             list_value = []
             for cur_id in r_serv_sentiment.smembers(cur_set_name):
-                cur_value = (r_serv_sentiment.get(cur_id)).decode('utf8')
+                cur_value = (r_serv_sentiment.get(cur_id))
                 list_value.append(cur_value)
             list_date[cur_timestamp] = list_value
             to_return[cur_provider] = list_date
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -72,7 +72,7 @@ def Term_getValueOverRange(word, startDate, num_day, per_paste=""):
     curr_to_return = 0
     for timestamp in range(startDate, startDate - max(num_day)*oneDay, -oneDay):
         value = r_serv_term.hget(per_paste+str(timestamp), word)
-        curr_to_return += int(value.decode('utf8')) if value is not None else 0
+        curr_to_return += int(value) if value is not None else 0
         for i in num_day:
             if passed_days == i-1:
                 to_return.append(curr_to_return)

@@ -157,10 +157,8 @@ def terms_management():
     trackReg_list_values = []
     trackReg_list_num_of_paste = []
     for tracked_regex in r_serv_term.smembers(TrackedRegexSet_Name):
-        tracked_regex = tracked_regex.decode('utf8')
-        print(tracked_regex)
 
-        notificationEMailTermMapping[tracked_regex] = "\n".join( (r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_regex)).decode('utf8') )
+        notificationEMailTermMapping[tracked_regex] = "\n".join( (r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_regex)) )
 
         if tracked_regex not in notificationEnabledDict:
             notificationEnabledDict[tracked_regex] = False

@@ -176,7 +174,7 @@ def terms_management():
         value_range.append(term_date)
         trackReg_list_values.append(value_range)
 
-        if tracked_regex.encode('utf8') in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
+        if tracked_regex in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
             notificationEnabledDict[tracked_regex] = True
 
     #Set

@@ -184,9 +182,9 @@ def terms_management():
     trackSet_list_values = []
     trackSet_list_num_of_paste = []
     for tracked_set in r_serv_term.smembers(TrackedSetSet_Name):
-        tracked_set = tracked_set.decode('utf8')
+        tracked_set = tracked_set
 
-        notificationEMailTermMapping[tracked_set] = "\n".join( (r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_set)).decode('utf8') )
+        notificationEMailTermMapping[tracked_set] = "\n".join( (r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_set)) )
 
 
         if tracked_set not in notificationEnabledDict:

@@ -203,7 +201,7 @@ def terms_management():
         value_range.append(term_date)
         trackSet_list_values.append(value_range)
 
-        if tracked_set.encode('utf8') in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
+        if tracked_set in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
             notificationEnabledDict[tracked_set] = True
 
     #Tracked terms

@@ -211,13 +209,7 @@ def terms_management():
     track_list_values = []
     track_list_num_of_paste = []
     for tracked_term in r_serv_term.smembers(TrackedTermsSet_Name):
-        tracked_term = tracked_term.decode('utf8')
-        print('tracked_term : .')
-        print(tracked_term)
-
-        #print(TrackedTermsNotificationEmailsPrefix_Name)
-        print(r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_term))
-        #print(tracked_term)
         notificationEMailTermMapping[tracked_term] = "\n".join( r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_term))
 
         if tracked_term not in notificationEnabledDict:

@@ -229,16 +221,14 @@ def terms_management():
         term_date = r_serv_term.hget(TrackedTermsDate_Name, tracked_term)
 
         set_paste_name = "tracked_" + tracked_term
-        print('set_paste_name : .')
-        print(set_paste_name)
         track_list_num_of_paste.append( r_serv_term.scard(set_paste_name) )
-        print('track_list_num_of_paste : .')
-        print(track_list_num_of_paste)
         term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"
         value_range.append(term_date)
         track_list_values.append(value_range)
 
-        if tracked_term.encode('utf8') in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
+        if tracked_term in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
             notificationEnabledDict[tracked_term] = True
 
     #blacklist terms
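The .encode('utf8') membership tests removed above existed because smembers used to return a set of bytes; with decode_responses=True both sides of the in test are str. A minimal sketch, assuming a local server; the set name and member are illustrative:

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
r.sadd('TrackedNotifications', 'leak')

members = r.smembers('TrackedNotifications')  # {'leak'}, not {b'leak'}
print('leak' in members)                      # True, no .encode('utf8') shim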
@@ -246,7 +236,7 @@ def terms_management():
     for blacked_term in r_serv_term.smembers(BlackListTermsSet_Name):
         term_date = r_serv_term.hget(BlackListTermsDate_Name, blacked_term)
         term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"
-        black_list.append([blacked_term.decode('utf8'), term_date])
+        black_list.append([blacked_term, term_date])
 
     return render_template("terms_management.html",
             black_list=black_list, track_list=track_list, trackReg_list=trackReg_list, trackSet_list=trackSet_list,

@@ -259,8 +249,6 @@ def terms_management():
 @terms.route("/terms_management_query_paste/")
 def terms_management_query_paste():
     term = request.args.get('term')
-    print('term :')
-    print(term)
     paste_info = []
 
     # check if regex or not

@@ -275,7 +263,6 @@ def terms_management_query_paste():
         track_list_path = r_serv_term.smembers(set_paste_name)
 
         for path in track_list_path:
-            path = path.decode('utf8')
             paste = Paste.Paste(path)
             p_date = str(paste._get_p_date())
             p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4]

@@ -402,7 +389,6 @@ def terms_management_action():
         r_serv_term.hdel(TrackedRegexDate_Name, term)
     elif term.startswith('\\') and term.endswith('\\'):
         r_serv_term.srem(TrackedSetSet_Name, term)
-        #print(term)
         r_serv_term.hdel(TrackedSetDate_Name, term)
     else:
         r_serv_term.srem(TrackedTermsSet_Name, term.lower())

@@ -515,7 +501,7 @@ def terms_plot_top_data():
         curr_value_range = int(value) if value is not None else 0
         value_range.append([timestamp, curr_value_range])
 
-    to_return.append([term.decode('utf8'), value_range, tot_value, position])
+    to_return.append([term, value_range, tot_value, position])
 
     return jsonify(to_return)
 

@@ -532,7 +518,6 @@ def credentials_management_query_paste():
     paste_info = []
     for pathNum in allPath:
         path = r_serv_cred.hget(REDIS_KEY_ALL_PATH_SET_REV, pathNum)
-        path = path.decode('utf8')
         paste = Paste.Paste(path)
         p_date = str(paste._get_p_date())
         p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4]

@@ -574,7 +559,6 @@ def cred_management_action():
     iter_num = 0
     tot_iter = len(AllUsernameInRedis)*len(possibilities)
     for tempUsername in AllUsernameInRedis:
-        tempUsername = tempUsername.decode('utf8')
         for poss in possibilities:
             #FIXME print progress
             if(iter_num % int(tot_iter/20) == 0):

@@ -583,7 +567,7 @@ def cred_management_action():
             iter_num += 1
 
             if poss in tempUsername:
-                num = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, tempUsername)).decode('utf8')
+                num = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, tempUsername))
                 if num is not None:
                     uniq_num_set.add(num)
                 for num in r_serv_cred.smembers(tempUsername):

@@ -592,7 +576,7 @@ def cred_management_action():
     data = {'usr': [], 'path': [], 'numPaste': [], 'simil': []}
     for Unum in uniq_num_set:
         levenRatio = 2.0
-        username = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET_REV, Unum)).decode('utf8')
+        username = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET_REV, Unum))
 
         # Calculate Levenshtein distance, ignore negative ratio
         supp_splitted = supplied.split()

@@ -604,20 +588,11 @@ def cred_management_action():
 
         data['usr'].append(username)
 
-        try:
-            Unum = Unum.decode('utf8')
-        except:
-            pass
-
         allPathNum = list(r_serv_cred.smembers(REDIS_KEY_MAP_CRED_TO_PATH+'_'+Unum))
 
-        # decode bytes
-        allPathNum_str = []
-        for p in allPathNum:
-            allPathNum_str.append(p.decode('utf8'))
-
-        data['path'].append(allPathNum_str)
-        data['numPaste'].append(len(allPathNum_str))
+        data['path'].append(allPathNum)
+        data['numPaste'].append(len(allPathNum))
         data['simil'].append(levenRatioStr)
 
     to_return = {}
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -47,9 +47,8 @@ def progressionCharts():
         date_range = get_date_range(num_day)
         # Retreive all data from the last num_day
        for date in date_range:
 
             curr_value = r_serv_charts.hget(attribute_name, date)
-            if curr_value is not None:
-                curr_value = curr_value.decode('utf8')
             bar_values.append([date[0:4]+'/'+date[4:6]+'/'+date[6:8], int(curr_value if curr_value is not None else 0)])
         bar_values.insert(0, attribute_name)
         return jsonify(bar_values)

@@ -58,13 +57,7 @@ def progressionCharts():
         redis_progression_name = "z_top_progression_" + trending_name
         keyw_value = r_serv_charts.zrevrangebyscore(redis_progression_name, '+inf', '-inf', withscores=True, start=0, num=10)
 
-        # decode bytes
-        keyw_value_str = []
-        for domain, value in keyw_value:
-            m = domain.decode('utf8'), value
-            keyw_value_str.append(m)
-
-        return jsonify(keyw_value_str)
+        return jsonify(keyw_value)
 
 @trendings.route("/wordstrending/")
 def wordstrending():

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3.5
+#!/usr/bin/env python3
 # -*-coding:UTF-8 -*
 
 '''

@@ -28,18 +28,12 @@ def get_top_relevant_data(server, module_name):
     for date in get_date_range(15):
         redis_progression_name_set = 'top_'+ module_name +'_set_' + date
         member_set = server.zrevrangebyscore(redis_progression_name_set, '+inf', '-inf', withscores=True)
-        member_set_str = []
-
-        # decode bytes
-        for domain, value in member_set:
-            m = domain.decode('utf8'), value
-            member_set_str.append(m)
-
-        if len(member_set_str) == 0: #No data for this date
+        if len(member_set) == 0: #No data for this date
             days += 1
         else:
-            member_set_str.insert(0, ("passed_days", days))
-            return member_set_str
+            member_set.insert(0, ("passed_days", days))
+            return member_set
 
 
 def get_date_range(num_day):
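zrevrangebyscore(..., withscores=True) returns a list of (member, score) tuples; on a decoded connection the members are already str (scores are floats either way), so the rebuild loops removed above are no longer needed. A sketch, assuming a local server and redis-py 3.x for the zadd mapping form; the key and members are illustrative:

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
r.zadd('top_demo_set', {'pastebin.com': 42, 'ghostbin.com': 7})  # redis-py >= 3.0 signature

member_set = r.zrevrangebyscore('top_demo_set', '+inf', '-inf', withscores=True)
print(member_set)  # [('pastebin.com', 42.0), ('ghostbin.com', 7.0)] -- members already str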
@@ -94,13 +88,13 @@ def providersChart():
     for date in date_range:
         curr_value_size = ( r_serv_charts.hget(keyword_name+'_'+'size', date) )
         if curr_value_size is not None:
-            curr_value_size = curr_value_size.decode('utf8')
+            curr_value_size = curr_value_size
 
         curr_value_num = r_serv_charts.hget(keyword_name+'_'+'num', date)
 
         curr_value_size_avg = r_serv_charts.hget(keyword_name+'_'+'avg', date)
         if curr_value_size_avg is not None:
-            curr_value_size_avg = curr_value_size_avg.decode('utf8')
+            curr_value_size_avg = curr_value_size_avg
 
 
     if module_name == "size":

@@ -119,16 +113,10 @@ def providersChart():
 
         member_set = r_serv_charts.zrevrangebyscore(redis_provider_name_set, '+inf', '-inf', withscores=True, start=0, num=8)
 
-        # decode bytes
-        member_set_str = []
-        for domain, value in member_set:
-            m = domain.decode('utf8'), value
-            member_set_str.append(m)
-
         # Member set is a list of (value, score) pairs
-        if len(member_set_str) == 0:
+        if len(member_set) == 0:
             member_set_str.append(("No relevant data", float(100)))
-        return jsonify(member_set_str)
+        return jsonify(member_set)
 
 
 @trendingmodules.route("/moduletrending/")