#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
The ZMQ_PubSub_Lines Module
============================

This module consumes the Redis-list created by the ZMQ_PubSub_Line_Q Module.

It performs a sorting on line length and publishes/forwards pastes to different
channels:

* Channel 1 if the maximum line length < max
* Channel 2 if the maximum line length >= max

(A minimal sketch of this split is given just above ``main()`` below.)

The collected information about the processed pastes
(number of lines and maximum line length) is stored in Redis.

.. note:: The ZMQ_Something_Q and ZMQ_Something modules are closely bound; always put
    the same Subscriber name in both of them.

Requirements
------------

* Running Redis instances. (LevelDB & Redis; see the configuration sketch below)
* The ZMQ_PubSub_Line_Q Module must be running for this module to work properly.
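
Configuration
-------------

For reference, a minimal sketch of the ``./packages/config.cfg`` sections and keys
this script reads directly; the values below are placeholders, not project defaults::

    [Redis_Data_Merging]
    host = localhost
    port = 6379
    db = 1

    [Redis_Queues]
    host = localhost
    port = 6379
    db = 0

    [PubSub_Global]
    channel = filelist

    [PubSub_Longlines]
    channel_0 = Longlines
    channel_1 = Shortlines

(The ZMQ_PubSub helper classes may require additional keys in the PubSub sections;
only the keys used in this file are shown.)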
"""

import redis
import argparse
import ConfigParser
import time

from packages import Paste
from packages import ZMQ_PubSub
from pubsublogger import publisher

configfile = './packages/config.cfg'
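
# Illustrative sketch only (not part of the original module's API): it mirrors the
# "short lines" / "long lines" split described in the docstring and performed in
# main() below. `limit` corresponds to the -max command-line argument (default 500).
def _route_by_line_length(max_line_length, limit, long_channel, short_channel):
    """Return the channel a paste with the given maximum line length would be sent on."""
    if max_line_length >= limit:
        return long_channel
    return short_channel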


def main():
    """Main Function"""

    # CONFIG #
    cfg = ConfigParser.ConfigParser()
    cfg.read(configfile)

    # SCRIPT PARSER #
    parser = argparse.ArgumentParser(
        description='''This script is a part of the Analysis Information Leak framework.''',
        epilog='''''')

    parser.add_argument('-max', type=int, default=500,
                        help='The limit between "short lines" and "long lines" (500)',
                        action='store')

    args = parser.parse_args()

    # REDIS #
    # r_serv stores the paste metadata (line counts, Pastes_Objects set).
    r_serv = redis.StrictRedis(
        host=cfg.get("Redis_Data_Merging", "host"),
        port=cfg.getint("Redis_Data_Merging", "port"),
        db=cfg.getint("Redis_Data_Merging", "db"))

    # r_serv1 backs the ZMQ_PubSub queue and the shutdown flags.
    r_serv1 = redis.StrictRedis(
        host=cfg.get("Redis_Queues", "host"),
        port=cfg.getint("Redis_Queues", "port"),
        db=cfg.getint("Redis_Queues", "db"))

    # LOGGING #
    publisher.channel = "Script"

    # ZMQ #
    # Subscriber
    channel = cfg.get("PubSub_Global", "channel")
    subscriber_name = "line"
    subscriber_config_section = "PubSub_Global"

    # Publisher
    publisher_config_section = "PubSub_Longlines"
    publisher_name = "publine"

    sub = ZMQ_PubSub.ZMQSub(configfile, subscriber_config_section, channel, subscriber_name)

    pub = ZMQ_PubSub.ZMQPub(configfile, publisher_config_section, publisher_name)

    channel_0 = cfg.get("PubSub_Longlines", "channel_0")
    channel_1 = cfg.get("PubSub_Longlines", "channel_1")

    # FUNCTIONS #
    tmp_string = "Lines script subscribed to channel {} and started to publish on channels {}, {}"
    publisher.info(tmp_string.format(
        cfg.get("PubSub_Global", "channel"),
        cfg.get("PubSub_Longlines", "channel_0"),
        cfg.get("PubSub_Longlines", "channel_1")))

    while True:
        try:
            message = sub.get_msg_from_queue(r_serv1)
            if message is not None:
                # Queue messages look like "<channel> <path_to_paste>"; keep the path.
                PST = Paste.Paste(message.split(" ", -1)[-1])
            else:
                if r_serv1.sismember("SHUTDOWN_FLAGS", "Lines"):
                    r_serv1.srem("SHUTDOWN_FLAGS", "Lines")
                    print "Shutdown Flag Up: Terminating"
                    publisher.warning("Shutdown Flag Up: Terminating.")
                    break
                publisher.debug("Lines script is idling 10s")
                time.sleep(10)
                continue

            lines_infos = PST.get_lines_info()

            PST.save_attribute_redis(r_serv, "p_nb_lines", lines_infos[0])
            PST.save_attribute_redis(r_serv, "p_max_length_line", lines_infos[1])

            r_serv.sadd("Pastes_Objects", PST.p_path)
            # Long pastes go to channel_0, short ones to channel_1.
            if lines_infos[1] >= args.max:
                msg = channel_0+" "+PST.p_path
            else:
                msg = channel_1+" "+PST.p_path

            pub.send_message(msg)
        except IOError:
            print "CRC Checksum Error on : ", PST.p_path
            pass


if __name__ == "__main__":
    main()