#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
Queue consumer that validates e-mail addresses found in pastes.

Pops messages from a Redis-backed queue (via Helper.Redis_Queues); for
each referenced paste, extracts candidate e-mail addresses with a regex,
checks them against MX records (lib_refine.checking_MX_record, cached in
a second Redis instance), persists the result on the paste and logs a
summary — warning level when the count exceeds a threshold.
"""
import pprint
import time

import redis
import dns.exception

from packages import Paste
from packages import lib_refine
from pubsublogger import publisher

import Helper

if __name__ == "__main__":
    publisher.channel = "Script"

    config_section = 'PubSub_Categ'
    config_channel = 'channel_1'
    subscriber_name = 'emails'

    h = Helper.Redis_Queues(config_section, config_channel, subscriber_name)

    # Subscriber
    h.zmq_sub(config_section)

    # REDIS #
    # r_serv1: long-term store for per-paste attributes.
    r_serv1 = redis.StrictRedis(
        host=h.config.get("Redis_Data_Merging", "host"),
        port=h.config.getint("Redis_Data_Merging", "port"),
        db=h.config.getint("Redis_Data_Merging", "db"))

    # r_serv2: cache handed to the MX-record checker.
    r_serv2 = redis.StrictRedis(
        host=h.config.get("Redis_Cache", "host"),
        port=h.config.getint("Redis_Cache", "port"),
        db=h.config.getint("Redis_Cache", "db"))

    # FUNCTIONS #
    publisher.info("Subscribed to channel mails_categ")

    message = h.redis_rpop()
    prec_filename = None

    # Log as warning if there are more than that amount of valid e-mails.
    is_critical = 10

    # Raw string so the backslash in `\.` reaches the regex engine intact.
    email_regex = r"[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}"

    while True:
        try:
            if message is not None:
                channel, filename, word, score = message.split()

                # Avoid re-processing when consecutive messages reference
                # the same paste.
                if prec_filename is None or filename != prec_filename:
                    PST = Paste.Paste(filename)
                    MX_values = lib_refine.checking_MX_record(
                        r_serv2, PST.get_regex(email_regex))

                    if MX_values[0] >= 1:
                        # Attach the result to the paste object and persist it.
                        setattr(PST, channel, MX_values)
                        PST.save_attribute_redis(r_serv1, channel,
                                                 (MX_values[0],
                                                  list(MX_values[1])))

                        pprint.pprint(MX_values)
                        to_print = 'Mails;{};{};{};Checked {} e-mail(s)'.\
                            format(PST.p_source, PST.p_date, PST.p_name,
                                   MX_values[0])
                        if MX_values[0] > is_critical:
                            publisher.warning(to_print)
                        else:
                            publisher.info(to_print)
                prec_filename = filename

            else:
                # Empty queue: honour the shutdown flag, otherwise idle.
                if h.redis_queue_shutdown():
                    print("Shutdown Flag Up: Terminating")
                    publisher.warning("Shutdown Flag Up: Terminating.")
                    break
                publisher.debug("Script Mails is Idling 10s")
                time.sleep(10)

            message = h.redis_rpop()
        except dns.exception.Timeout:
            # FIXME retry! (`message` is left unchanged here, so the next
            # loop iteration reprocesses the paste that timed out)
            print("dns.exception.Timeout")