Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-10 08:38:28 +00:00)
Added operation mode 3 for Mixer (do not filter) and added possibility to recurse through dir for import_dir
parent c7e0b2ee4b
commit c3632c7d86
2 changed files with 13 additions and 2 deletions:
  bin/Mixer.py (13)
  bin/import_dir.py (2)

bin/Mixer.py
@@ -19,6 +19,9 @@ Depending on the configuration, this module will process the feed as follow:
         - Elseif, the saved content associated with the paste is not the same, process it
         - Else, do not process it but keep track for statistics on duplicate

+    operation_mode 3: "Don't look if duplicate"
+        - Simply do not bother to check if it is a duplicate
+
 Note that the hash of the content is defined as the sha1(gzip64encoded).

 Every data coming from a named feed can be sent to a pre-processing module before going to the global module.
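In practice the new mode simply disables the duplicate lookup before forwarding. A minimal sketch of how the mode could be read and interpreted (not the module's actual code; the config path, section and option names below are assumptions):

import configparser

# Assumed config location and section/option names, for illustration only
config = configparser.ConfigParser()
config.read('packages/config.cfg')
operation_mode = config.getint('Module_Mixer', 'operation_mode')

def duplicate_check_needed(mode):
    # Modes 1 and 2 look up previously seen pastes before forwarding;
    # mode 3 ("Don't look if duplicate") forwards everything unconditionally.
    return mode in (1, 2)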
@@ -76,6 +79,7 @@ if __name__ == '__main__':
     duplicated_paste_per_feeder = {}
     time_1 = time.time()

+    print('Operation mode ' + str(operation_mode))

     while True:
@@ -121,7 +125,7 @@ if __name__ == '__main__':

                 # Keep duplicate coming from different sources
-                else:
+                elif operation_mode == 2:
                     # Filter to avoid duplicate
                     content = server.get('HASH_'+paste_name)
                     if content is None:
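For context, the mode-2 branch deduplicates by comparing a digest of the incoming paste with whatever was stored under the same paste name in Redis ('HASH_'+paste_name). A standalone sketch of that pattern, assuming a plain redis-py connection and sha1(gzip64encoded) as the digest per the docstring (connection parameters and key layout simplified for illustration):

import hashlib
import redis

server = redis.Redis(host='localhost', port=6379, db=1)  # connection parameters assumed

def is_new_or_changed(paste_name, gzip64encoded):
    # gzip64encoded is the base64 payload as bytes
    digest = hashlib.sha1(gzip64encoded).hexdigest()
    stored = server.get('HASH_' + paste_name)
    if stored is None:
        # First time this paste name is seen: remember its digest and process it
        server.set('HASH_' + paste_name, digest)
        return True
    # Same name seen before: only process it if the content actually changed
    return stored.decode() != digest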
@@ -158,6 +162,13 @@ if __name__ == '__main__':
                             #STATS
                             duplicated_paste_per_feeder[feeder_name] += 1
                             continue
+                else:
+                    # populate Global OR populate another set based on the feeder_name
+                    if feeder_name in FEED_QUEUE_MAPPING:
+                        p.populate_set_out(relay_message, FEED_QUEUE_MAPPING[feeder_name])
+                    else:
+                        p.populate_set_out(relay_message, 'Mixer')
+

             else:
                 # TODO Store the name of the empty paste inside a Redis-list.
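The new else branch is operation mode 3: every paste is forwarded without any duplicate lookup, going either to a feeder-specific pre-processing queue from FEED_QUEUE_MAPPING or to the default 'Mixer' queue. A small standalone sketch of that routing decision (the example mapping and the send callback are made up for illustration):

# Hypothetical mapping: feeder name -> dedicated pre-processing queue
FEED_QUEUE_MAPPING = {'feeder2': 'Mixerq2'}

def route_without_dedup(relay_message, feeder_name, send):
    # Mode 3: no duplicate check, just choose the destination queue
    queue = FEED_QUEUE_MAPPING.get(feeder_name, 'Mixer')
    send(relay_message, queue)

# Example: messages from 'feeder2' go to 'Mixerq2', everything else to 'Mixer'
route_without_dedup('feeder2 some_paste.gz <gzip64>', 'feeder2',
                    lambda msg, q: print(q, msg))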
bin/import_dir.py

@@ -24,5 +24,5 @@ if __name__ == "__main__":
         for filename in filenames:
             messagedata = open(os.path.join(dirname, filename)).read()
             print(os.path.join(dirname, filename))
-            socket.send('{} {} {}'.format(args.channel, filename, base64.b64encode(messagedata)))
+            socket.send('{} {} {}'.format(args.channel, os.path.join(dirname, filename), base64.b64encode(messagedata)))
             time.sleep(.2)
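The import_dir.py change publishes the full path from the directory walk (os.path.join(dirname, filename)) instead of the bare filename, so files found in subdirectories keep their relative location in the message. A self-contained sketch of that recursive import loop, assuming a ZeroMQ PUB socket and fixed channel/directory values in place of the script's CLI arguments (bind address and message framing are illustrative):

import base64
import os
import time
import zmq

channel = '102'                      # assumed channel name
directory = '/tmp/pastes'            # assumed import root

context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind('tcp://127.0.0.1:5556')  # bind address assumed
time.sleep(1)                        # give subscribers time to connect

# os.walk recurses through every subdirectory of the import root
for dirname, dirnames, filenames in os.walk(directory):
    for filename in filenames:
        path = os.path.join(dirname, filename)
        with open(path, 'rb') as f:
            messagedata = f.read()
        # Publish "<channel> <path> <base64 payload>", keeping the
        # subdirectory part of the path in the message
        socket.send_string('{} {} {}'.format(
            channel, path, base64.b64encode(messagedata).decode()))
        time.sleep(.2)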