Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-13 01:58:22 +00:00)

fix duplicate + fix issue #200

commit d7ba5533be (parent 559a98b537)
2 changed files with 7 additions and 3 deletions
@@ -54,7 +54,7 @@ if __name__ == "__main__":
             dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
                 host=p.config.get("ARDB_DB", "host"),
                 port=p.config.get("ARDB_DB", "port"),
-                db='year' + 'month',
+                db=str(year) + str(month),
                 decode_responses=True)

     # FUNCTIONS #
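The only functional change in this hunk is the db argument: 'year' + 'month' concatenates two string literals, so every client stored in dico_redis pointed at the same 'yearmonth' database regardless of the loop variables, while str(year) + str(month) yields a distinct database name per (year, month) pair. A minimal standalone sketch of the difference (the year and month values below are made up, not taken from the commit):

    # sketch only: shows what the old and new expressions evaluate to
    year, month = 2016, 7

    old_db = 'year' + 'month'        # always the literal 'yearmonth', whatever the loop variables hold
    new_db = str(year) + str(month)  # '20167': one value per (year, month) pair

    print(old_db, new_db)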
@@ -121,7 +121,7 @@ class Paste(object):
         except:
             paste = ''

-        return paste
+        return str(paste)

     def get_p_content_as_file(self):
         message = StringIO(self.get_p_content())
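Returning str(paste) makes the method hand back a plain str even when the value read in the try block is something else; that matters for get_p_content_as_file() just below, because io.StringIO only accepts str. A hedged sketch of that constraint, assuming (the diff itself does not show this) that the cached value can come back as bytes:

    from io import StringIO

    cached = b'cached paste content'   # assumption: the cache may hand back bytes
    # StringIO(cached) would raise TypeError: initial_value must be str or None
    message = StringIO(str(cached))    # str() guarantees the type StringIO expects
    print(message.read())

Note that str() on bytes produces the repr (including the b'...' wrapper); a .decode('utf-8') would be needed to recover the original text, so the coercion only guarantees the return type.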
@@ -332,7 +332,11 @@ class Paste(object):
         json_duplicate = self.store.hget(path, attr_name)
         #json save on redis
         if json_duplicate is not None:
-            list_duplicate = json.loads(json_duplicate)
+            list_duplicate = (json.loads(json_duplicate))
+
+            # avoid duplicate
+            list_duplicate = set(tuple(row) for row in list_duplicate)
+            list_duplicate = [list(item) for item in set(tuple(row) for row in list_duplicate)]

             # add new duplicate
             list_duplicate.append([hash_type, self.p_path, percent, date])
             self.store.hset(path, attr_name, json.dumps(list_duplicate))
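The added block removes duplicate entries from list_duplicate before the new one is appended. Inner lists are not hashable, so each row is converted to a tuple, the tuples are passed through a set, and the survivors are converted back to lists so json.dumps can still serialize them; as a side effect the original order is not preserved, and the single list-comprehension line already performs the whole round-trip, so the preceding set(...) assignment is immediately overwritten. A standalone sketch of the pattern with invented rows (the real entries are [hash_type, self.p_path, percent, date]):

    import json

    # sketch only: the rows below are made up
    list_duplicate = [
        ['ssdeep', '/paste/2017/07/01/a.gz', 85, '2017-07-01'],
        ['ssdeep', '/paste/2017/07/01/a.gz', 85, '2017-07-01'],  # same entry recorded twice
        ['tlsh',   '/paste/2017/07/01/b.gz', 60, '2017-07-01'],
    ]

    # lists are unhashable, so deduplicate via tuples, then convert back for JSON
    list_duplicate = [list(item) for item in set(tuple(row) for row in list_duplicate)]

    print(json.dumps(list_duplicate))  # two unique entries remain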