From d7ba5533be65bb6f811765520c8383f1d9e942ba Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 11 May 2018 13:25:45 +0200
Subject: [PATCH] fix duplicate + fix issue #200

---
 bin/Duplicates.py     | 2 +-
 bin/packages/Paste.py | 8 ++++++--
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/bin/Duplicates.py b/bin/Duplicates.py
index 68dc6a95..ecbe20ce 100755
--- a/bin/Duplicates.py
+++ b/bin/Duplicates.py
@@ -54,7 +54,7 @@ if __name__ == "__main__":
             dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
                 host=p.config.get("ARDB_DB", "host"),
                 port=p.config.get("ARDB_DB", "port"),
-                db='year' + 'month',
+                db=str(year) + str(month),
                 decode_responses=True)
 
     # FUNCTIONS #
diff --git a/bin/packages/Paste.py b/bin/packages/Paste.py
index 9564cc19..d52539e2 100755
--- a/bin/packages/Paste.py
+++ b/bin/packages/Paste.py
@@ -121,7 +121,7 @@ class Paste(object):
         except:
             paste = ''
 
-        return paste
+        return str(paste)
 
     def get_p_content_as_file(self):
         message = StringIO(self.get_p_content())
@@ -332,7 +332,11 @@ class Paste(object):
         json_duplicate = self.store.hget(path, attr_name)
         #json save on redis
         if json_duplicate is not None:
-            list_duplicate = json.loads(json_duplicate)
+            list_duplicate = (json.loads(json_duplicate))
+            # avoid duplicate
+            list_duplicate = set(tuple(row) for row in list_duplicate)
+            list_duplicate = [list(item) for item in set(tuple(row) for row in list_duplicate)]
+            # add new duplicate
             list_duplicate.append([hash_type, self.p_path, percent, date])
             self.store.hset(path, attr_name, json.dumps(list_duplicate))
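
Note (not part of the patch): a minimal standalone sketch of the deduplication step added in the second Paste.py hunk, assuming list_duplicate is a JSON-encoded list of [hash_type, path, percent, date] rows as shown above. The helper name dedup_and_append is hypothetical and only illustrates the set-of-tuples round trip used to drop repeated rows before appending the new one.

    import json

    def dedup_and_append(json_duplicate, new_entry):
        # decode the stored JSON list of [hash_type, path, percent, date] rows
        list_duplicate = json.loads(json_duplicate)
        # rows are lists (unhashable), so convert each to a tuple to build a set of unique rows
        list_duplicate = [list(row) for row in set(tuple(row) for row in list_duplicate)]
        # append the new duplicate entry and re-encode for storage
        list_duplicate.append(new_entry)
        return json.dumps(list_duplicate)

    # example: a repeated row is kept only once before the new entry is appended
    stored = json.dumps([["ssdeep", "/path/a", 85, "20180511"],
                         ["ssdeep", "/path/a", 85, "20180511"]])
    print(dedup_and_append(stored, ["tlsh", "/path/b", 62, "20180511"]))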