From 7fb29f89ff38aaea5380df182c26652024e9bba8 Mon Sep 17 00:00:00 2001 From: minoplhy Date: Wed, 11 Aug 2021 12:50:54 +0700 Subject: [PATCH] Duplicate line issues might be fixed --- excluder.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/excluder.py b/excluder.py index f381701..67160cb 100644 --- a/excluder.py +++ b/excluder.py @@ -22,16 +22,16 @@ def add_file(incoming,excluded_in): data += data2 with open (incoming, 'w') as fp: fp.write(data + '\n') - with open(incoming, 'r') as f: - lines = set(f.readlines()) - with open(incoming, 'w') as f: - f.writelines(set(lines)) with open(incoming ,'r') as f: lines = f.read().split() with open(incoming ,'w') as f: for line in lines: if line.strip() and not line.startswith((tuple(comment_roc))): f.write('\n'.join([line + '\n'])) + with open(incoming, 'r') as f: + lines = set(f.readlines()) + with open(incoming, 'w') as f: + f.writelines(set(lines)) crawler.sort(incoming) os.remove(excluded_in)