From 3aa7d1a3710677e225af18941b851b375e14b779 Mon Sep 17 00:00:00 2001
From: Kale
Date: Tue, 3 Aug 2021 05:04:59 -0700
Subject: [PATCH] remove duplicate defi

---
 excluder.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/excluder.py b/excluder.py
index 13ff4aa..3ad049d 100644
--- a/excluder.py
+++ b/excluder.py
@@ -1,3 +1,4 @@
+import crawler
 
 def add(incoming,input):
     with open(incoming, 'r') as f:
@@ -8,7 +9,7 @@ def add(incoming,input):
         lines = set(f.readlines())
     with open(incoming, 'w') as f:
         f.writelines(set(lines))
-    sort(incoming)
+    crawler.sort(incoming)
 
 def add_file(incoming,excluded_in):
     data= ""
@@ -24,11 +25,4 @@ def add_file(incoming,excluded_in):
         lines = set(f.readlines())
     with open(incoming, 'w') as f:
         f.writelines(set(lines))
-    sort(incoming)
-
-def sort(incoming):
-    with open(incoming, 'r') as f:
-        lines = sorted(f.readlines())
-    with open(incoming, 'w') as f:
-        for line in lines:
-            f.write(line)
\ No newline at end of file
+    crawler.sort(incoming)
\ No newline at end of file
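
Note: the patch assumes crawler.py (imported at the top of excluder.py) already provides a sort() helper equivalent to the duplicate definition deleted here; the patch itself does not show that module. A minimal sketch of the assumed crawler.sort, mirroring the removed function:

    # crawler.py -- assumed to hold the surviving copy of sort()
    def sort(incoming):
        # Read every line of the file, sort them, and rewrite the
        # file in sorted order (same behavior as the deleted helper).
        with open(incoming, 'r') as f:
            lines = sorted(f.readlines())
        with open(incoming, 'w') as f:
            for line in lines:
                f.write(line)

With that helper in place, both add() and add_file() delegate sorting to crawler.sort(incoming) instead of a local copy.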