From a05727f101ce9eea8b5ccaab2f1cdc0d3697e485 Mon Sep 17 00:00:00 2001
From: minoplhy
Date: Wed, 18 Aug 2021 12:34:52 +0700
Subject: [PATCH] Space issues fixing attempt

---
 crawler.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/crawler.py b/crawler.py
index dba4b1a..e5fcc1e 100644
--- a/crawler.py
+++ b/crawler.py
@@ -44,6 +44,8 @@ def filteringcon(filters_regex_one):
         file[i] = re.sub('CNAME . ;..*', '', file[i])
         file[i] = re.sub(';..*', '', file[i])
         file[i] = re.sub('\A^\.' ,'' ,file[i])
+        file[i] = re.sub('^\s+|\s+$' ,'' ,file[i])
+        file[i] = re.sub(' $' ,'' ,file[i])
     with open(filters_regex_one, 'w') as f1:
         f1.writelines(["%s\n" % item for item in file])
     print("++ successful!")
@@ -74,12 +76,6 @@ def filteringcon(filters_regex_one):
         for line in lines:
             if not line.endswith((tuple(remove_words))):
                 f.write('\n'.join([line + '\n']))
-    f.close()
-    with open(filters_regex_one, 'r') as f:
-        lines = f.read().splitlines()
-    with open(filters_regex_one, 'w') as f:
-        for line in lines:
-            f.write('\n'.join(line.split('\n')))

 def killingdup(duplicated_file):
     print('Getting rid of duplicated line')