filters-maker/excluder.py


import os
import crawler
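
# Exclusion-list helpers: each function rewrites the given filter file in
# place, deduplicates its lines, and then calls crawler.sort() to re-sort it
# (crawler is presumably the sibling crawler.py module in this directory).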

def add(incoming, input):
    # Append the new entry to the filter file.
    with open(incoming, 'a') as f:
        f.write(input + '\n')
    # Deduplicate the file's lines, then let crawler re-sort the file.
    with open(incoming, 'r') as f:
        lines = set(f.readlines())
    with open(incoming, 'w') as f:
        f.writelines(lines)
    crawler.sort(incoming)

def add_file(incoming, excluded_in):
    # Merge the contents of excluded_in into the incoming filter file.
    with open(incoming) as fp:
        data = fp.read()
    with open(excluded_in) as fp:
        data2 = fp.read()
    data += data2
    with open(incoming, 'w') as fp:
        fp.write(data + '\n')
    # Deduplicate the merged lines and let crawler re-sort the file.
    with open(incoming, 'r') as f:
        lines = set(f.readlines())
    with open(incoming, 'w') as f:
        f.writelines(lines)
    crawler.sort(incoming)
    # The merged source file is no longer needed.
    os.remove(excluded_in)

def remove(incoming, input):
    # Strip the given entry from every line that starts with it;
    # other lines are written back unchanged.
    with open(incoming, 'r') as f:
        lines = f.read().split()
    with open(incoming, 'w') as f:
        for line in lines:
            if line.startswith(input):
                f.write(line.replace(input, '') + '\n')
            else:
                f.write(line + '\n')
    # Drop any lines left empty by the replacement above.
    with open(incoming, 'r') as f:
        lines = f.read().split()
    with open(incoming, 'w') as f:
        for line in lines:
            if line.strip():
                f.write(line + '\n')
    # Deduplicate what remains and let crawler re-sort the file.
    with open(incoming, 'r') as f:
        lines = set(f.readlines())
    with open(incoming, 'w') as f:
        f.writelines(lines)
    crawler.sort(incoming)
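
# Example usage (a sketch; the file names and entries below are hypothetical,
# and crawler.sort(path) is assumed to sort the file in place):
#
#     add('exclude.txt', 'ads.example.com')        # append one entry, dedupe, sort
#     add_file('exclude.txt', 'new_excludes.txt')  # merge a file of entries, then delete it
#     remove('exclude.txt', 'ads.example.com')     # strip the entry back out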