Refactored some code, downgraded the logger level almost everywhere, and fixed minor bugs

master
myumyu 3 years ago
parent d29c673e7d
commit 2630492c84
  evaluator.py (24 changed lines)
  main.py (44 changed lines)

--- a/evaluator.py
+++ b/evaluator.py
@@ -7,29 +7,21 @@ from config import config

 class Evaluator:
     def __init__(self):
         self.blacklist_re = re.compile('|'.join(config.BLACKLIST), re.IGNORECASE)
-        logging.info(f"Compiled blacklist regex: {self.blacklist_re}")
-        self.url_blacklist_re = re.compile(f'(?!{"|".join(config.URL_WHITELIST)})',
-                                           re.IGNORECASE)  # "inverted" so anything not whitelisted will match it
-        logging.info(f"Compiled url blacklist regex: {self.url_blacklist_re}")
-        self._extractor = URLExtract(extract_localhost=False)
+        # "inverted" so anything not whitelisted will match it
+        self.url_blacklist_re = re.compile(f'(?!{"|".join(config.URL_WHITELIST)})', re.IGNORECASE)
+        logging.debug(f'Compiled url blacklist regex: {self.url_blacklist_re}\nblacklist regex: {self.blacklist_re}')
+        self.__extractor = URLExtract(extract_localhost=False)

     def evaluate(self, text):
-        trigger_urls = []
-        trigger_entries = []
-        if text and self.url_blacklist_re:  # searches for blacklisted (extracted) urls
-            trigger_urls = list(filter(
-                self.url_blacklist_re.match,
-                self._extractor.find_urls(text, only_unique=True)
-            ))
-        if text and self.blacklist_re:  # searches for blacklisted text
-            trigger_entries = re.findall(self.blacklist_re, text)
-        logging.debug(f"Evaluated: {text}\n{trigger_urls}\n{trigger_entries}")
+        if not text:
+            return [], []
+        trigger_urls = [*filter(self.url_blacklist_re.match, self.__extractor.find_urls(text, only_unique=True))]
+        trigger_entries = [entry.group(0) for entry in re.finditer(self.blacklist_re, text)]
+        logging.debug(f'Evaluated: {text}\n{trigger_urls}\n{trigger_entries}')
         return trigger_urls, trigger_entries
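
The inverted whitelist works because re.match anchors at position 0 and a negative lookahead is zero-width: the compiled pattern produces an empty match exactly when a URL does not start with one of the whitelisted prefixes. A minimal standalone sketch, with hypothetical values standing in for config.URL_WHITELIST:

    import re

    # hypothetical stand-ins for config.URL_WHITELIST
    URL_WHITELIST = [r'https://example\.com', r'https://wiki\.example\.org']
    url_blacklist_re = re.compile(f'(?!{"|".join(URL_WHITELIST)})', re.IGNORECASE)

    # the lookahead consumes nothing, so match() succeeds (with a zero-width
    # match) only when no whitelisted prefix matches at the start of the string
    print(bool(url_blacklist_re.match('https://example.com/page')))  # False: whitelisted
    print(bool(url_blacklist_re.match('https://evil.test/page')))    # True: flagged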

--- a/main.py
+++ b/main.py
@@ -10,19 +10,20 @@ from config import config
 from evaluator import Evaluator


-def build_post_path(post):
+def get_path(post):
     return f'>>>/{post["board"]}/{post["thread"] or post["postId"]} ({post["postId"]})'


 def send_notification(title, body):
-    logging.info(f'Sending notification: {title} {body}')
+    logging.debug(f'Sending notification: {title} {body}')
     subprocess.call(['termux-notification', '--title', title, '--content', body] if config.USE_TERMUX_API
                     else ['notify-send', title, body])


 def watch_live_posts(evaluate, notify):
     def get_auth_cookie():
-        logging.info("Requesting new cookie to watch live posts")
+        logging.debug('Requesting new cookie to watch live posts')
         return requests.post(
             url=f'https://{config.IB_DOMAIN_NAME}/forms/login',
             data={'username': config.GLOBAL_MOD_USERNAME, 'password': config.GLOBAL_MOD_PASSWORD},
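
get_path renders the imageboard's cross-board link format: a reply links to its parent thread, while an OP (whose "thread" field is null) falls back via `or` to its own id. Illustrated with made-up post dicts:

    def get_path(post):
        return f'>>>/{post["board"]}/{post["thread"] or post["postId"]} ({post["postId"]})'

    print(get_path({'board': 'b', 'thread': 100, 'postId': 123}))   # >>>/b/100 (123)
    print(get_path({'board': 'b', 'thread': None, 'postId': 100}))  # >>>/b/100 (100)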
@@ -35,24 +36,26 @@ def watch_live_posts(evaluate, notify):
     def on_new_post(post):
         urls, entries = evaluate(post["nomarkup"])
         if urls or entries:
-            notify(f'Alert! {build_post_path(post)}',
-                   "\n".join(urls + entries))
+            notify(f'Alert! {get_path(post)}', '\n'.join([*urls, *entries]))

     while True:
         try:
             client.connect(f'wss://{config.IB_DOMAIN_NAME}/', headers={'Cookie': get_auth_cookie()})
             client.emit('room', 'globalmanage-recent-hashed')
         except Exception as e:
-            notify(f"Lost connection", f"{e}\nRetrying in {config.LIVE_POSTS_RETRY_TIMEOUT} seconds")
+            logging.error(f'Exception in live posts watcher: {e}')
+            notify('Lost live posts connection', f'Retrying in {config.LIVE_POSTS_RETRY_TIMEOUT} seconds')
             time.sleep(config.LIVE_POSTS_RETRY_TIMEOUT)  # waits for a bit, maybe will fix itself
-        notify(f"Connected", f"Watching live posts")
+        notify('Connected', 'Watching live posts')
         client.wait()  # blocks the thread until something happens


 def watch_reports(notify):
     def get_auth_session():
-        logging.info("Starting new authenticated session to fetch reports")
+        logging.debug('Starting new authenticated session to fetch reports')
         s = requests.Session()
         s.post(
             url=f'https://{config.IB_DOMAIN_NAME}/forms/login',
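
Joining one combined list in on_new_post sidesteps a subtle pitfall: concatenating two separate joins, '\n'.join(urls) + '\n'.join(entries), glues the last url onto the first entry with no separator between them. A quick demonstration with made-up values:

    urls = ['https://a.test', 'https://b.test']
    entries = ['badword']

    print(repr('\n'.join(urls) + '\n'.join(entries)))  # 'https://a.test\nhttps://b.testbadword'
    print(repr('\n'.join([*urls, *entries])))          # 'https://a.test\nhttps://b.test\nbadword'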
@@ -66,26 +69,27 @@ def watch_reports(notify):
     previous = 0
     while True:
         time.sleep(config.FETCH_REPORTS_INTERVAL)
-        reply = session.get(f"https://{config.IB_DOMAIN_NAME}/globalmanage/reports.json")
+        reply = session.get(f'https://{config.IB_DOMAIN_NAME}/globalmanage/reports.json')
         if reply.status_code != 200:
-            notify(f"Error while fetching reports",
-                   f"{reply.status_code}\nRetrying in {config.FETCH_REPORTS_INTERVAL} seconds")
+            logging.error(f'Error while fetching reports: {reply.status_code}')
+            notify('Error while fetching reports', f'Retrying in {config.FETCH_REPORTS_INTERVAL} seconds')
             session = get_auth_session()
             continue
-        entries = reply.json()["reports"]
-        current = len(entries)  # number of posts reported (not the number of reports)
+        reported_posts = reply.json()["reports"]
+        current = len(reported_posts)  # number of posts reported (not the number of reports)
         if 0 < current != previous:
-            notify(f"New reports!",
-                   "\n".join([f'{build_post_path(entry)} {[report["reason"] for report in entry["globalreports"]]}'
-                              for entry in entries]))
+            notify('New reports!',
+                   '\n'.join([f'{get_path(p)} {[r["reason"] for r in p["globalreports"]]}' for p in reported_posts]))
         previous = current


 def main():
-    logging.basicConfig(level=logging.INFO)
-    # launches live post watcher
+    logging.basicConfig(level=logging.DEBUG)
+    # launches live posts watcher
     live_posts_watcher = Thread(target=watch_live_posts, args=(Evaluator().evaluate, send_notification,))
     live_posts_watcher.daemon = True
     live_posts_watcher.start()
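
The `0 < current != previous` guard uses Python comparison chaining, equivalent to `current > 0 and current != previous`: notify only when at least one post is reported and the count changed since the last poll. A small sketch:

    def should_notify(current, previous):
        # chained comparison: (0 < current) and (current != previous)
        return 0 < current != previous

    print(should_notify(3, 0))  # True: reports appeared
    print(should_notify(3, 3))  # False: count unchanged since last poll
    print(should_notify(0, 3))  # False: no reported posts left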
@@ -99,5 +103,5 @@ def main():
     reports_watcher.join()


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
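
Since the commit downgrades most messages from info to debug, the root logger level set in main() decides whether they are emitted at all: with basicConfig(level=logging.DEBUG) everything above is shown, while switching the level back to logging.INFO would silence the debug chatter without touching any call site. A minimal sketch:

    import logging

    logging.basicConfig(level=logging.DEBUG)  # as in main(); logging.INFO would drop the debug lines
    logging.debug('Requesting new cookie to watch live posts')  # emitted at DEBUG, hidden at INFO
    logging.error('Exception in live posts watcher: ...')       # emitted at either level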
