From 0a8776e84b8ad7a76f4d2839b6940288f9fc5685 Mon Sep 17 00:00:00 2001
From: PlexSheep
Date: Tue, 9 May 2023 15:43:53 +0200
Subject: [PATCH] upload

---
 .gitignore       |   2 +
 requirements.txt |  22 +++++
 src/__init__.py  |   0
 src/main.py      | 117 +++++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 141 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 requirements.txt
 create mode 100644 src/__init__.py
 create mode 100755 src/main.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b4d8b92
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+.venv
+src/.env
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..28a692a
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,22 @@
+aiohttp==3.8.4
+aiosignal==1.3.1
+async-timeout==4.0.2
+attrs==23.1.0
+beautifulsoup4==4.12.2
+bs4==0.0.1
+certifi==2023.5.7
+charset-normalizer==3.1.0
+discord==2.2.3
+discord.py==2.2.3
+docutils==0.19
+frozenlist==1.3.3
+idna==3.4
+lockfile==0.12.2
+lxml==4.9.2
+multidict==6.0.4
+python-daemon==3.0.1
+python-dotenv==1.0.0
+requests==2.30.0
+soupsieve==2.4.1
+urllib3==2.0.2
+yarl==1.9.2
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main.py b/src/main.py
new file mode 100755
index 0000000..0945ad5
--- /dev/null
+++ b/src/main.py
@@ -0,0 +1,117 @@
+# Watches an RSS podcast feed and reports episodes that have not been seen
+# before. Configuration is read from a .env file next to this script.
+from datetime import datetime
+import daemon
+import requests
+import time
+from time import sleep
+import os
+import re
+from bs4 import BeautifulSoup
+import dotenv
+
+# The .env file must provide STORAGE_FILE, URL and SLEEP_SECS.
+CONFIG = dotenv.dotenv_values(".env")
+
+
+def read_known() -> list:
+    # Load the episode titles that have already been seen from the storage file.
+    known: list = []
+    try:
+        with open(CONFIG['STORAGE_FILE'], 'r+') as f:
+            known = f.read().splitlines()
+    except Exception as e:
+        print("Could not initialize known titles: %s" % e)
+
+    print("="*120 + "\nLoading from file\n" + "="*120)
+    for index, item in enumerate(known):
+        print(f"{index}:\t{item}")
+
+    print("="*120)
+    return known
+
+
+def run():
+    # Poll the feed forever, printing every episode that is not yet known and
+    # remembering it in the storage file.
+    try:
+        while True:
+            r = requests.get(CONFIG['URL'])
+            soup = BeautifulSoup(r.content, features='xml')
+            articles = soup.find_all('item')
+            article_list: list = []
+            for a in articles:
+                title: str = str(a.find('title')).replace("<title>", "").replace("</title>", "")
+                desc: str = str(a.find('description')).replace("<description>", "").replace("</description>", "")
+                link: str = str(a.find('enclosure'))
+                link = re.sub(r"<.*>", "", link)
+                dur: str = str(a.find('duration')).replace("<duration>", "").replace("</duration>", "")
+                pub: str = str(a.find('pubDate')).replace("<pubDate>", "").replace("</pubDate>", "")
+                article = {
+                    'title': title,
+                    'desc': desc,
+                    'link': link,
+                    'dur': dur,
+                    'pub': pub
+                }
+                article_list.append(article)
+
+            known: list = read_known()
+            for a in article_list:
+                if a['title'] in known:
+                    print("found known: %s" % a['title'])
+
+                else:
+                    print("found unknown: %s" % a['title'])
+
+                    title = a['title']
+                    desc = a['desc']
+                    link = a['link']
+                    dur = a['dur']
+                    pub = a['pub']
+
+                    content = f"""
+=============== Neue Folge ===============
+Titel: {title}
+
+Beschreibung:
+{desc}
+
+Link: {link}
+Datum: {pub}
+Länge: {dur}
+==========================================
+"""
+
+                    print(content)
+
+                    # Discord webhook notification is disabled for now;
+                    # WEBHOOK is not defined yet.
+                    # notify = requests.post(
+                    #     url=WEBHOOK,
+                    #     json={
+                    #         "content": content
+                    #     }
+                    # )
+                    # print(notify.text)
+
+                    known.append(a['title'])
+
+                    # Rewrite the storage file with the updated list of titles.
+                    with open(CONFIG['STORAGE_FILE'], 'w+') as file:
+                        try:
+                            for item in known:
+                                file.write(item + '\n')
+                        except Exception as e:
+                            print("Could not write to file: %s" % e)
+
+                    time.sleep(0.5)
+
+            print("="*120 + "\nentering sleep for %d seconds at %s" % (int(CONFIG['SLEEP_SECS']), datetime.now()))
+            print("="*120)
+            sleep(int(CONFIG['SLEEP_SECS']))
+    except ValueError as e:
+        print(e)
+
+
+if __name__ == "__main__":
+    # Detach into the background and run the watcher as a daemon.
+    with daemon.DaemonContext():
+        run()
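
Note: src/main.py reads STORAGE_FILE, URL and SLEEP_SECS from src/.env, which the
.gitignore above keeps out of the repository. A minimal sketch of such a file is
shown below; the values are placeholders and not part of this patch. A WEBHOOK
value would only become relevant once the commented-out Discord notification is
enabled.

    # placeholder example values, not part of this patch
    STORAGE_FILE=known_titles.txt
    URL=https://example.com/feed.xml
    SLEEP_SECS=3600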